diff --git "a/log/log-train-2022-05-03-11-40-27-7" "b/log/log-train-2022-05-03-11-40-27-7" new file mode 100644--- /dev/null +++ "b/log/log-train-2022-05-03-11-40-27-7" @@ -0,0 +1,14399 @@ +2022-05-03 11:40:27,485 INFO [train.py:775] (7/8) Training started +2022-05-03 11:40:27,486 INFO [train.py:785] (7/8) Device: cuda:7 +2022-05-03 11:40:27,488 INFO [train.py:794] (7/8) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'encoder_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'decoder_dim': 512, 'joiner_dim': 512, 'model_warm_step': 3000, 'env_info': {'k2-version': '1.14', 'k2-build-type': 'Debug', 'k2-with-cuda': True, 'k2-git-sha1': '1b29f0a946f50186aaa82df46a59f492ade9692b', 'k2-git-date': 'Tue Apr 12 20:46:49 2022', 'lhotse-version': '1.1.0', 'torch-version': '1.10.1+cu111', 'torch-cuda-available': True, 'torch-cuda-version': '11.1', 'python-version': '3.8', 'icefall-git-branch': 'spgi', 'icefall-git-sha1': 'e2e5c77-dirty', 'icefall-git-date': 'Mon May 2 14:38:25 2022', 'icefall-path': '/exp/draj/mini_scale_2022/icefall', 'k2-path': '/exp/draj/mini_scale_2022/k2/k2/python/k2/__init__.py', 'lhotse-path': '/exp/draj/mini_scale_2022/lhotse/lhotse/__init__.py', 'hostname': 'r8n04', 'IP address': '10.1.8.4'}, 'world_size': 8, 'master_port': 12354, 'tensorboard': True, 'num_epochs': 20, 'start_epoch': 0, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless2/exp/v2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'initial_lr': 0.003, 'lr_batches': 5000, 'lr_epochs': 4, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'save_every_n': 8000, 'keep_last_k': 10, 'use_fp16': True, 'manifest_dir': PosixPath('data/manifests'), 'enable_musan': True, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'max_duration': 200, 'num_buckets': 30, 'on_the_fly_feats': False, 'shuffle': True, 'num_workers': 8, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'blank_id': 0, 'vocab_size': 500} +2022-05-03 11:40:27,488 INFO [train.py:796] (7/8) About to create model +2022-05-03 11:40:27,829 INFO [train.py:800] (7/8) Number of model parameters: 78648040 +2022-05-03 11:40:33,380 INFO [train.py:806] (7/8) Using DDP +2022-05-03 11:40:34,137 INFO [asr_datamodule.py:321] (7/8) About to get SPGISpeech train cuts +2022-05-03 11:40:34,140 INFO [asr_datamodule.py:179] (7/8) About to get Musan cuts +2022-05-03 11:40:35,911 INFO [asr_datamodule.py:184] (7/8) Enable MUSAN +2022-05-03 11:40:35,911 INFO [asr_datamodule.py:207] (7/8) Enable SpecAugment +2022-05-03 11:40:35,911 INFO [asr_datamodule.py:208] (7/8) Time warp factor: 80 +2022-05-03 11:40:35,911 INFO [asr_datamodule.py:221] (7/8) About to create train dataset +2022-05-03 11:40:35,911 INFO [asr_datamodule.py:234] (7/8) Using DynamicBucketingSampler. 
+2022-05-03 11:40:36,306 INFO [asr_datamodule.py:242] (7/8) About to create train dataloader +2022-05-03 11:40:36,307 INFO [asr_datamodule.py:326] (7/8) About to get SPGISpeech dev cuts +2022-05-03 11:40:36,308 INFO [asr_datamodule.py:274] (7/8) About to create dev dataset +2022-05-03 11:40:36,454 INFO [asr_datamodule.py:289] (7/8) About to create dev dataloader +2022-05-03 11:41:08,018 INFO [train.py:715] (7/8) Epoch 0, batch 0, loss[loss=3.354, simple_loss=6.708, pruned_loss=5.747, over 4901.00 frames.], tot_loss[loss=3.354, simple_loss=6.708, pruned_loss=5.747, over 4901.00 frames.], batch size: 19, lr: 3.00e-03 +2022-05-03 11:41:08,561 INFO [distributed.py:874] (7/8) Reducer buckets have been rebuilt in this iteration. +2022-05-03 11:41:46,317 INFO [train.py:715] (7/8) Epoch 0, batch 50, loss[loss=0.4694, simple_loss=0.9389, pruned_loss=6.723, over 4819.00 frames.], tot_loss[loss=1.333, simple_loss=2.665, pruned_loss=6.494, over 219136.62 frames.], batch size: 26, lr: 3.00e-03 +2022-05-03 11:42:25,580 INFO [train.py:715] (7/8) Epoch 0, batch 100, loss[loss=0.4115, simple_loss=0.823, pruned_loss=6.686, over 4957.00 frames.], tot_loss[loss=0.8213, simple_loss=1.643, pruned_loss=6.593, over 386833.35 frames.], batch size: 35, lr: 3.00e-03 +2022-05-03 11:43:04,759 INFO [train.py:715] (7/8) Epoch 0, batch 150, loss[loss=0.3703, simple_loss=0.7406, pruned_loss=6.685, over 4709.00 frames.], tot_loss[loss=0.6312, simple_loss=1.262, pruned_loss=6.591, over 516852.13 frames.], batch size: 15, lr: 3.00e-03 +2022-05-03 11:43:43,124 INFO [train.py:715] (7/8) Epoch 0, batch 200, loss[loss=0.3759, simple_loss=0.7517, pruned_loss=6.762, over 4681.00 frames.], tot_loss[loss=0.5329, simple_loss=1.066, pruned_loss=6.578, over 618398.22 frames.], batch size: 15, lr: 3.00e-03 +2022-05-03 11:44:22,066 INFO [train.py:715] (7/8) Epoch 0, batch 250, loss[loss=0.3049, simple_loss=0.6098, pruned_loss=6.561, over 4829.00 frames.], tot_loss[loss=0.4729, simple_loss=0.9459, pruned_loss=6.592, over 697223.71 frames.], batch size: 13, lr: 3.00e-03 +2022-05-03 11:45:01,533 INFO [train.py:715] (7/8) Epoch 0, batch 300, loss[loss=0.3221, simple_loss=0.6441, pruned_loss=6.694, over 4955.00 frames.], tot_loss[loss=0.4334, simple_loss=0.8667, pruned_loss=6.603, over 758479.11 frames.], batch size: 14, lr: 3.00e-03 +2022-05-03 11:45:41,188 INFO [train.py:715] (7/8) Epoch 0, batch 350, loss[loss=0.3004, simple_loss=0.6008, pruned_loss=6.571, over 4794.00 frames.], tot_loss[loss=0.4039, simple_loss=0.8079, pruned_loss=6.617, over 804764.12 frames.], batch size: 13, lr: 3.00e-03 +2022-05-03 11:46:19,553 INFO [train.py:715] (7/8) Epoch 0, batch 400, loss[loss=0.3421, simple_loss=0.6842, pruned_loss=6.551, over 4936.00 frames.], tot_loss[loss=0.3838, simple_loss=0.7675, pruned_loss=6.633, over 841668.97 frames.], batch size: 29, lr: 3.00e-03 +2022-05-03 11:46:58,910 INFO [train.py:715] (7/8) Epoch 0, batch 450, loss[loss=0.341, simple_loss=0.6819, pruned_loss=6.75, over 4894.00 frames.], tot_loss[loss=0.3683, simple_loss=0.7366, pruned_loss=6.646, over 870491.33 frames.], batch size: 22, lr: 2.99e-03 +2022-05-03 11:47:38,003 INFO [train.py:715] (7/8) Epoch 0, batch 500, loss[loss=0.3129, simple_loss=0.6258, pruned_loss=6.717, over 4954.00 frames.], tot_loss[loss=0.3562, simple_loss=0.7124, pruned_loss=6.647, over 892937.97 frames.], batch size: 35, lr: 2.99e-03 +2022-05-03 11:48:17,109 INFO [train.py:715] (7/8) Epoch 0, batch 550, loss[loss=0.329, simple_loss=0.6581, pruned_loss=6.796, over 4815.00 frames.], 
tot_loss[loss=0.3461, simple_loss=0.6921, pruned_loss=6.65, over 909752.57 frames.], batch size: 27, lr: 2.99e-03 +2022-05-03 11:48:55,930 INFO [train.py:715] (7/8) Epoch 0, batch 600, loss[loss=0.3173, simple_loss=0.6345, pruned_loss=6.752, over 4774.00 frames.], tot_loss[loss=0.3368, simple_loss=0.6737, pruned_loss=6.663, over 924151.81 frames.], batch size: 18, lr: 2.99e-03 +2022-05-03 11:49:35,149 INFO [train.py:715] (7/8) Epoch 0, batch 650, loss[loss=0.2794, simple_loss=0.5588, pruned_loss=6.756, over 4887.00 frames.], tot_loss[loss=0.3256, simple_loss=0.6512, pruned_loss=6.683, over 934171.24 frames.], batch size: 19, lr: 2.99e-03 +2022-05-03 11:50:14,495 INFO [train.py:715] (7/8) Epoch 0, batch 700, loss[loss=0.2709, simple_loss=0.5418, pruned_loss=6.779, over 4982.00 frames.], tot_loss[loss=0.3139, simple_loss=0.6278, pruned_loss=6.702, over 943352.54 frames.], batch size: 24, lr: 2.99e-03 +2022-05-03 11:50:53,001 INFO [train.py:715] (7/8) Epoch 0, batch 750, loss[loss=0.2685, simple_loss=0.537, pruned_loss=6.787, over 4979.00 frames.], tot_loss[loss=0.3014, simple_loss=0.6028, pruned_loss=6.714, over 950430.90 frames.], batch size: 28, lr: 2.98e-03 +2022-05-03 11:51:32,782 INFO [train.py:715] (7/8) Epoch 0, batch 800, loss[loss=0.2031, simple_loss=0.4062, pruned_loss=6.543, over 4791.00 frames.], tot_loss[loss=0.2897, simple_loss=0.5794, pruned_loss=6.715, over 955486.28 frames.], batch size: 14, lr: 2.98e-03 +2022-05-03 11:52:12,745 INFO [train.py:715] (7/8) Epoch 0, batch 850, loss[loss=0.2162, simple_loss=0.4324, pruned_loss=6.668, over 4822.00 frames.], tot_loss[loss=0.2786, simple_loss=0.5573, pruned_loss=6.715, over 959338.44 frames.], batch size: 25, lr: 2.98e-03 +2022-05-03 11:52:51,641 INFO [train.py:715] (7/8) Epoch 0, batch 900, loss[loss=0.2232, simple_loss=0.4464, pruned_loss=6.706, over 4887.00 frames.], tot_loss[loss=0.2683, simple_loss=0.5366, pruned_loss=6.711, over 962685.88 frames.], batch size: 19, lr: 2.98e-03 +2022-05-03 11:53:30,233 INFO [train.py:715] (7/8) Epoch 0, batch 950, loss[loss=0.2236, simple_loss=0.4471, pruned_loss=6.646, over 4970.00 frames.], tot_loss[loss=0.259, simple_loss=0.518, pruned_loss=6.711, over 964894.05 frames.], batch size: 35, lr: 2.97e-03 +2022-05-03 11:54:09,539 INFO [train.py:715] (7/8) Epoch 0, batch 1000, loss[loss=0.2381, simple_loss=0.4762, pruned_loss=6.799, over 4818.00 frames.], tot_loss[loss=0.2508, simple_loss=0.5016, pruned_loss=6.713, over 967222.45 frames.], batch size: 27, lr: 2.97e-03 +2022-05-03 11:54:48,901 INFO [train.py:715] (7/8) Epoch 0, batch 1050, loss[loss=0.1809, simple_loss=0.3618, pruned_loss=6.661, over 4819.00 frames.], tot_loss[loss=0.2444, simple_loss=0.4888, pruned_loss=6.718, over 968242.22 frames.], batch size: 12, lr: 2.97e-03 +2022-05-03 11:55:27,476 INFO [train.py:715] (7/8) Epoch 0, batch 1100, loss[loss=0.2433, simple_loss=0.4865, pruned_loss=6.804, over 4871.00 frames.], tot_loss[loss=0.2375, simple_loss=0.4749, pruned_loss=6.715, over 969628.61 frames.], batch size: 16, lr: 2.96e-03 +2022-05-03 11:56:07,480 INFO [train.py:715] (7/8) Epoch 0, batch 1150, loss[loss=0.2083, simple_loss=0.4165, pruned_loss=6.728, over 4873.00 frames.], tot_loss[loss=0.232, simple_loss=0.464, pruned_loss=6.72, over 970278.03 frames.], batch size: 16, lr: 2.96e-03 +2022-05-03 11:56:47,812 INFO [train.py:715] (7/8) Epoch 0, batch 1200, loss[loss=0.2331, simple_loss=0.4662, pruned_loss=6.8, over 4789.00 frames.], tot_loss[loss=0.2268, simple_loss=0.4536, pruned_loss=6.718, over 969837.40 frames.], batch size: 
14, lr: 2.96e-03 +2022-05-03 11:57:28,443 INFO [train.py:715] (7/8) Epoch 0, batch 1250, loss[loss=0.2264, simple_loss=0.4529, pruned_loss=6.757, over 4853.00 frames.], tot_loss[loss=0.2217, simple_loss=0.4433, pruned_loss=6.713, over 971332.91 frames.], batch size: 32, lr: 2.95e-03 +2022-05-03 11:58:07,340 INFO [train.py:715] (7/8) Epoch 0, batch 1300, loss[loss=0.2097, simple_loss=0.4195, pruned_loss=6.711, over 4878.00 frames.], tot_loss[loss=0.217, simple_loss=0.4339, pruned_loss=6.712, over 972276.39 frames.], batch size: 16, lr: 2.95e-03 +2022-05-03 11:58:47,751 INFO [train.py:715] (7/8) Epoch 0, batch 1350, loss[loss=0.1924, simple_loss=0.3848, pruned_loss=6.679, over 4834.00 frames.], tot_loss[loss=0.2134, simple_loss=0.4269, pruned_loss=6.707, over 972587.39 frames.], batch size: 26, lr: 2.95e-03 +2022-05-03 11:59:28,713 INFO [train.py:715] (7/8) Epoch 0, batch 1400, loss[loss=0.2144, simple_loss=0.4288, pruned_loss=6.7, over 4828.00 frames.], tot_loss[loss=0.2106, simple_loss=0.4212, pruned_loss=6.714, over 973967.11 frames.], batch size: 30, lr: 2.94e-03 +2022-05-03 12:00:09,330 INFO [train.py:715] (7/8) Epoch 0, batch 1450, loss[loss=0.1977, simple_loss=0.3954, pruned_loss=6.801, over 4964.00 frames.], tot_loss[loss=0.2075, simple_loss=0.415, pruned_loss=6.712, over 974759.02 frames.], batch size: 24, lr: 2.94e-03 +2022-05-03 12:00:48,852 INFO [train.py:715] (7/8) Epoch 0, batch 1500, loss[loss=0.2052, simple_loss=0.4103, pruned_loss=6.789, over 4854.00 frames.], tot_loss[loss=0.2042, simple_loss=0.4084, pruned_loss=6.705, over 975028.66 frames.], batch size: 32, lr: 2.94e-03 +2022-05-03 12:01:29,926 INFO [train.py:715] (7/8) Epoch 0, batch 1550, loss[loss=0.183, simple_loss=0.366, pruned_loss=6.576, over 4825.00 frames.], tot_loss[loss=0.2008, simple_loss=0.4016, pruned_loss=6.701, over 973883.05 frames.], batch size: 15, lr: 2.93e-03 +2022-05-03 12:02:11,273 INFO [train.py:715] (7/8) Epoch 0, batch 1600, loss[loss=0.1919, simple_loss=0.3838, pruned_loss=6.652, over 4635.00 frames.], tot_loss[loss=0.1986, simple_loss=0.3973, pruned_loss=6.696, over 973520.23 frames.], batch size: 13, lr: 2.93e-03 +2022-05-03 12:02:51,041 INFO [train.py:715] (7/8) Epoch 0, batch 1650, loss[loss=0.2136, simple_loss=0.4272, pruned_loss=6.699, over 4838.00 frames.], tot_loss[loss=0.1956, simple_loss=0.3912, pruned_loss=6.691, over 973340.57 frames.], batch size: 15, lr: 2.92e-03 +2022-05-03 12:03:32,815 INFO [train.py:715] (7/8) Epoch 0, batch 1700, loss[loss=0.1759, simple_loss=0.3518, pruned_loss=6.556, over 4836.00 frames.], tot_loss[loss=0.1934, simple_loss=0.3869, pruned_loss=6.684, over 973343.35 frames.], batch size: 30, lr: 2.92e-03 +2022-05-03 12:04:14,558 INFO [train.py:715] (7/8) Epoch 0, batch 1750, loss[loss=0.2026, simple_loss=0.4051, pruned_loss=6.672, over 4868.00 frames.], tot_loss[loss=0.1924, simple_loss=0.3848, pruned_loss=6.683, over 972802.22 frames.], batch size: 32, lr: 2.91e-03 +2022-05-03 12:04:56,014 INFO [train.py:715] (7/8) Epoch 0, batch 1800, loss[loss=0.1832, simple_loss=0.3665, pruned_loss=6.737, over 4827.00 frames.], tot_loss[loss=0.1909, simple_loss=0.3818, pruned_loss=6.68, over 972995.21 frames.], batch size: 15, lr: 2.91e-03 +2022-05-03 12:05:36,584 INFO [train.py:715] (7/8) Epoch 0, batch 1850, loss[loss=0.1865, simple_loss=0.373, pruned_loss=6.624, over 4879.00 frames.], tot_loss[loss=0.1901, simple_loss=0.3803, pruned_loss=6.678, over 973437.78 frames.], batch size: 30, lr: 2.91e-03 +2022-05-03 12:06:18,619 INFO [train.py:715] (7/8) Epoch 0, batch 1900, 
loss[loss=0.1658, simple_loss=0.3317, pruned_loss=6.625, over 4779.00 frames.], tot_loss[loss=0.1885, simple_loss=0.377, pruned_loss=6.678, over 973409.07 frames.], batch size: 18, lr: 2.90e-03 +2022-05-03 12:07:00,158 INFO [train.py:715] (7/8) Epoch 0, batch 1950, loss[loss=0.1777, simple_loss=0.3555, pruned_loss=6.496, over 4882.00 frames.], tot_loss[loss=0.1872, simple_loss=0.3743, pruned_loss=6.674, over 973590.53 frames.], batch size: 16, lr: 2.90e-03 +2022-05-03 12:07:38,879 INFO [train.py:715] (7/8) Epoch 0, batch 2000, loss[loss=0.1997, simple_loss=0.3993, pruned_loss=6.659, over 4805.00 frames.], tot_loss[loss=0.185, simple_loss=0.3701, pruned_loss=6.667, over 972170.81 frames.], batch size: 25, lr: 2.89e-03 +2022-05-03 12:08:20,000 INFO [train.py:715] (7/8) Epoch 0, batch 2050, loss[loss=0.1927, simple_loss=0.3854, pruned_loss=6.83, over 4897.00 frames.], tot_loss[loss=0.1843, simple_loss=0.3686, pruned_loss=6.664, over 972324.68 frames.], batch size: 19, lr: 2.89e-03 +2022-05-03 12:09:00,599 INFO [train.py:715] (7/8) Epoch 0, batch 2100, loss[loss=0.167, simple_loss=0.334, pruned_loss=6.618, over 4809.00 frames.], tot_loss[loss=0.1833, simple_loss=0.3666, pruned_loss=6.661, over 972811.51 frames.], batch size: 24, lr: 2.88e-03 +2022-05-03 12:09:41,211 INFO [train.py:715] (7/8) Epoch 0, batch 2150, loss[loss=0.1597, simple_loss=0.3194, pruned_loss=6.539, over 4770.00 frames.], tot_loss[loss=0.1824, simple_loss=0.3649, pruned_loss=6.665, over 972992.40 frames.], batch size: 18, lr: 2.88e-03 +2022-05-03 12:10:20,508 INFO [train.py:715] (7/8) Epoch 0, batch 2200, loss[loss=0.1994, simple_loss=0.3988, pruned_loss=6.745, over 4869.00 frames.], tot_loss[loss=0.181, simple_loss=0.3621, pruned_loss=6.667, over 972049.29 frames.], batch size: 32, lr: 2.87e-03 +2022-05-03 12:11:01,499 INFO [train.py:715] (7/8) Epoch 0, batch 2250, loss[loss=0.1982, simple_loss=0.3965, pruned_loss=6.818, over 4909.00 frames.], tot_loss[loss=0.1815, simple_loss=0.3629, pruned_loss=6.671, over 972353.23 frames.], batch size: 17, lr: 2.86e-03 +2022-05-03 12:11:42,780 INFO [train.py:715] (7/8) Epoch 0, batch 2300, loss[loss=0.1924, simple_loss=0.3848, pruned_loss=6.675, over 4877.00 frames.], tot_loss[loss=0.1802, simple_loss=0.3604, pruned_loss=6.669, over 972683.29 frames.], batch size: 16, lr: 2.86e-03 +2022-05-03 12:12:22,385 INFO [train.py:715] (7/8) Epoch 0, batch 2350, loss[loss=0.1591, simple_loss=0.3182, pruned_loss=6.636, over 4994.00 frames.], tot_loss[loss=0.1791, simple_loss=0.3583, pruned_loss=6.664, over 972238.67 frames.], batch size: 14, lr: 2.85e-03 +2022-05-03 12:13:03,134 INFO [train.py:715] (7/8) Epoch 0, batch 2400, loss[loss=0.1606, simple_loss=0.3213, pruned_loss=6.693, over 4916.00 frames.], tot_loss[loss=0.1783, simple_loss=0.3567, pruned_loss=6.667, over 972187.33 frames.], batch size: 23, lr: 2.85e-03 +2022-05-03 12:13:43,817 INFO [train.py:715] (7/8) Epoch 0, batch 2450, loss[loss=0.1491, simple_loss=0.2983, pruned_loss=6.491, over 4837.00 frames.], tot_loss[loss=0.1773, simple_loss=0.3546, pruned_loss=6.666, over 972355.26 frames.], batch size: 30, lr: 2.84e-03 +2022-05-03 12:14:24,686 INFO [train.py:715] (7/8) Epoch 0, batch 2500, loss[loss=0.1732, simple_loss=0.3464, pruned_loss=6.605, over 4976.00 frames.], tot_loss[loss=0.1772, simple_loss=0.3543, pruned_loss=6.666, over 972191.08 frames.], batch size: 14, lr: 2.84e-03 +2022-05-03 12:15:03,915 INFO [train.py:715] (7/8) Epoch 0, batch 2550, loss[loss=0.1956, simple_loss=0.3912, pruned_loss=6.693, over 4964.00 frames.], 
tot_loss[loss=0.1764, simple_loss=0.3528, pruned_loss=6.665, over 972588.92 frames.], batch size: 24, lr: 2.83e-03 +2022-05-03 12:15:44,628 INFO [train.py:715] (7/8) Epoch 0, batch 2600, loss[loss=0.1921, simple_loss=0.3842, pruned_loss=6.806, over 4736.00 frames.], tot_loss[loss=0.176, simple_loss=0.3519, pruned_loss=6.662, over 972857.44 frames.], batch size: 16, lr: 2.83e-03 +2022-05-03 12:16:25,712 INFO [train.py:715] (7/8) Epoch 0, batch 2650, loss[loss=0.1967, simple_loss=0.3934, pruned_loss=6.758, over 4829.00 frames.], tot_loss[loss=0.1747, simple_loss=0.3494, pruned_loss=6.659, over 973264.07 frames.], batch size: 15, lr: 2.82e-03 +2022-05-03 12:17:08,084 INFO [train.py:715] (7/8) Epoch 0, batch 2700, loss[loss=0.1761, simple_loss=0.3521, pruned_loss=6.654, over 4936.00 frames.], tot_loss[loss=0.1746, simple_loss=0.3491, pruned_loss=6.655, over 973590.18 frames.], batch size: 23, lr: 2.81e-03 +2022-05-03 12:17:48,876 INFO [train.py:715] (7/8) Epoch 0, batch 2750, loss[loss=0.1773, simple_loss=0.3545, pruned_loss=6.655, over 4814.00 frames.], tot_loss[loss=0.1737, simple_loss=0.3474, pruned_loss=6.652, over 972567.53 frames.], batch size: 13, lr: 2.81e-03 +2022-05-03 12:18:29,716 INFO [train.py:715] (7/8) Epoch 0, batch 2800, loss[loss=0.1737, simple_loss=0.3473, pruned_loss=6.483, over 4955.00 frames.], tot_loss[loss=0.1732, simple_loss=0.3463, pruned_loss=6.644, over 972535.36 frames.], batch size: 35, lr: 2.80e-03 +2022-05-03 12:19:10,268 INFO [train.py:715] (7/8) Epoch 0, batch 2850, loss[loss=0.1773, simple_loss=0.3547, pruned_loss=6.729, over 4888.00 frames.], tot_loss[loss=0.173, simple_loss=0.346, pruned_loss=6.65, over 973225.34 frames.], batch size: 17, lr: 2.80e-03 +2022-05-03 12:19:49,122 INFO [train.py:715] (7/8) Epoch 0, batch 2900, loss[loss=0.186, simple_loss=0.3721, pruned_loss=6.708, over 4972.00 frames.], tot_loss[loss=0.1726, simple_loss=0.3452, pruned_loss=6.644, over 973420.91 frames.], batch size: 25, lr: 2.79e-03 +2022-05-03 12:20:29,375 INFO [train.py:715] (7/8) Epoch 0, batch 2950, loss[loss=0.1804, simple_loss=0.3607, pruned_loss=6.672, over 4837.00 frames.], tot_loss[loss=0.1718, simple_loss=0.3436, pruned_loss=6.643, over 974421.16 frames.], batch size: 30, lr: 2.78e-03 +2022-05-03 12:21:11,359 INFO [train.py:715] (7/8) Epoch 0, batch 3000, loss[loss=0.8418, simple_loss=0.3617, pruned_loss=6.61, over 4836.00 frames.], tot_loss[loss=0.2084, simple_loss=0.3435, pruned_loss=6.647, over 973739.34 frames.], batch size: 30, lr: 2.78e-03 +2022-05-03 12:21:11,359 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 12:21:21,130 INFO [train.py:742] (7/8) Epoch 0, validation: loss=2.223, simple_loss=0.2788, pruned_loss=2.083, over 914524.00 frames. 
+2022-05-03 12:22:02,160 INFO [train.py:715] (7/8) Epoch 0, batch 3050, loss[loss=0.2318, simple_loss=0.3297, pruned_loss=0.6697, over 4792.00 frames.], tot_loss[loss=0.2243, simple_loss=0.3443, pruned_loss=5.41, over 973614.01 frames.], batch size: 24, lr: 2.77e-03 +2022-05-03 12:22:41,559 INFO [train.py:715] (7/8) Epoch 0, batch 3100, loss[loss=0.2023, simple_loss=0.3326, pruned_loss=0.3597, over 4769.00 frames.], tot_loss[loss=0.2223, simple_loss=0.3419, pruned_loss=4.319, over 973336.28 frames.], batch size: 19, lr: 2.77e-03 +2022-05-03 12:23:22,440 INFO [train.py:715] (7/8) Epoch 0, batch 3150, loss[loss=0.2004, simple_loss=0.3465, pruned_loss=0.2716, over 4965.00 frames.], tot_loss[loss=0.218, simple_loss=0.3421, pruned_loss=3.432, over 973378.30 frames.], batch size: 15, lr: 2.76e-03 +2022-05-03 12:24:03,665 INFO [train.py:715] (7/8) Epoch 0, batch 3200, loss[loss=0.2074, simple_loss=0.3629, pruned_loss=0.2597, over 4977.00 frames.], tot_loss[loss=0.2123, simple_loss=0.3404, pruned_loss=2.727, over 972803.00 frames.], batch size: 28, lr: 2.75e-03 +2022-05-03 12:24:44,882 INFO [train.py:715] (7/8) Epoch 0, batch 3250, loss[loss=0.1925, simple_loss=0.3402, pruned_loss=0.2246, over 4771.00 frames.], tot_loss[loss=0.2072, simple_loss=0.3389, pruned_loss=2.175, over 971820.35 frames.], batch size: 14, lr: 2.75e-03 +2022-05-03 12:25:24,111 INFO [train.py:715] (7/8) Epoch 0, batch 3300, loss[loss=0.2001, simple_loss=0.3547, pruned_loss=0.2279, over 4890.00 frames.], tot_loss[loss=0.2033, simple_loss=0.3382, pruned_loss=1.741, over 971846.85 frames.], batch size: 19, lr: 2.74e-03 +2022-05-03 12:26:05,358 INFO [train.py:715] (7/8) Epoch 0, batch 3350, loss[loss=0.2204, simple_loss=0.3888, pruned_loss=0.2599, over 4849.00 frames.], tot_loss[loss=0.1998, simple_loss=0.3374, pruned_loss=1.399, over 972160.75 frames.], batch size: 30, lr: 2.73e-03 +2022-05-03 12:26:46,190 INFO [train.py:715] (7/8) Epoch 0, batch 3400, loss[loss=0.1481, simple_loss=0.2696, pruned_loss=0.133, over 4753.00 frames.], tot_loss[loss=0.1966, simple_loss=0.336, pruned_loss=1.134, over 971735.79 frames.], batch size: 16, lr: 2.73e-03 +2022-05-03 12:27:25,313 INFO [train.py:715] (7/8) Epoch 0, batch 3450, loss[loss=0.2044, simple_loss=0.3665, pruned_loss=0.2113, over 4975.00 frames.], tot_loss[loss=0.194, simple_loss=0.335, pruned_loss=0.9251, over 971884.98 frames.], batch size: 24, lr: 2.72e-03 +2022-05-03 12:28:06,925 INFO [train.py:715] (7/8) Epoch 0, batch 3500, loss[loss=0.1646, simple_loss=0.2958, pruned_loss=0.1665, over 4702.00 frames.], tot_loss[loss=0.1902, simple_loss=0.3313, pruned_loss=0.7586, over 972403.46 frames.], batch size: 15, lr: 2.72e-03 +2022-05-03 12:28:48,562 INFO [train.py:715] (7/8) Epoch 0, batch 3550, loss[loss=0.1812, simple_loss=0.3261, pruned_loss=0.182, over 4844.00 frames.], tot_loss[loss=0.1887, simple_loss=0.3309, pruned_loss=0.6309, over 973025.03 frames.], batch size: 15, lr: 2.71e-03 +2022-05-03 12:29:29,805 INFO [train.py:715] (7/8) Epoch 0, batch 3600, loss[loss=0.1968, simple_loss=0.3542, pruned_loss=0.1972, over 4901.00 frames.], tot_loss[loss=0.1867, simple_loss=0.3296, pruned_loss=0.5295, over 973091.51 frames.], batch size: 17, lr: 2.70e-03 +2022-05-03 12:30:09,004 INFO [train.py:715] (7/8) Epoch 0, batch 3650, loss[loss=0.1753, simple_loss=0.3186, pruned_loss=0.1603, over 4889.00 frames.], tot_loss[loss=0.1847, simple_loss=0.3277, pruned_loss=0.4498, over 972581.19 frames.], batch size: 22, lr: 2.70e-03 +2022-05-03 12:30:50,509 INFO [train.py:715] (7/8) Epoch 0, batch 3700, 
loss[loss=0.1635, simple_loss=0.2972, pruned_loss=0.1485, over 4780.00 frames.], tot_loss[loss=0.1845, simple_loss=0.3287, pruned_loss=0.3893, over 972276.17 frames.], batch size: 12, lr: 2.69e-03 +2022-05-03 12:31:32,108 INFO [train.py:715] (7/8) Epoch 0, batch 3750, loss[loss=0.2026, simple_loss=0.3631, pruned_loss=0.2109, over 4886.00 frames.], tot_loss[loss=0.1833, simple_loss=0.3278, pruned_loss=0.341, over 972004.16 frames.], batch size: 22, lr: 2.68e-03 +2022-05-03 12:32:11,313 INFO [train.py:715] (7/8) Epoch 0, batch 3800, loss[loss=0.1656, simple_loss=0.3013, pruned_loss=0.1496, over 4914.00 frames.], tot_loss[loss=0.1818, simple_loss=0.326, pruned_loss=0.3018, over 972313.23 frames.], batch size: 18, lr: 2.68e-03 +2022-05-03 12:33:05,630 INFO [train.py:715] (7/8) Epoch 0, batch 3850, loss[loss=0.1822, simple_loss=0.3275, pruned_loss=0.1844, over 4979.00 frames.], tot_loss[loss=0.1804, simple_loss=0.3242, pruned_loss=0.2714, over 972125.96 frames.], batch size: 14, lr: 2.67e-03 +2022-05-03 12:33:46,703 INFO [train.py:715] (7/8) Epoch 0, batch 3900, loss[loss=0.1459, simple_loss=0.2675, pruned_loss=0.1216, over 4872.00 frames.], tot_loss[loss=0.1794, simple_loss=0.3231, pruned_loss=0.2475, over 972217.53 frames.], batch size: 22, lr: 2.66e-03 +2022-05-03 12:34:26,861 INFO [train.py:715] (7/8) Epoch 0, batch 3950, loss[loss=0.1776, simple_loss=0.3192, pruned_loss=0.1801, over 4789.00 frames.], tot_loss[loss=0.1794, simple_loss=0.3236, pruned_loss=0.23, over 972142.12 frames.], batch size: 14, lr: 2.66e-03 +2022-05-03 12:35:06,670 INFO [train.py:715] (7/8) Epoch 0, batch 4000, loss[loss=0.1689, simple_loss=0.3056, pruned_loss=0.1607, over 4644.00 frames.], tot_loss[loss=0.1791, simple_loss=0.3235, pruned_loss=0.2155, over 971539.70 frames.], batch size: 13, lr: 2.65e-03 +2022-05-03 12:35:47,596 INFO [train.py:715] (7/8) Epoch 0, batch 4050, loss[loss=0.1604, simple_loss=0.2936, pruned_loss=0.1361, over 4824.00 frames.], tot_loss[loss=0.1791, simple_loss=0.3238, pruned_loss=0.205, over 971238.48 frames.], batch size: 15, lr: 2.64e-03 +2022-05-03 12:36:28,813 INFO [train.py:715] (7/8) Epoch 0, batch 4100, loss[loss=0.1706, simple_loss=0.3119, pruned_loss=0.1464, over 4797.00 frames.], tot_loss[loss=0.179, simple_loss=0.3238, pruned_loss=0.1961, over 971747.80 frames.], batch size: 24, lr: 2.64e-03 +2022-05-03 12:37:07,961 INFO [train.py:715] (7/8) Epoch 0, batch 4150, loss[loss=0.1872, simple_loss=0.3414, pruned_loss=0.1651, over 4893.00 frames.], tot_loss[loss=0.178, simple_loss=0.3224, pruned_loss=0.1871, over 972129.08 frames.], batch size: 19, lr: 2.63e-03 +2022-05-03 12:37:49,193 INFO [train.py:715] (7/8) Epoch 0, batch 4200, loss[loss=0.1776, simple_loss=0.3228, pruned_loss=0.1619, over 4931.00 frames.], tot_loss[loss=0.1779, simple_loss=0.3225, pruned_loss=0.1817, over 971879.43 frames.], batch size: 18, lr: 2.63e-03 +2022-05-03 12:38:30,918 INFO [train.py:715] (7/8) Epoch 0, batch 4250, loss[loss=0.2063, simple_loss=0.3718, pruned_loss=0.204, over 4957.00 frames.], tot_loss[loss=0.1773, simple_loss=0.3218, pruned_loss=0.176, over 972305.15 frames.], batch size: 14, lr: 2.62e-03 +2022-05-03 12:39:11,502 INFO [train.py:715] (7/8) Epoch 0, batch 4300, loss[loss=0.2199, simple_loss=0.3962, pruned_loss=0.2177, over 4794.00 frames.], tot_loss[loss=0.1767, simple_loss=0.321, pruned_loss=0.1716, over 972557.10 frames.], batch size: 24, lr: 2.61e-03 +2022-05-03 12:39:51,578 INFO [train.py:715] (7/8) Epoch 0, batch 4350, loss[loss=0.1817, simple_loss=0.3286, pruned_loss=0.1735, over 
4772.00 frames.], tot_loss[loss=0.1774, simple_loss=0.3222, pruned_loss=0.1697, over 972665.73 frames.], batch size: 17, lr: 2.61e-03 +2022-05-03 12:40:33,094 INFO [train.py:715] (7/8) Epoch 0, batch 4400, loss[loss=0.1725, simple_loss=0.3181, pruned_loss=0.1342, over 4775.00 frames.], tot_loss[loss=0.1767, simple_loss=0.3214, pruned_loss=0.1659, over 972924.94 frames.], batch size: 18, lr: 2.60e-03 +2022-05-03 12:41:14,317 INFO [train.py:715] (7/8) Epoch 0, batch 4450, loss[loss=0.1735, simple_loss=0.3169, pruned_loss=0.15, over 4791.00 frames.], tot_loss[loss=0.1769, simple_loss=0.3218, pruned_loss=0.1642, over 971860.65 frames.], batch size: 17, lr: 2.59e-03 +2022-05-03 12:41:53,449 INFO [train.py:715] (7/8) Epoch 0, batch 4500, loss[loss=0.1599, simple_loss=0.2926, pruned_loss=0.1361, over 4857.00 frames.], tot_loss[loss=0.1764, simple_loss=0.3211, pruned_loss=0.1619, over 972472.78 frames.], batch size: 20, lr: 2.59e-03 +2022-05-03 12:42:34,819 INFO [train.py:715] (7/8) Epoch 0, batch 4550, loss[loss=0.1811, simple_loss=0.3272, pruned_loss=0.1753, over 4966.00 frames.], tot_loss[loss=0.1763, simple_loss=0.3211, pruned_loss=0.1603, over 972114.31 frames.], batch size: 35, lr: 2.58e-03 +2022-05-03 12:43:16,373 INFO [train.py:715] (7/8) Epoch 0, batch 4600, loss[loss=0.1696, simple_loss=0.3107, pruned_loss=0.1425, over 4826.00 frames.], tot_loss[loss=0.1751, simple_loss=0.3192, pruned_loss=0.1572, over 971949.69 frames.], batch size: 26, lr: 2.57e-03 +2022-05-03 12:43:56,541 INFO [train.py:715] (7/8) Epoch 0, batch 4650, loss[loss=0.1845, simple_loss=0.3326, pruned_loss=0.1823, over 4886.00 frames.], tot_loss[loss=0.1743, simple_loss=0.3179, pruned_loss=0.1554, over 972660.84 frames.], batch size: 16, lr: 2.57e-03 +2022-05-03 12:44:36,477 INFO [train.py:715] (7/8) Epoch 0, batch 4700, loss[loss=0.1862, simple_loss=0.3394, pruned_loss=0.1648, over 4754.00 frames.], tot_loss[loss=0.1739, simple_loss=0.3171, pruned_loss=0.1545, over 971683.34 frames.], batch size: 18, lr: 2.56e-03 +2022-05-03 12:45:17,611 INFO [train.py:715] (7/8) Epoch 0, batch 4750, loss[loss=0.1868, simple_loss=0.3393, pruned_loss=0.1716, over 4949.00 frames.], tot_loss[loss=0.1742, simple_loss=0.3178, pruned_loss=0.1543, over 971983.69 frames.], batch size: 21, lr: 2.55e-03 +2022-05-03 12:45:58,877 INFO [train.py:715] (7/8) Epoch 0, batch 4800, loss[loss=0.1958, simple_loss=0.3575, pruned_loss=0.1699, over 4977.00 frames.], tot_loss[loss=0.1743, simple_loss=0.3179, pruned_loss=0.1546, over 972788.46 frames.], batch size: 15, lr: 2.55e-03 +2022-05-03 12:46:38,838 INFO [train.py:715] (7/8) Epoch 0, batch 4850, loss[loss=0.1702, simple_loss=0.312, pruned_loss=0.1418, over 4823.00 frames.], tot_loss[loss=0.1737, simple_loss=0.3169, pruned_loss=0.1529, over 972449.84 frames.], batch size: 25, lr: 2.54e-03 +2022-05-03 12:47:19,645 INFO [train.py:715] (7/8) Epoch 0, batch 4900, loss[loss=0.1916, simple_loss=0.3469, pruned_loss=0.1819, over 4836.00 frames.], tot_loss[loss=0.1736, simple_loss=0.317, pruned_loss=0.1519, over 972037.38 frames.], batch size: 15, lr: 2.54e-03 +2022-05-03 12:48:01,147 INFO [train.py:715] (7/8) Epoch 0, batch 4950, loss[loss=0.1625, simple_loss=0.2982, pruned_loss=0.1343, over 4946.00 frames.], tot_loss[loss=0.1725, simple_loss=0.3151, pruned_loss=0.1497, over 972380.76 frames.], batch size: 35, lr: 2.53e-03 +2022-05-03 12:48:41,426 INFO [train.py:715] (7/8) Epoch 0, batch 5000, loss[loss=0.1808, simple_loss=0.3276, pruned_loss=0.1704, over 4894.00 frames.], tot_loss[loss=0.1723, simple_loss=0.3149, 
pruned_loss=0.1489, over 973013.29 frames.], batch size: 38, lr: 2.52e-03 +2022-05-03 12:49:22,154 INFO [train.py:715] (7/8) Epoch 0, batch 5050, loss[loss=0.1158, simple_loss=0.217, pruned_loss=0.073, over 4767.00 frames.], tot_loss[loss=0.1726, simple_loss=0.3155, pruned_loss=0.1487, over 973803.03 frames.], batch size: 12, lr: 2.52e-03 +2022-05-03 12:50:05,001 INFO [train.py:715] (7/8) Epoch 0, batch 5100, loss[loss=0.1876, simple_loss=0.3392, pruned_loss=0.1802, over 4977.00 frames.], tot_loss[loss=0.1729, simple_loss=0.3159, pruned_loss=0.1497, over 973526.23 frames.], batch size: 25, lr: 2.51e-03 +2022-05-03 12:50:48,210 INFO [train.py:715] (7/8) Epoch 0, batch 5150, loss[loss=0.1614, simple_loss=0.2969, pruned_loss=0.1291, over 4920.00 frames.], tot_loss[loss=0.1734, simple_loss=0.3169, pruned_loss=0.1499, over 974111.89 frames.], batch size: 18, lr: 2.50e-03 +2022-05-03 12:51:28,088 INFO [train.py:715] (7/8) Epoch 0, batch 5200, loss[loss=0.1761, simple_loss=0.3235, pruned_loss=0.1437, over 4894.00 frames.], tot_loss[loss=0.1721, simple_loss=0.3148, pruned_loss=0.1471, over 973329.86 frames.], batch size: 17, lr: 2.50e-03 +2022-05-03 12:52:08,702 INFO [train.py:715] (7/8) Epoch 0, batch 5250, loss[loss=0.1681, simple_loss=0.307, pruned_loss=0.1463, over 4820.00 frames.], tot_loss[loss=0.1717, simple_loss=0.3142, pruned_loss=0.146, over 973637.19 frames.], batch size: 27, lr: 2.49e-03 +2022-05-03 12:52:49,823 INFO [train.py:715] (7/8) Epoch 0, batch 5300, loss[loss=0.1686, simple_loss=0.309, pruned_loss=0.1411, over 4794.00 frames.], tot_loss[loss=0.1705, simple_loss=0.3124, pruned_loss=0.1435, over 973033.33 frames.], batch size: 14, lr: 2.49e-03 +2022-05-03 12:53:30,343 INFO [train.py:715] (7/8) Epoch 0, batch 5350, loss[loss=0.22, simple_loss=0.3937, pruned_loss=0.2318, over 4910.00 frames.], tot_loss[loss=0.17, simple_loss=0.3116, pruned_loss=0.1423, over 973615.58 frames.], batch size: 39, lr: 2.48e-03 +2022-05-03 12:54:10,024 INFO [train.py:715] (7/8) Epoch 0, batch 5400, loss[loss=0.1774, simple_loss=0.32, pruned_loss=0.1742, over 4960.00 frames.], tot_loss[loss=0.1702, simple_loss=0.312, pruned_loss=0.142, over 974105.35 frames.], batch size: 21, lr: 2.47e-03 +2022-05-03 12:54:50,456 INFO [train.py:715] (7/8) Epoch 0, batch 5450, loss[loss=0.189, simple_loss=0.3439, pruned_loss=0.1703, over 4763.00 frames.], tot_loss[loss=0.1692, simple_loss=0.3104, pruned_loss=0.1402, over 973755.34 frames.], batch size: 18, lr: 2.47e-03 +2022-05-03 12:55:31,414 INFO [train.py:715] (7/8) Epoch 0, batch 5500, loss[loss=0.1795, simple_loss=0.3318, pruned_loss=0.1364, over 4805.00 frames.], tot_loss[loss=0.1691, simple_loss=0.3102, pruned_loss=0.1399, over 972716.86 frames.], batch size: 21, lr: 2.46e-03 +2022-05-03 12:56:11,126 INFO [train.py:715] (7/8) Epoch 0, batch 5550, loss[loss=0.1832, simple_loss=0.3363, pruned_loss=0.1504, over 4810.00 frames.], tot_loss[loss=0.1697, simple_loss=0.3114, pruned_loss=0.1406, over 973285.86 frames.], batch size: 25, lr: 2.45e-03 +2022-05-03 12:56:51,166 INFO [train.py:715] (7/8) Epoch 0, batch 5600, loss[loss=0.1742, simple_loss=0.3197, pruned_loss=0.1439, over 4965.00 frames.], tot_loss[loss=0.1687, simple_loss=0.3095, pruned_loss=0.1392, over 972833.02 frames.], batch size: 28, lr: 2.45e-03 +2022-05-03 12:57:32,365 INFO [train.py:715] (7/8) Epoch 0, batch 5650, loss[loss=0.1677, simple_loss=0.3087, pruned_loss=0.1334, over 4797.00 frames.], tot_loss[loss=0.1683, simple_loss=0.309, pruned_loss=0.1387, over 972501.64 frames.], batch size: 24, lr: 
2.44e-03 +2022-05-03 12:58:12,925 INFO [train.py:715] (7/8) Epoch 0, batch 5700, loss[loss=0.1815, simple_loss=0.3312, pruned_loss=0.1591, over 4951.00 frames.], tot_loss[loss=0.1681, simple_loss=0.3085, pruned_loss=0.1382, over 973048.45 frames.], batch size: 23, lr: 2.44e-03 +2022-05-03 12:58:52,127 INFO [train.py:715] (7/8) Epoch 0, batch 5750, loss[loss=0.193, simple_loss=0.351, pruned_loss=0.1753, over 4961.00 frames.], tot_loss[loss=0.1679, simple_loss=0.3083, pruned_loss=0.1381, over 972786.93 frames.], batch size: 39, lr: 2.43e-03 +2022-05-03 12:59:33,134 INFO [train.py:715] (7/8) Epoch 0, batch 5800, loss[loss=0.1904, simple_loss=0.3512, pruned_loss=0.1479, over 4832.00 frames.], tot_loss[loss=0.1671, simple_loss=0.307, pruned_loss=0.1362, over 971780.33 frames.], batch size: 25, lr: 2.42e-03 +2022-05-03 13:00:14,325 INFO [train.py:715] (7/8) Epoch 0, batch 5850, loss[loss=0.1581, simple_loss=0.2914, pruned_loss=0.124, over 4886.00 frames.], tot_loss[loss=0.1677, simple_loss=0.308, pruned_loss=0.137, over 971483.29 frames.], batch size: 22, lr: 2.42e-03 +2022-05-03 13:00:54,236 INFO [train.py:715] (7/8) Epoch 0, batch 5900, loss[loss=0.1769, simple_loss=0.3217, pruned_loss=0.1605, over 4903.00 frames.], tot_loss[loss=0.1676, simple_loss=0.3079, pruned_loss=0.1368, over 972400.97 frames.], batch size: 39, lr: 2.41e-03 +2022-05-03 13:01:33,984 INFO [train.py:715] (7/8) Epoch 0, batch 5950, loss[loss=0.1579, simple_loss=0.2899, pruned_loss=0.1298, over 4788.00 frames.], tot_loss[loss=0.1678, simple_loss=0.3083, pruned_loss=0.1367, over 972203.68 frames.], batch size: 18, lr: 2.41e-03 +2022-05-03 13:02:14,782 INFO [train.py:715] (7/8) Epoch 0, batch 6000, loss[loss=0.3113, simple_loss=0.3347, pruned_loss=0.144, over 4811.00 frames.], tot_loss[loss=0.1674, simple_loss=0.3058, pruned_loss=0.1342, over 971474.17 frames.], batch size: 25, lr: 2.40e-03 +2022-05-03 13:02:14,783 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 13:02:25,809 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1779, simple_loss=0.2457, pruned_loss=0.05502, over 914524.00 frames. 
+2022-05-03 13:03:07,308 INFO [train.py:715] (7/8) Epoch 0, batch 6050, loss[loss=0.3091, simple_loss=0.3047, pruned_loss=0.1567, over 4870.00 frames.], tot_loss[loss=0.1978, simple_loss=0.3069, pruned_loss=0.1375, over 971918.36 frames.], batch size: 30, lr: 2.39e-03 +2022-05-03 13:03:47,846 INFO [train.py:715] (7/8) Epoch 0, batch 6100, loss[loss=0.3228, simple_loss=0.3337, pruned_loss=0.156, over 4854.00 frames.], tot_loss[loss=0.2167, simple_loss=0.3061, pruned_loss=0.1361, over 972701.92 frames.], batch size: 20, lr: 2.39e-03 +2022-05-03 13:04:27,381 INFO [train.py:715] (7/8) Epoch 0, batch 6150, loss[loss=0.2958, simple_loss=0.3232, pruned_loss=0.1342, over 4805.00 frames.], tot_loss[loss=0.2337, simple_loss=0.3076, pruned_loss=0.1363, over 972787.83 frames.], batch size: 21, lr: 2.38e-03 +2022-05-03 13:05:08,108 INFO [train.py:715] (7/8) Epoch 0, batch 6200, loss[loss=0.3288, simple_loss=0.3459, pruned_loss=0.1558, over 4829.00 frames.], tot_loss[loss=0.2444, simple_loss=0.3074, pruned_loss=0.1346, over 973194.07 frames.], batch size: 27, lr: 2.38e-03 +2022-05-03 13:05:48,919 INFO [train.py:715] (7/8) Epoch 0, batch 6250, loss[loss=0.2986, simple_loss=0.3146, pruned_loss=0.1413, over 4839.00 frames.], tot_loss[loss=0.2524, simple_loss=0.3067, pruned_loss=0.1331, over 972870.14 frames.], batch size: 30, lr: 2.37e-03 +2022-05-03 13:06:29,115 INFO [train.py:715] (7/8) Epoch 0, batch 6300, loss[loss=0.3022, simple_loss=0.3198, pruned_loss=0.1423, over 4924.00 frames.], tot_loss[loss=0.2583, simple_loss=0.3063, pruned_loss=0.1317, over 972724.74 frames.], batch size: 18, lr: 2.37e-03 +2022-05-03 13:07:09,800 INFO [train.py:715] (7/8) Epoch 0, batch 6350, loss[loss=0.2723, simple_loss=0.294, pruned_loss=0.1254, over 4977.00 frames.], tot_loss[loss=0.2653, simple_loss=0.3078, pruned_loss=0.1321, over 972733.08 frames.], batch size: 14, lr: 2.36e-03 +2022-05-03 13:07:50,724 INFO [train.py:715] (7/8) Epoch 0, batch 6400, loss[loss=0.3441, simple_loss=0.3502, pruned_loss=0.169, over 4944.00 frames.], tot_loss[loss=0.2711, simple_loss=0.3094, pruned_loss=0.1325, over 972666.52 frames.], batch size: 21, lr: 2.35e-03 +2022-05-03 13:08:30,737 INFO [train.py:715] (7/8) Epoch 0, batch 6450, loss[loss=0.2457, simple_loss=0.2861, pruned_loss=0.1026, over 4887.00 frames.], tot_loss[loss=0.2734, simple_loss=0.3086, pruned_loss=0.1316, over 972502.90 frames.], batch size: 19, lr: 2.35e-03 +2022-05-03 13:09:10,069 INFO [train.py:715] (7/8) Epoch 0, batch 6500, loss[loss=0.2563, simple_loss=0.2887, pruned_loss=0.112, over 4844.00 frames.], tot_loss[loss=0.2738, simple_loss=0.3073, pruned_loss=0.1299, over 972797.99 frames.], batch size: 26, lr: 2.34e-03 +2022-05-03 13:09:50,943 INFO [train.py:715] (7/8) Epoch 0, batch 6550, loss[loss=0.3265, simple_loss=0.3411, pruned_loss=0.1559, over 4928.00 frames.], tot_loss[loss=0.2769, simple_loss=0.3083, pruned_loss=0.1303, over 972392.71 frames.], batch size: 18, lr: 2.34e-03 +2022-05-03 13:10:31,734 INFO [train.py:715] (7/8) Epoch 0, batch 6600, loss[loss=0.2389, simple_loss=0.2769, pruned_loss=0.1005, over 4959.00 frames.], tot_loss[loss=0.2777, simple_loss=0.308, pruned_loss=0.1296, over 973567.53 frames.], batch size: 35, lr: 2.33e-03 +2022-05-03 13:11:11,225 INFO [train.py:715] (7/8) Epoch 0, batch 6650, loss[loss=0.2708, simple_loss=0.305, pruned_loss=0.1183, over 4968.00 frames.], tot_loss[loss=0.2783, simple_loss=0.308, pruned_loss=0.1289, over 972888.81 frames.], batch size: 24, lr: 2.33e-03 +2022-05-03 13:11:51,658 INFO [train.py:715] (7/8) Epoch 0, 
batch 6700, loss[loss=0.2867, simple_loss=0.3119, pruned_loss=0.1308, over 4973.00 frames.], tot_loss[loss=0.2781, simple_loss=0.3073, pruned_loss=0.128, over 972540.86 frames.], batch size: 24, lr: 2.32e-03 +2022-05-03 13:12:32,421 INFO [train.py:715] (7/8) Epoch 0, batch 6750, loss[loss=0.3944, simple_loss=0.3705, pruned_loss=0.2092, over 4828.00 frames.], tot_loss[loss=0.2786, simple_loss=0.3074, pruned_loss=0.1277, over 972424.34 frames.], batch size: 15, lr: 2.31e-03 +2022-05-03 13:13:12,501 INFO [train.py:715] (7/8) Epoch 0, batch 6800, loss[loss=0.3251, simple_loss=0.3343, pruned_loss=0.1579, over 4808.00 frames.], tot_loss[loss=0.2769, simple_loss=0.3064, pruned_loss=0.1259, over 972694.61 frames.], batch size: 21, lr: 2.31e-03 +2022-05-03 13:13:52,213 INFO [train.py:715] (7/8) Epoch 0, batch 6850, loss[loss=0.2862, simple_loss=0.3124, pruned_loss=0.13, over 4940.00 frames.], tot_loss[loss=0.2764, simple_loss=0.3061, pruned_loss=0.1251, over 972924.73 frames.], batch size: 23, lr: 2.30e-03 +2022-05-03 13:14:32,495 INFO [train.py:715] (7/8) Epoch 0, batch 6900, loss[loss=0.226, simple_loss=0.2689, pruned_loss=0.09158, over 4843.00 frames.], tot_loss[loss=0.2757, simple_loss=0.3056, pruned_loss=0.1242, over 972272.57 frames.], batch size: 30, lr: 2.30e-03 +2022-05-03 13:15:12,917 INFO [train.py:715] (7/8) Epoch 0, batch 6950, loss[loss=0.2995, simple_loss=0.322, pruned_loss=0.1385, over 4827.00 frames.], tot_loss[loss=0.2748, simple_loss=0.305, pruned_loss=0.1233, over 971477.83 frames.], batch size: 15, lr: 2.29e-03 +2022-05-03 13:15:53,037 INFO [train.py:715] (7/8) Epoch 0, batch 7000, loss[loss=0.2536, simple_loss=0.2845, pruned_loss=0.1113, over 4803.00 frames.], tot_loss[loss=0.2747, simple_loss=0.3055, pruned_loss=0.1227, over 972450.51 frames.], batch size: 21, lr: 2.29e-03 +2022-05-03 13:16:33,742 INFO [train.py:715] (7/8) Epoch 0, batch 7050, loss[loss=0.249, simple_loss=0.2857, pruned_loss=0.1061, over 4931.00 frames.], tot_loss[loss=0.2741, simple_loss=0.3045, pruned_loss=0.1224, over 972074.76 frames.], batch size: 18, lr: 2.28e-03 +2022-05-03 13:17:14,927 INFO [train.py:715] (7/8) Epoch 0, batch 7100, loss[loss=0.2466, simple_loss=0.2789, pruned_loss=0.1072, over 4741.00 frames.], tot_loss[loss=0.2749, simple_loss=0.3047, pruned_loss=0.123, over 971534.25 frames.], batch size: 16, lr: 2.28e-03 +2022-05-03 13:17:55,877 INFO [train.py:715] (7/8) Epoch 0, batch 7150, loss[loss=0.3337, simple_loss=0.3516, pruned_loss=0.1579, over 4876.00 frames.], tot_loss[loss=0.2766, simple_loss=0.3064, pruned_loss=0.1238, over 972082.33 frames.], batch size: 22, lr: 2.27e-03 +2022-05-03 13:18:35,511 INFO [train.py:715] (7/8) Epoch 0, batch 7200, loss[loss=0.2801, simple_loss=0.3091, pruned_loss=0.1255, over 4921.00 frames.], tot_loss[loss=0.2747, simple_loss=0.3056, pruned_loss=0.1222, over 972587.49 frames.], batch size: 39, lr: 2.27e-03 +2022-05-03 13:19:16,096 INFO [train.py:715] (7/8) Epoch 0, batch 7250, loss[loss=0.2171, simple_loss=0.265, pruned_loss=0.0846, over 4781.00 frames.], tot_loss[loss=0.2733, simple_loss=0.3048, pruned_loss=0.1212, over 972149.11 frames.], batch size: 14, lr: 2.26e-03 +2022-05-03 13:19:55,970 INFO [train.py:715] (7/8) Epoch 0, batch 7300, loss[loss=0.2874, simple_loss=0.3244, pruned_loss=0.1252, over 4896.00 frames.], tot_loss[loss=0.2727, simple_loss=0.3047, pruned_loss=0.1205, over 971291.85 frames.], batch size: 19, lr: 2.26e-03 +2022-05-03 13:20:36,068 INFO [train.py:715] (7/8) Epoch 0, batch 7350, loss[loss=0.2855, simple_loss=0.3311, 
pruned_loss=0.12, over 4821.00 frames.], tot_loss[loss=0.2723, simple_loss=0.3048, pruned_loss=0.12, over 970676.52 frames.], batch size: 25, lr: 2.25e-03 +2022-05-03 13:21:16,440 INFO [train.py:715] (7/8) Epoch 0, batch 7400, loss[loss=0.291, simple_loss=0.3261, pruned_loss=0.1279, over 4915.00 frames.], tot_loss[loss=0.2731, simple_loss=0.3053, pruned_loss=0.1205, over 971577.62 frames.], batch size: 29, lr: 2.24e-03 +2022-05-03 13:21:57,043 INFO [train.py:715] (7/8) Epoch 0, batch 7450, loss[loss=0.2552, simple_loss=0.2839, pruned_loss=0.1133, over 4862.00 frames.], tot_loss[loss=0.2727, simple_loss=0.3049, pruned_loss=0.1203, over 971628.37 frames.], batch size: 32, lr: 2.24e-03 +2022-05-03 13:22:36,848 INFO [train.py:715] (7/8) Epoch 0, batch 7500, loss[loss=0.2567, simple_loss=0.2927, pruned_loss=0.1104, over 4810.00 frames.], tot_loss[loss=0.2704, simple_loss=0.3035, pruned_loss=0.1187, over 972350.60 frames.], batch size: 14, lr: 2.23e-03 +2022-05-03 13:23:16,576 INFO [train.py:715] (7/8) Epoch 0, batch 7550, loss[loss=0.3303, simple_loss=0.347, pruned_loss=0.1568, over 4697.00 frames.], tot_loss[loss=0.2714, simple_loss=0.3043, pruned_loss=0.1193, over 972587.92 frames.], batch size: 15, lr: 2.23e-03 +2022-05-03 13:23:57,046 INFO [train.py:715] (7/8) Epoch 0, batch 7600, loss[loss=0.245, simple_loss=0.2896, pruned_loss=0.1002, over 4967.00 frames.], tot_loss[loss=0.2717, simple_loss=0.3049, pruned_loss=0.1193, over 972050.36 frames.], batch size: 15, lr: 2.22e-03 +2022-05-03 13:24:37,504 INFO [train.py:715] (7/8) Epoch 0, batch 7650, loss[loss=0.2618, simple_loss=0.3107, pruned_loss=0.1065, over 4976.00 frames.], tot_loss[loss=0.2716, simple_loss=0.3047, pruned_loss=0.1192, over 971667.70 frames.], batch size: 15, lr: 2.22e-03 +2022-05-03 13:25:16,998 INFO [train.py:715] (7/8) Epoch 0, batch 7700, loss[loss=0.2446, simple_loss=0.2827, pruned_loss=0.1033, over 4815.00 frames.], tot_loss[loss=0.2722, simple_loss=0.305, pruned_loss=0.1197, over 972295.89 frames.], batch size: 12, lr: 2.21e-03 +2022-05-03 13:25:57,326 INFO [train.py:715] (7/8) Epoch 0, batch 7750, loss[loss=0.2231, simple_loss=0.2737, pruned_loss=0.08626, over 4892.00 frames.], tot_loss[loss=0.2706, simple_loss=0.3042, pruned_loss=0.1185, over 972300.94 frames.], batch size: 19, lr: 2.21e-03 +2022-05-03 13:26:38,377 INFO [train.py:715] (7/8) Epoch 0, batch 7800, loss[loss=0.2761, simple_loss=0.3152, pruned_loss=0.1185, over 4936.00 frames.], tot_loss[loss=0.2707, simple_loss=0.3043, pruned_loss=0.1185, over 972845.26 frames.], batch size: 21, lr: 2.20e-03 +2022-05-03 13:27:18,720 INFO [train.py:715] (7/8) Epoch 0, batch 7850, loss[loss=0.339, simple_loss=0.3568, pruned_loss=0.1606, over 4756.00 frames.], tot_loss[loss=0.2716, simple_loss=0.3052, pruned_loss=0.119, over 972377.79 frames.], batch size: 14, lr: 2.20e-03 +2022-05-03 13:27:58,883 INFO [train.py:715] (7/8) Epoch 0, batch 7900, loss[loss=0.2029, simple_loss=0.259, pruned_loss=0.0734, over 4873.00 frames.], tot_loss[loss=0.2693, simple_loss=0.3037, pruned_loss=0.1174, over 972415.40 frames.], batch size: 16, lr: 2.19e-03 +2022-05-03 13:28:39,530 INFO [train.py:715] (7/8) Epoch 0, batch 7950, loss[loss=0.2488, simple_loss=0.2828, pruned_loss=0.1074, over 4853.00 frames.], tot_loss[loss=0.2674, simple_loss=0.3021, pruned_loss=0.1163, over 972436.78 frames.], batch size: 34, lr: 2.19e-03 +2022-05-03 13:29:22,247 INFO [train.py:715] (7/8) Epoch 0, batch 8000, loss[loss=0.3132, simple_loss=0.337, pruned_loss=0.1447, over 4943.00 frames.], tot_loss[loss=0.2681, 
simple_loss=0.3027, pruned_loss=0.1167, over 971956.78 frames.], batch size: 39, lr: 2.18e-03 +2022-05-03 13:30:02,111 INFO [train.py:715] (7/8) Epoch 0, batch 8050, loss[loss=0.3809, simple_loss=0.4012, pruned_loss=0.1803, over 4912.00 frames.], tot_loss[loss=0.2681, simple_loss=0.3029, pruned_loss=0.1166, over 971988.33 frames.], batch size: 19, lr: 2.18e-03 +2022-05-03 13:30:41,989 INFO [train.py:715] (7/8) Epoch 0, batch 8100, loss[loss=0.3043, simple_loss=0.3373, pruned_loss=0.1356, over 4813.00 frames.], tot_loss[loss=0.2702, simple_loss=0.3046, pruned_loss=0.1179, over 972547.07 frames.], batch size: 27, lr: 2.17e-03 +2022-05-03 13:31:23,000 INFO [train.py:715] (7/8) Epoch 0, batch 8150, loss[loss=0.2472, simple_loss=0.2957, pruned_loss=0.09929, over 4815.00 frames.], tot_loss[loss=0.2692, simple_loss=0.3039, pruned_loss=0.1172, over 972521.98 frames.], batch size: 27, lr: 2.17e-03 +2022-05-03 13:32:02,633 INFO [train.py:715] (7/8) Epoch 0, batch 8200, loss[loss=0.2621, simple_loss=0.2942, pruned_loss=0.115, over 4903.00 frames.], tot_loss[loss=0.2688, simple_loss=0.3036, pruned_loss=0.117, over 973344.73 frames.], batch size: 19, lr: 2.16e-03 +2022-05-03 13:32:42,147 INFO [train.py:715] (7/8) Epoch 0, batch 8250, loss[loss=0.2881, simple_loss=0.3326, pruned_loss=0.1218, over 4956.00 frames.], tot_loss[loss=0.2684, simple_loss=0.3038, pruned_loss=0.1165, over 973413.89 frames.], batch size: 15, lr: 2.16e-03 +2022-05-03 13:33:23,005 INFO [train.py:715] (7/8) Epoch 0, batch 8300, loss[loss=0.2209, simple_loss=0.2653, pruned_loss=0.08826, over 4964.00 frames.], tot_loss[loss=0.2663, simple_loss=0.3022, pruned_loss=0.1152, over 973431.51 frames.], batch size: 15, lr: 2.15e-03 +2022-05-03 13:34:03,432 INFO [train.py:715] (7/8) Epoch 0, batch 8350, loss[loss=0.2282, simple_loss=0.2698, pruned_loss=0.09331, over 4877.00 frames.], tot_loss[loss=0.2666, simple_loss=0.3029, pruned_loss=0.1151, over 972400.05 frames.], batch size: 16, lr: 2.15e-03 +2022-05-03 13:34:43,098 INFO [train.py:715] (7/8) Epoch 0, batch 8400, loss[loss=0.2392, simple_loss=0.2842, pruned_loss=0.09708, over 4921.00 frames.], tot_loss[loss=0.2664, simple_loss=0.3029, pruned_loss=0.115, over 972622.44 frames.], batch size: 18, lr: 2.15e-03 +2022-05-03 13:35:23,387 INFO [train.py:715] (7/8) Epoch 0, batch 8450, loss[loss=0.2636, simple_loss=0.2983, pruned_loss=0.1144, over 4867.00 frames.], tot_loss[loss=0.2657, simple_loss=0.3027, pruned_loss=0.1144, over 972643.82 frames.], batch size: 32, lr: 2.14e-03 +2022-05-03 13:36:04,644 INFO [train.py:715] (7/8) Epoch 0, batch 8500, loss[loss=0.2733, simple_loss=0.3078, pruned_loss=0.1194, over 4830.00 frames.], tot_loss[loss=0.2656, simple_loss=0.3021, pruned_loss=0.1145, over 972451.83 frames.], batch size: 26, lr: 2.14e-03 +2022-05-03 13:36:45,703 INFO [train.py:715] (7/8) Epoch 0, batch 8550, loss[loss=0.2435, simple_loss=0.2941, pruned_loss=0.09645, over 4867.00 frames.], tot_loss[loss=0.2659, simple_loss=0.3021, pruned_loss=0.1148, over 972824.87 frames.], batch size: 16, lr: 2.13e-03 +2022-05-03 13:37:25,358 INFO [train.py:715] (7/8) Epoch 0, batch 8600, loss[loss=0.2878, simple_loss=0.3173, pruned_loss=0.1292, over 4857.00 frames.], tot_loss[loss=0.2662, simple_loss=0.3028, pruned_loss=0.1148, over 972044.82 frames.], batch size: 38, lr: 2.13e-03 +2022-05-03 13:38:06,740 INFO [train.py:715] (7/8) Epoch 0, batch 8650, loss[loss=0.2788, simple_loss=0.322, pruned_loss=0.1178, over 4911.00 frames.], tot_loss[loss=0.2645, simple_loss=0.3018, pruned_loss=0.1136, over 972247.68 
frames.], batch size: 23, lr: 2.12e-03 +2022-05-03 13:38:47,678 INFO [train.py:715] (7/8) Epoch 0, batch 8700, loss[loss=0.2299, simple_loss=0.2753, pruned_loss=0.09224, over 4773.00 frames.], tot_loss[loss=0.2642, simple_loss=0.3015, pruned_loss=0.1134, over 972348.78 frames.], batch size: 12, lr: 2.12e-03 +2022-05-03 13:39:27,766 INFO [train.py:715] (7/8) Epoch 0, batch 8750, loss[loss=0.2327, simple_loss=0.282, pruned_loss=0.09175, over 4910.00 frames.], tot_loss[loss=0.2616, simple_loss=0.2998, pruned_loss=0.1117, over 972481.08 frames.], batch size: 39, lr: 2.11e-03 +2022-05-03 13:40:08,243 INFO [train.py:715] (7/8) Epoch 0, batch 8800, loss[loss=0.2485, simple_loss=0.275, pruned_loss=0.111, over 4791.00 frames.], tot_loss[loss=0.2617, simple_loss=0.2999, pruned_loss=0.1118, over 972746.54 frames.], batch size: 12, lr: 2.11e-03 +2022-05-03 13:40:48,812 INFO [train.py:715] (7/8) Epoch 0, batch 8850, loss[loss=0.2916, simple_loss=0.3254, pruned_loss=0.1289, over 4864.00 frames.], tot_loss[loss=0.2622, simple_loss=0.3005, pruned_loss=0.112, over 972898.06 frames.], batch size: 30, lr: 2.10e-03 +2022-05-03 13:41:29,539 INFO [train.py:715] (7/8) Epoch 0, batch 8900, loss[loss=0.2859, simple_loss=0.2988, pruned_loss=0.1365, over 4784.00 frames.], tot_loss[loss=0.2601, simple_loss=0.2988, pruned_loss=0.1107, over 973014.06 frames.], batch size: 14, lr: 2.10e-03 +2022-05-03 13:42:09,376 INFO [train.py:715] (7/8) Epoch 0, batch 8950, loss[loss=0.2591, simple_loss=0.2986, pruned_loss=0.1097, over 4891.00 frames.], tot_loss[loss=0.2586, simple_loss=0.2977, pruned_loss=0.1097, over 973018.09 frames.], batch size: 19, lr: 2.10e-03 +2022-05-03 13:42:49,922 INFO [train.py:715] (7/8) Epoch 0, batch 9000, loss[loss=0.272, simple_loss=0.3215, pruned_loss=0.1113, over 4971.00 frames.], tot_loss[loss=0.2589, simple_loss=0.2981, pruned_loss=0.1099, over 973835.02 frames.], batch size: 39, lr: 2.09e-03 +2022-05-03 13:42:49,922 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 13:43:03,384 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1592, simple_loss=0.2426, pruned_loss=0.03794, over 914524.00 frames. 
+2022-05-03 13:43:44,299 INFO [train.py:715] (7/8) Epoch 0, batch 9050, loss[loss=0.264, simple_loss=0.3054, pruned_loss=0.1113, over 4882.00 frames.], tot_loss[loss=0.2579, simple_loss=0.2971, pruned_loss=0.1094, over 973654.50 frames.], batch size: 22, lr: 2.09e-03 +2022-05-03 13:44:24,662 INFO [train.py:715] (7/8) Epoch 0, batch 9100, loss[loss=0.2412, simple_loss=0.2725, pruned_loss=0.105, over 4647.00 frames.], tot_loss[loss=0.259, simple_loss=0.2982, pruned_loss=0.1099, over 974194.72 frames.], batch size: 13, lr: 2.08e-03 +2022-05-03 13:45:04,790 INFO [train.py:715] (7/8) Epoch 0, batch 9150, loss[loss=0.2628, simple_loss=0.3009, pruned_loss=0.1124, over 4893.00 frames.], tot_loss[loss=0.2583, simple_loss=0.2977, pruned_loss=0.1094, over 974197.06 frames.], batch size: 19, lr: 2.08e-03 +2022-05-03 13:45:44,990 INFO [train.py:715] (7/8) Epoch 0, batch 9200, loss[loss=0.2092, simple_loss=0.2618, pruned_loss=0.07826, over 4742.00 frames.], tot_loss[loss=0.2603, simple_loss=0.2993, pruned_loss=0.1107, over 974332.92 frames.], batch size: 16, lr: 2.07e-03 +2022-05-03 13:46:26,074 INFO [train.py:715] (7/8) Epoch 0, batch 9250, loss[loss=0.2379, simple_loss=0.2903, pruned_loss=0.09275, over 4986.00 frames.], tot_loss[loss=0.2588, simple_loss=0.2982, pruned_loss=0.1097, over 974273.69 frames.], batch size: 25, lr: 2.07e-03 +2022-05-03 13:47:06,384 INFO [train.py:715] (7/8) Epoch 0, batch 9300, loss[loss=0.2776, simple_loss=0.3163, pruned_loss=0.1195, over 4863.00 frames.], tot_loss[loss=0.2584, simple_loss=0.2982, pruned_loss=0.1093, over 974022.11 frames.], batch size: 20, lr: 2.06e-03 +2022-05-03 13:47:45,670 INFO [train.py:715] (7/8) Epoch 0, batch 9350, loss[loss=0.2593, simple_loss=0.2854, pruned_loss=0.1166, over 4810.00 frames.], tot_loss[loss=0.26, simple_loss=0.2992, pruned_loss=0.1103, over 973968.99 frames.], batch size: 21, lr: 2.06e-03 +2022-05-03 13:48:27,107 INFO [train.py:715] (7/8) Epoch 0, batch 9400, loss[loss=0.2432, simple_loss=0.297, pruned_loss=0.09474, over 4827.00 frames.], tot_loss[loss=0.2586, simple_loss=0.2984, pruned_loss=0.1094, over 974038.77 frames.], batch size: 15, lr: 2.06e-03 +2022-05-03 13:49:07,600 INFO [train.py:715] (7/8) Epoch 0, batch 9450, loss[loss=0.275, simple_loss=0.3128, pruned_loss=0.1186, over 4775.00 frames.], tot_loss[loss=0.2595, simple_loss=0.2991, pruned_loss=0.11, over 973815.17 frames.], batch size: 12, lr: 2.05e-03 +2022-05-03 13:49:47,928 INFO [train.py:715] (7/8) Epoch 0, batch 9500, loss[loss=0.2557, simple_loss=0.2913, pruned_loss=0.11, over 4949.00 frames.], tot_loss[loss=0.2586, simple_loss=0.2987, pruned_loss=0.1093, over 973450.09 frames.], batch size: 14, lr: 2.05e-03 +2022-05-03 13:50:28,013 INFO [train.py:715] (7/8) Epoch 0, batch 9550, loss[loss=0.2426, simple_loss=0.2931, pruned_loss=0.09608, over 4771.00 frames.], tot_loss[loss=0.2583, simple_loss=0.2987, pruned_loss=0.1089, over 972697.52 frames.], batch size: 14, lr: 2.04e-03 +2022-05-03 13:51:08,464 INFO [train.py:715] (7/8) Epoch 0, batch 9600, loss[loss=0.2665, simple_loss=0.311, pruned_loss=0.111, over 4785.00 frames.], tot_loss[loss=0.2586, simple_loss=0.2994, pruned_loss=0.1089, over 972257.14 frames.], batch size: 17, lr: 2.04e-03 +2022-05-03 13:51:48,913 INFO [train.py:715] (7/8) Epoch 0, batch 9650, loss[loss=0.3087, simple_loss=0.3317, pruned_loss=0.1429, over 4959.00 frames.], tot_loss[loss=0.258, simple_loss=0.2988, pruned_loss=0.1086, over 971242.93 frames.], batch size: 29, lr: 2.03e-03 +2022-05-03 13:52:27,670 INFO [train.py:715] (7/8) Epoch 0, batch 
9700, loss[loss=0.2832, simple_loss=0.3181, pruned_loss=0.1241, over 4764.00 frames.], tot_loss[loss=0.258, simple_loss=0.2989, pruned_loss=0.1085, over 971089.32 frames.], batch size: 16, lr: 2.03e-03 +2022-05-03 13:53:08,246 INFO [train.py:715] (7/8) Epoch 0, batch 9750, loss[loss=0.2558, simple_loss=0.295, pruned_loss=0.1083, over 4979.00 frames.], tot_loss[loss=0.2571, simple_loss=0.2984, pruned_loss=0.1079, over 971571.14 frames.], batch size: 24, lr: 2.03e-03 +2022-05-03 13:53:47,975 INFO [train.py:715] (7/8) Epoch 0, batch 9800, loss[loss=0.2169, simple_loss=0.2746, pruned_loss=0.07964, over 4936.00 frames.], tot_loss[loss=0.2575, simple_loss=0.2992, pruned_loss=0.1079, over 972533.38 frames.], batch size: 21, lr: 2.02e-03 +2022-05-03 13:54:27,878 INFO [train.py:715] (7/8) Epoch 0, batch 9850, loss[loss=0.2347, simple_loss=0.281, pruned_loss=0.09418, over 4892.00 frames.], tot_loss[loss=0.2565, simple_loss=0.2983, pruned_loss=0.1073, over 973063.18 frames.], batch size: 19, lr: 2.02e-03 +2022-05-03 13:55:07,636 INFO [train.py:715] (7/8) Epoch 0, batch 9900, loss[loss=0.2535, simple_loss=0.3053, pruned_loss=0.1008, over 4930.00 frames.], tot_loss[loss=0.258, simple_loss=0.2993, pruned_loss=0.1083, over 973041.56 frames.], batch size: 23, lr: 2.01e-03 +2022-05-03 13:55:47,712 INFO [train.py:715] (7/8) Epoch 0, batch 9950, loss[loss=0.2654, simple_loss=0.3138, pruned_loss=0.1085, over 4965.00 frames.], tot_loss[loss=0.2548, simple_loss=0.2971, pruned_loss=0.1062, over 973165.15 frames.], batch size: 24, lr: 2.01e-03 +2022-05-03 13:56:27,938 INFO [train.py:715] (7/8) Epoch 0, batch 10000, loss[loss=0.2343, simple_loss=0.2691, pruned_loss=0.09977, over 4951.00 frames.], tot_loss[loss=0.2546, simple_loss=0.2968, pruned_loss=0.1062, over 972943.47 frames.], batch size: 14, lr: 2.01e-03 +2022-05-03 13:57:07,310 INFO [train.py:715] (7/8) Epoch 0, batch 10050, loss[loss=0.2612, simple_loss=0.3077, pruned_loss=0.1073, over 4895.00 frames.], tot_loss[loss=0.2542, simple_loss=0.2963, pruned_loss=0.1061, over 972863.75 frames.], batch size: 17, lr: 2.00e-03 +2022-05-03 13:57:47,864 INFO [train.py:715] (7/8) Epoch 0, batch 10100, loss[loss=0.2472, simple_loss=0.2897, pruned_loss=0.1023, over 4976.00 frames.], tot_loss[loss=0.2534, simple_loss=0.2957, pruned_loss=0.1055, over 972521.60 frames.], batch size: 35, lr: 2.00e-03 +2022-05-03 13:58:27,715 INFO [train.py:715] (7/8) Epoch 0, batch 10150, loss[loss=0.3187, simple_loss=0.3338, pruned_loss=0.1518, over 4738.00 frames.], tot_loss[loss=0.2533, simple_loss=0.2955, pruned_loss=0.1056, over 972413.56 frames.], batch size: 16, lr: 1.99e-03 +2022-05-03 13:59:07,283 INFO [train.py:715] (7/8) Epoch 0, batch 10200, loss[loss=0.2431, simple_loss=0.2987, pruned_loss=0.09373, over 4924.00 frames.], tot_loss[loss=0.2534, simple_loss=0.2953, pruned_loss=0.1057, over 972767.16 frames.], batch size: 23, lr: 1.99e-03 +2022-05-03 13:59:47,208 INFO [train.py:715] (7/8) Epoch 0, batch 10250, loss[loss=0.2533, simple_loss=0.2965, pruned_loss=0.1051, over 4947.00 frames.], tot_loss[loss=0.2529, simple_loss=0.2956, pruned_loss=0.1051, over 973473.02 frames.], batch size: 23, lr: 1.99e-03 +2022-05-03 14:00:28,085 INFO [train.py:715] (7/8) Epoch 0, batch 10300, loss[loss=0.2212, simple_loss=0.2789, pruned_loss=0.08172, over 4816.00 frames.], tot_loss[loss=0.2556, simple_loss=0.2975, pruned_loss=0.1069, over 972067.07 frames.], batch size: 25, lr: 1.98e-03 +2022-05-03 14:01:08,334 INFO [train.py:715] (7/8) Epoch 0, batch 10350, loss[loss=0.3256, simple_loss=0.3451, 
pruned_loss=0.1531, over 4803.00 frames.], tot_loss[loss=0.2557, simple_loss=0.2974, pruned_loss=0.107, over 971987.61 frames.], batch size: 25, lr: 1.98e-03 +2022-05-03 14:01:47,791 INFO [train.py:715] (7/8) Epoch 0, batch 10400, loss[loss=0.2754, simple_loss=0.3092, pruned_loss=0.1208, over 4959.00 frames.], tot_loss[loss=0.2536, simple_loss=0.2956, pruned_loss=0.1058, over 972381.62 frames.], batch size: 35, lr: 1.97e-03 +2022-05-03 14:02:28,429 INFO [train.py:715] (7/8) Epoch 0, batch 10450, loss[loss=0.2341, simple_loss=0.2793, pruned_loss=0.09443, over 4851.00 frames.], tot_loss[loss=0.2531, simple_loss=0.2953, pruned_loss=0.1054, over 972077.06 frames.], batch size: 30, lr: 1.97e-03 +2022-05-03 14:03:09,170 INFO [train.py:715] (7/8) Epoch 0, batch 10500, loss[loss=0.3443, simple_loss=0.3502, pruned_loss=0.1692, over 4960.00 frames.], tot_loss[loss=0.2537, simple_loss=0.296, pruned_loss=0.1057, over 972983.29 frames.], batch size: 35, lr: 1.97e-03 +2022-05-03 14:03:48,865 INFO [train.py:715] (7/8) Epoch 0, batch 10550, loss[loss=0.3072, simple_loss=0.3424, pruned_loss=0.1359, over 4842.00 frames.], tot_loss[loss=0.2541, simple_loss=0.2965, pruned_loss=0.1058, over 972065.68 frames.], batch size: 20, lr: 1.96e-03 +2022-05-03 14:04:28,879 INFO [train.py:715] (7/8) Epoch 0, batch 10600, loss[loss=0.229, simple_loss=0.2787, pruned_loss=0.08966, over 4932.00 frames.], tot_loss[loss=0.2539, simple_loss=0.2965, pruned_loss=0.1057, over 971474.91 frames.], batch size: 18, lr: 1.96e-03 +2022-05-03 14:05:09,753 INFO [train.py:715] (7/8) Epoch 0, batch 10650, loss[loss=0.2743, simple_loss=0.3045, pruned_loss=0.1221, over 4754.00 frames.], tot_loss[loss=0.2533, simple_loss=0.296, pruned_loss=0.1054, over 971242.20 frames.], batch size: 19, lr: 1.96e-03 +2022-05-03 14:05:49,658 INFO [train.py:715] (7/8) Epoch 0, batch 10700, loss[loss=0.2044, simple_loss=0.2464, pruned_loss=0.08126, over 4839.00 frames.], tot_loss[loss=0.2539, simple_loss=0.2961, pruned_loss=0.1059, over 971191.48 frames.], batch size: 32, lr: 1.95e-03 +2022-05-03 14:06:29,548 INFO [train.py:715] (7/8) Epoch 0, batch 10750, loss[loss=0.2809, simple_loss=0.3156, pruned_loss=0.1231, over 4801.00 frames.], tot_loss[loss=0.2525, simple_loss=0.2952, pruned_loss=0.1049, over 972463.76 frames.], batch size: 21, lr: 1.95e-03 +2022-05-03 14:07:09,724 INFO [train.py:715] (7/8) Epoch 0, batch 10800, loss[loss=0.2429, simple_loss=0.2791, pruned_loss=0.1033, over 4835.00 frames.], tot_loss[loss=0.2505, simple_loss=0.2938, pruned_loss=0.1036, over 971789.54 frames.], batch size: 32, lr: 1.94e-03 +2022-05-03 14:07:50,569 INFO [train.py:715] (7/8) Epoch 0, batch 10850, loss[loss=0.2725, simple_loss=0.3149, pruned_loss=0.1151, over 4884.00 frames.], tot_loss[loss=0.2506, simple_loss=0.294, pruned_loss=0.1036, over 971785.07 frames.], batch size: 38, lr: 1.94e-03 +2022-05-03 14:08:30,099 INFO [train.py:715] (7/8) Epoch 0, batch 10900, loss[loss=0.2482, simple_loss=0.3024, pruned_loss=0.09701, over 4866.00 frames.], tot_loss[loss=0.249, simple_loss=0.2932, pruned_loss=0.1024, over 972518.67 frames.], batch size: 22, lr: 1.94e-03 +2022-05-03 14:09:10,042 INFO [train.py:715] (7/8) Epoch 0, batch 10950, loss[loss=0.2516, simple_loss=0.2913, pruned_loss=0.1059, over 4913.00 frames.], tot_loss[loss=0.2472, simple_loss=0.292, pruned_loss=0.1012, over 973147.30 frames.], batch size: 18, lr: 1.93e-03 +2022-05-03 14:09:50,821 INFO [train.py:715] (7/8) Epoch 0, batch 11000, loss[loss=0.285, simple_loss=0.3212, pruned_loss=0.1244, over 4990.00 frames.], 
tot_loss[loss=0.2462, simple_loss=0.2911, pruned_loss=0.1006, over 973127.64 frames.], batch size: 28, lr: 1.93e-03 +2022-05-03 14:10:31,096 INFO [train.py:715] (7/8) Epoch 0, batch 11050, loss[loss=0.312, simple_loss=0.3406, pruned_loss=0.1417, over 4737.00 frames.], tot_loss[loss=0.2463, simple_loss=0.2912, pruned_loss=0.1007, over 973321.85 frames.], batch size: 16, lr: 1.93e-03 +2022-05-03 14:11:11,145 INFO [train.py:715] (7/8) Epoch 0, batch 11100, loss[loss=0.2418, simple_loss=0.2735, pruned_loss=0.1051, over 4852.00 frames.], tot_loss[loss=0.2454, simple_loss=0.2898, pruned_loss=0.1005, over 973640.53 frames.], batch size: 15, lr: 1.92e-03 +2022-05-03 14:11:51,021 INFO [train.py:715] (7/8) Epoch 0, batch 11150, loss[loss=0.2586, simple_loss=0.2969, pruned_loss=0.1101, over 4968.00 frames.], tot_loss[loss=0.2446, simple_loss=0.2892, pruned_loss=0.1, over 973308.67 frames.], batch size: 35, lr: 1.92e-03 +2022-05-03 14:12:31,470 INFO [train.py:715] (7/8) Epoch 0, batch 11200, loss[loss=0.2611, simple_loss=0.31, pruned_loss=0.1061, over 4834.00 frames.], tot_loss[loss=0.2447, simple_loss=0.2894, pruned_loss=0.09997, over 972638.88 frames.], batch size: 15, lr: 1.92e-03 +2022-05-03 14:13:10,950 INFO [train.py:715] (7/8) Epoch 0, batch 11250, loss[loss=0.2111, simple_loss=0.2673, pruned_loss=0.0775, over 4911.00 frames.], tot_loss[loss=0.2448, simple_loss=0.2894, pruned_loss=0.1001, over 973132.88 frames.], batch size: 17, lr: 1.91e-03 +2022-05-03 14:13:51,036 INFO [train.py:715] (7/8) Epoch 0, batch 11300, loss[loss=0.1866, simple_loss=0.2329, pruned_loss=0.07015, over 4774.00 frames.], tot_loss[loss=0.2449, simple_loss=0.2892, pruned_loss=0.1003, over 973036.43 frames.], batch size: 14, lr: 1.91e-03 +2022-05-03 14:14:31,686 INFO [train.py:715] (7/8) Epoch 0, batch 11350, loss[loss=0.2147, simple_loss=0.2743, pruned_loss=0.07753, over 4966.00 frames.], tot_loss[loss=0.243, simple_loss=0.2879, pruned_loss=0.09905, over 972933.10 frames.], batch size: 15, lr: 1.90e-03 +2022-05-03 14:15:12,115 INFO [train.py:715] (7/8) Epoch 0, batch 11400, loss[loss=0.203, simple_loss=0.264, pruned_loss=0.07098, over 4810.00 frames.], tot_loss[loss=0.2445, simple_loss=0.2889, pruned_loss=0.1001, over 972089.21 frames.], batch size: 21, lr: 1.90e-03 +2022-05-03 14:15:51,357 INFO [train.py:715] (7/8) Epoch 0, batch 11450, loss[loss=0.2283, simple_loss=0.279, pruned_loss=0.08874, over 4986.00 frames.], tot_loss[loss=0.2442, simple_loss=0.2889, pruned_loss=0.09979, over 972264.31 frames.], batch size: 25, lr: 1.90e-03 +2022-05-03 14:16:32,013 INFO [train.py:715] (7/8) Epoch 0, batch 11500, loss[loss=0.2296, simple_loss=0.2666, pruned_loss=0.09624, over 4929.00 frames.], tot_loss[loss=0.2432, simple_loss=0.2884, pruned_loss=0.09897, over 972088.59 frames.], batch size: 18, lr: 1.89e-03 +2022-05-03 14:17:12,410 INFO [train.py:715] (7/8) Epoch 0, batch 11550, loss[loss=0.2872, simple_loss=0.3116, pruned_loss=0.1314, over 4946.00 frames.], tot_loss[loss=0.2439, simple_loss=0.289, pruned_loss=0.0994, over 972677.73 frames.], batch size: 21, lr: 1.89e-03 +2022-05-03 14:17:52,484 INFO [train.py:715] (7/8) Epoch 0, batch 11600, loss[loss=0.2417, simple_loss=0.2865, pruned_loss=0.09849, over 4854.00 frames.], tot_loss[loss=0.2442, simple_loss=0.2894, pruned_loss=0.09949, over 973288.10 frames.], batch size: 30, lr: 1.89e-03 +2022-05-03 14:18:32,578 INFO [train.py:715] (7/8) Epoch 0, batch 11650, loss[loss=0.2364, simple_loss=0.2832, pruned_loss=0.09482, over 4890.00 frames.], tot_loss[loss=0.243, simple_loss=0.2888, 
pruned_loss=0.09862, over 972455.92 frames.], batch size: 16, lr: 1.88e-03 +2022-05-03 14:19:13,490 INFO [train.py:715] (7/8) Epoch 0, batch 11700, loss[loss=0.2364, simple_loss=0.2871, pruned_loss=0.09286, over 4713.00 frames.], tot_loss[loss=0.2442, simple_loss=0.2894, pruned_loss=0.09955, over 970982.53 frames.], batch size: 15, lr: 1.88e-03 +2022-05-03 14:19:53,843 INFO [train.py:715] (7/8) Epoch 0, batch 11750, loss[loss=0.248, simple_loss=0.2942, pruned_loss=0.1009, over 4864.00 frames.], tot_loss[loss=0.2441, simple_loss=0.2895, pruned_loss=0.09936, over 972701.36 frames.], batch size: 20, lr: 1.88e-03 +2022-05-03 14:20:34,219 INFO [train.py:715] (7/8) Epoch 0, batch 11800, loss[loss=0.2051, simple_loss=0.266, pruned_loss=0.07207, over 4939.00 frames.], tot_loss[loss=0.2452, simple_loss=0.2902, pruned_loss=0.1001, over 972613.84 frames.], batch size: 24, lr: 1.87e-03 +2022-05-03 14:21:14,576 INFO [train.py:715] (7/8) Epoch 0, batch 11850, loss[loss=0.2737, simple_loss=0.3109, pruned_loss=0.1183, over 4971.00 frames.], tot_loss[loss=0.2457, simple_loss=0.2907, pruned_loss=0.1003, over 972562.89 frames.], batch size: 24, lr: 1.87e-03 +2022-05-03 14:21:55,679 INFO [train.py:715] (7/8) Epoch 0, batch 11900, loss[loss=0.252, simple_loss=0.3081, pruned_loss=0.098, over 4967.00 frames.], tot_loss[loss=0.2441, simple_loss=0.2895, pruned_loss=0.09931, over 973814.45 frames.], batch size: 24, lr: 1.87e-03 +2022-05-03 14:22:35,856 INFO [train.py:715] (7/8) Epoch 0, batch 11950, loss[loss=0.2338, simple_loss=0.2899, pruned_loss=0.08885, over 4871.00 frames.], tot_loss[loss=0.2427, simple_loss=0.2885, pruned_loss=0.0985, over 973715.42 frames.], batch size: 16, lr: 1.86e-03 +2022-05-03 14:23:15,974 INFO [train.py:715] (7/8) Epoch 0, batch 12000, loss[loss=0.2586, simple_loss=0.2903, pruned_loss=0.1135, over 4963.00 frames.], tot_loss[loss=0.2425, simple_loss=0.2883, pruned_loss=0.09832, over 973452.19 frames.], batch size: 35, lr: 1.86e-03 +2022-05-03 14:23:15,975 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 14:23:31,273 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1516, simple_loss=0.2368, pruned_loss=0.03315, over 914524.00 frames. 
+2022-05-03 14:24:11,270 INFO [train.py:715] (7/8) Epoch 0, batch 12050, loss[loss=0.2292, simple_loss=0.2768, pruned_loss=0.09084, over 4749.00 frames.], tot_loss[loss=0.2407, simple_loss=0.2875, pruned_loss=0.09693, over 974243.28 frames.], batch size: 16, lr: 1.86e-03 +2022-05-03 14:24:51,295 INFO [train.py:715] (7/8) Epoch 0, batch 12100, loss[loss=0.2275, simple_loss=0.2816, pruned_loss=0.08671, over 4793.00 frames.], tot_loss[loss=0.2421, simple_loss=0.2884, pruned_loss=0.09786, over 973732.12 frames.], batch size: 17, lr: 1.85e-03 +2022-05-03 14:25:31,595 INFO [train.py:715] (7/8) Epoch 0, batch 12150, loss[loss=0.1909, simple_loss=0.2409, pruned_loss=0.07046, over 4815.00 frames.], tot_loss[loss=0.2421, simple_loss=0.2882, pruned_loss=0.09797, over 973762.67 frames.], batch size: 27, lr: 1.85e-03 +2022-05-03 14:26:11,158 INFO [train.py:715] (7/8) Epoch 0, batch 12200, loss[loss=0.2183, simple_loss=0.2748, pruned_loss=0.08088, over 4830.00 frames.], tot_loss[loss=0.2406, simple_loss=0.287, pruned_loss=0.09712, over 973692.36 frames.], batch size: 26, lr: 1.85e-03 +2022-05-03 14:26:51,062 INFO [train.py:715] (7/8) Epoch 0, batch 12250, loss[loss=0.2439, simple_loss=0.2917, pruned_loss=0.09803, over 4817.00 frames.], tot_loss[loss=0.2417, simple_loss=0.2879, pruned_loss=0.09774, over 973071.39 frames.], batch size: 26, lr: 1.84e-03 +2022-05-03 14:27:31,550 INFO [train.py:715] (7/8) Epoch 0, batch 12300, loss[loss=0.2282, simple_loss=0.2865, pruned_loss=0.08496, over 4833.00 frames.], tot_loss[loss=0.2428, simple_loss=0.289, pruned_loss=0.09833, over 972872.63 frames.], batch size: 13, lr: 1.84e-03 +2022-05-03 14:28:10,860 INFO [train.py:715] (7/8) Epoch 0, batch 12350, loss[loss=0.2467, simple_loss=0.2955, pruned_loss=0.09899, over 4896.00 frames.], tot_loss[loss=0.2443, simple_loss=0.2902, pruned_loss=0.09923, over 973038.28 frames.], batch size: 17, lr: 1.84e-03 +2022-05-03 14:28:50,839 INFO [train.py:715] (7/8) Epoch 0, batch 12400, loss[loss=0.1997, simple_loss=0.2561, pruned_loss=0.07162, over 4856.00 frames.], tot_loss[loss=0.2439, simple_loss=0.2897, pruned_loss=0.09899, over 973493.65 frames.], batch size: 20, lr: 1.83e-03 +2022-05-03 14:29:31,166 INFO [train.py:715] (7/8) Epoch 0, batch 12450, loss[loss=0.2241, simple_loss=0.2706, pruned_loss=0.08877, over 4768.00 frames.], tot_loss[loss=0.2438, simple_loss=0.2899, pruned_loss=0.09887, over 972578.79 frames.], batch size: 18, lr: 1.83e-03 +2022-05-03 14:30:11,386 INFO [train.py:715] (7/8) Epoch 0, batch 12500, loss[loss=0.2827, simple_loss=0.3146, pruned_loss=0.1254, over 4956.00 frames.], tot_loss[loss=0.2437, simple_loss=0.2896, pruned_loss=0.09895, over 972502.56 frames.], batch size: 15, lr: 1.83e-03 +2022-05-03 14:30:50,308 INFO [train.py:715] (7/8) Epoch 0, batch 12550, loss[loss=0.2762, simple_loss=0.3075, pruned_loss=0.1224, over 4692.00 frames.], tot_loss[loss=0.2437, simple_loss=0.2898, pruned_loss=0.09877, over 972325.52 frames.], batch size: 15, lr: 1.83e-03 +2022-05-03 14:31:30,337 INFO [train.py:715] (7/8) Epoch 0, batch 12600, loss[loss=0.25, simple_loss=0.2976, pruned_loss=0.1012, over 4886.00 frames.], tot_loss[loss=0.2424, simple_loss=0.2889, pruned_loss=0.09799, over 971657.07 frames.], batch size: 22, lr: 1.82e-03 +2022-05-03 14:32:11,361 INFO [train.py:715] (7/8) Epoch 0, batch 12650, loss[loss=0.2983, simple_loss=0.3211, pruned_loss=0.1377, over 4808.00 frames.], tot_loss[loss=0.2431, simple_loss=0.2893, pruned_loss=0.09847, over 972234.27 frames.], batch size: 21, lr: 1.82e-03 +2022-05-03 14:32:51,088 
INFO [train.py:715] (7/8) Epoch 0, batch 12700, loss[loss=0.2828, simple_loss=0.3173, pruned_loss=0.1242, over 4857.00 frames.], tot_loss[loss=0.2427, simple_loss=0.2894, pruned_loss=0.09803, over 972028.67 frames.], batch size: 30, lr: 1.82e-03 +2022-05-03 14:33:30,732 INFO [train.py:715] (7/8) Epoch 0, batch 12750, loss[loss=0.238, simple_loss=0.3036, pruned_loss=0.08615, over 4779.00 frames.], tot_loss[loss=0.2405, simple_loss=0.2876, pruned_loss=0.09672, over 972558.32 frames.], batch size: 17, lr: 1.81e-03 +2022-05-03 14:34:11,173 INFO [train.py:715] (7/8) Epoch 0, batch 12800, loss[loss=0.2341, simple_loss=0.2865, pruned_loss=0.09083, over 4920.00 frames.], tot_loss[loss=0.2414, simple_loss=0.2886, pruned_loss=0.09706, over 972551.40 frames.], batch size: 29, lr: 1.81e-03 +2022-05-03 14:34:51,660 INFO [train.py:715] (7/8) Epoch 0, batch 12850, loss[loss=0.2602, simple_loss=0.291, pruned_loss=0.1147, over 4834.00 frames.], tot_loss[loss=0.2413, simple_loss=0.2883, pruned_loss=0.09714, over 972578.38 frames.], batch size: 15, lr: 1.81e-03 +2022-05-03 14:35:31,485 INFO [train.py:715] (7/8) Epoch 0, batch 12900, loss[loss=0.2447, simple_loss=0.303, pruned_loss=0.09322, over 4882.00 frames.], tot_loss[loss=0.2408, simple_loss=0.288, pruned_loss=0.09675, over 972900.93 frames.], batch size: 16, lr: 1.80e-03 +2022-05-03 14:36:11,741 INFO [train.py:715] (7/8) Epoch 0, batch 12950, loss[loss=0.3032, simple_loss=0.3258, pruned_loss=0.1403, over 4880.00 frames.], tot_loss[loss=0.2408, simple_loss=0.2881, pruned_loss=0.09674, over 972499.99 frames.], batch size: 39, lr: 1.80e-03 +2022-05-03 14:36:52,261 INFO [train.py:715] (7/8) Epoch 0, batch 13000, loss[loss=0.2297, simple_loss=0.2815, pruned_loss=0.08897, over 4884.00 frames.], tot_loss[loss=0.242, simple_loss=0.2889, pruned_loss=0.09755, over 971384.01 frames.], batch size: 16, lr: 1.80e-03 +2022-05-03 14:37:32,733 INFO [train.py:715] (7/8) Epoch 0, batch 13050, loss[loss=0.2416, simple_loss=0.28, pruned_loss=0.1016, over 4769.00 frames.], tot_loss[loss=0.2418, simple_loss=0.2888, pruned_loss=0.09744, over 971980.33 frames.], batch size: 18, lr: 1.79e-03 +2022-05-03 14:38:12,068 INFO [train.py:715] (7/8) Epoch 0, batch 13100, loss[loss=0.2127, simple_loss=0.2555, pruned_loss=0.0849, over 4947.00 frames.], tot_loss[loss=0.2419, simple_loss=0.2884, pruned_loss=0.09768, over 972277.52 frames.], batch size: 35, lr: 1.79e-03 +2022-05-03 14:38:52,512 INFO [train.py:715] (7/8) Epoch 0, batch 13150, loss[loss=0.2288, simple_loss=0.2773, pruned_loss=0.09013, over 4808.00 frames.], tot_loss[loss=0.2417, simple_loss=0.2885, pruned_loss=0.09743, over 973307.53 frames.], batch size: 21, lr: 1.79e-03 +2022-05-03 14:39:33,003 INFO [train.py:715] (7/8) Epoch 0, batch 13200, loss[loss=0.312, simple_loss=0.3456, pruned_loss=0.1392, over 4787.00 frames.], tot_loss[loss=0.2412, simple_loss=0.2882, pruned_loss=0.09714, over 972697.56 frames.], batch size: 17, lr: 1.79e-03 +2022-05-03 14:40:12,568 INFO [train.py:715] (7/8) Epoch 0, batch 13250, loss[loss=0.2221, simple_loss=0.2885, pruned_loss=0.07786, over 4692.00 frames.], tot_loss[loss=0.2406, simple_loss=0.2878, pruned_loss=0.0967, over 972591.51 frames.], batch size: 15, lr: 1.78e-03 +2022-05-03 14:40:52,443 INFO [train.py:715] (7/8) Epoch 0, batch 13300, loss[loss=0.2034, simple_loss=0.2583, pruned_loss=0.0742, over 4933.00 frames.], tot_loss[loss=0.2388, simple_loss=0.2865, pruned_loss=0.09555, over 972104.01 frames.], batch size: 23, lr: 1.78e-03 +2022-05-03 14:41:32,820 INFO [train.py:715] (7/8) Epoch 0, 
batch 13350, loss[loss=0.2631, simple_loss=0.3068, pruned_loss=0.1097, over 4730.00 frames.], tot_loss[loss=0.2403, simple_loss=0.2874, pruned_loss=0.09654, over 972172.70 frames.], batch size: 16, lr: 1.78e-03 +2022-05-03 14:42:13,152 INFO [train.py:715] (7/8) Epoch 0, batch 13400, loss[loss=0.2893, simple_loss=0.32, pruned_loss=0.1293, over 4904.00 frames.], tot_loss[loss=0.2397, simple_loss=0.2869, pruned_loss=0.09625, over 971413.19 frames.], batch size: 17, lr: 1.77e-03 +2022-05-03 14:42:52,946 INFO [train.py:715] (7/8) Epoch 0, batch 13450, loss[loss=0.247, simple_loss=0.293, pruned_loss=0.1005, over 4817.00 frames.], tot_loss[loss=0.2397, simple_loss=0.2871, pruned_loss=0.09616, over 971696.30 frames.], batch size: 15, lr: 1.77e-03 +2022-05-03 14:43:33,178 INFO [train.py:715] (7/8) Epoch 0, batch 13500, loss[loss=0.203, simple_loss=0.2569, pruned_loss=0.07459, over 4803.00 frames.], tot_loss[loss=0.2395, simple_loss=0.2869, pruned_loss=0.09601, over 972380.88 frames.], batch size: 21, lr: 1.77e-03 +2022-05-03 14:44:13,350 INFO [train.py:715] (7/8) Epoch 0, batch 13550, loss[loss=0.2529, simple_loss=0.296, pruned_loss=0.105, over 4818.00 frames.], tot_loss[loss=0.24, simple_loss=0.2872, pruned_loss=0.09641, over 971636.24 frames.], batch size: 25, lr: 1.77e-03 +2022-05-03 14:44:52,805 INFO [train.py:715] (7/8) Epoch 0, batch 13600, loss[loss=0.2644, simple_loss=0.3012, pruned_loss=0.1138, over 4700.00 frames.], tot_loss[loss=0.2394, simple_loss=0.2867, pruned_loss=0.09604, over 972071.24 frames.], batch size: 15, lr: 1.76e-03 +2022-05-03 14:45:32,771 INFO [train.py:715] (7/8) Epoch 0, batch 13650, loss[loss=0.1735, simple_loss=0.2358, pruned_loss=0.05563, over 4800.00 frames.], tot_loss[loss=0.2391, simple_loss=0.2864, pruned_loss=0.09589, over 972063.49 frames.], batch size: 21, lr: 1.76e-03 +2022-05-03 14:46:12,698 INFO [train.py:715] (7/8) Epoch 0, batch 13700, loss[loss=0.2607, simple_loss=0.2919, pruned_loss=0.1148, over 4828.00 frames.], tot_loss[loss=0.2386, simple_loss=0.2858, pruned_loss=0.09566, over 973064.66 frames.], batch size: 26, lr: 1.76e-03 +2022-05-03 14:46:52,707 INFO [train.py:715] (7/8) Epoch 0, batch 13750, loss[loss=0.2342, simple_loss=0.2763, pruned_loss=0.09606, over 4837.00 frames.], tot_loss[loss=0.2384, simple_loss=0.2856, pruned_loss=0.09557, over 973650.54 frames.], batch size: 12, lr: 1.75e-03 +2022-05-03 14:47:32,546 INFO [train.py:715] (7/8) Epoch 0, batch 13800, loss[loss=0.2274, simple_loss=0.2789, pruned_loss=0.08797, over 4760.00 frames.], tot_loss[loss=0.2372, simple_loss=0.2848, pruned_loss=0.09486, over 973641.67 frames.], batch size: 19, lr: 1.75e-03 +2022-05-03 14:48:12,874 INFO [train.py:715] (7/8) Epoch 0, batch 13850, loss[loss=0.2433, simple_loss=0.2903, pruned_loss=0.09817, over 4761.00 frames.], tot_loss[loss=0.2381, simple_loss=0.2855, pruned_loss=0.09533, over 974266.90 frames.], batch size: 18, lr: 1.75e-03 +2022-05-03 14:48:53,735 INFO [train.py:715] (7/8) Epoch 0, batch 13900, loss[loss=0.2517, simple_loss=0.3013, pruned_loss=0.101, over 4807.00 frames.], tot_loss[loss=0.237, simple_loss=0.2849, pruned_loss=0.09452, over 974133.41 frames.], batch size: 21, lr: 1.75e-03 +2022-05-03 14:49:33,783 INFO [train.py:715] (7/8) Epoch 0, batch 13950, loss[loss=0.2524, simple_loss=0.2998, pruned_loss=0.1025, over 4661.00 frames.], tot_loss[loss=0.2361, simple_loss=0.2845, pruned_loss=0.09381, over 973742.39 frames.], batch size: 13, lr: 1.74e-03 +2022-05-03 14:50:14,379 INFO [train.py:715] (7/8) Epoch 0, batch 14000, loss[loss=0.2349, 
simple_loss=0.2813, pruned_loss=0.09428, over 4803.00 frames.], tot_loss[loss=0.2374, simple_loss=0.2856, pruned_loss=0.09456, over 972801.80 frames.], batch size: 21, lr: 1.74e-03 +2022-05-03 14:50:55,239 INFO [train.py:715] (7/8) Epoch 0, batch 14050, loss[loss=0.284, simple_loss=0.3176, pruned_loss=0.1252, over 4841.00 frames.], tot_loss[loss=0.2364, simple_loss=0.2847, pruned_loss=0.09408, over 972849.96 frames.], batch size: 32, lr: 1.74e-03 +2022-05-03 14:51:35,694 INFO [train.py:715] (7/8) Epoch 0, batch 14100, loss[loss=0.2344, simple_loss=0.2831, pruned_loss=0.09287, over 4881.00 frames.], tot_loss[loss=0.235, simple_loss=0.2836, pruned_loss=0.0932, over 972776.79 frames.], batch size: 22, lr: 1.73e-03 +2022-05-03 14:52:16,209 INFO [train.py:715] (7/8) Epoch 0, batch 14150, loss[loss=0.2508, simple_loss=0.2942, pruned_loss=0.1037, over 4847.00 frames.], tot_loss[loss=0.2359, simple_loss=0.2843, pruned_loss=0.09374, over 972609.65 frames.], batch size: 26, lr: 1.73e-03 +2022-05-03 14:52:56,861 INFO [train.py:715] (7/8) Epoch 0, batch 14200, loss[loss=0.3033, simple_loss=0.3355, pruned_loss=0.1356, over 4830.00 frames.], tot_loss[loss=0.2366, simple_loss=0.2847, pruned_loss=0.09422, over 972435.08 frames.], batch size: 30, lr: 1.73e-03 +2022-05-03 14:53:37,708 INFO [train.py:715] (7/8) Epoch 0, batch 14250, loss[loss=0.1846, simple_loss=0.2388, pruned_loss=0.06518, over 4857.00 frames.], tot_loss[loss=0.2368, simple_loss=0.285, pruned_loss=0.09436, over 972316.55 frames.], batch size: 26, lr: 1.73e-03 +2022-05-03 14:54:18,412 INFO [train.py:715] (7/8) Epoch 0, batch 14300, loss[loss=0.3035, simple_loss=0.3249, pruned_loss=0.1411, over 4791.00 frames.], tot_loss[loss=0.2374, simple_loss=0.2854, pruned_loss=0.09472, over 972185.27 frames.], batch size: 14, lr: 1.72e-03 +2022-05-03 14:54:59,484 INFO [train.py:715] (7/8) Epoch 0, batch 14350, loss[loss=0.1887, simple_loss=0.2529, pruned_loss=0.06224, over 4834.00 frames.], tot_loss[loss=0.2355, simple_loss=0.2835, pruned_loss=0.09371, over 972427.46 frames.], batch size: 15, lr: 1.72e-03 +2022-05-03 14:55:40,717 INFO [train.py:715] (7/8) Epoch 0, batch 14400, loss[loss=0.2233, simple_loss=0.2807, pruned_loss=0.08301, over 4905.00 frames.], tot_loss[loss=0.2344, simple_loss=0.2828, pruned_loss=0.09303, over 972378.48 frames.], batch size: 17, lr: 1.72e-03 +2022-05-03 14:56:21,187 INFO [train.py:715] (7/8) Epoch 0, batch 14450, loss[loss=0.2178, simple_loss=0.2795, pruned_loss=0.07801, over 4750.00 frames.], tot_loss[loss=0.2331, simple_loss=0.2823, pruned_loss=0.09201, over 972708.68 frames.], batch size: 16, lr: 1.72e-03 +2022-05-03 14:57:01,540 INFO [train.py:715] (7/8) Epoch 0, batch 14500, loss[loss=0.2513, simple_loss=0.2998, pruned_loss=0.1014, over 4872.00 frames.], tot_loss[loss=0.2317, simple_loss=0.281, pruned_loss=0.09118, over 972561.79 frames.], batch size: 32, lr: 1.71e-03 +2022-05-03 14:57:42,207 INFO [train.py:715] (7/8) Epoch 0, batch 14550, loss[loss=0.2181, simple_loss=0.2751, pruned_loss=0.08058, over 4826.00 frames.], tot_loss[loss=0.2324, simple_loss=0.2812, pruned_loss=0.09175, over 973026.67 frames.], batch size: 27, lr: 1.71e-03 +2022-05-03 14:58:22,170 INFO [train.py:715] (7/8) Epoch 0, batch 14600, loss[loss=0.2063, simple_loss=0.2702, pruned_loss=0.07119, over 4880.00 frames.], tot_loss[loss=0.2326, simple_loss=0.2817, pruned_loss=0.09171, over 973282.55 frames.], batch size: 22, lr: 1.71e-03 +2022-05-03 14:59:01,457 INFO [train.py:715] (7/8) Epoch 0, batch 14650, loss[loss=0.2324, simple_loss=0.284, 
pruned_loss=0.09042, over 4707.00 frames.], tot_loss[loss=0.2327, simple_loss=0.2822, pruned_loss=0.09155, over 972727.46 frames.], batch size: 15, lr: 1.70e-03 +2022-05-03 14:59:41,813 INFO [train.py:715] (7/8) Epoch 0, batch 14700, loss[loss=0.2708, simple_loss=0.3074, pruned_loss=0.1171, over 4920.00 frames.], tot_loss[loss=0.2319, simple_loss=0.2817, pruned_loss=0.091, over 971447.70 frames.], batch size: 39, lr: 1.70e-03 +2022-05-03 15:00:22,079 INFO [train.py:715] (7/8) Epoch 0, batch 14750, loss[loss=0.2559, simple_loss=0.2851, pruned_loss=0.1133, over 4958.00 frames.], tot_loss[loss=0.232, simple_loss=0.2815, pruned_loss=0.09128, over 971544.89 frames.], batch size: 15, lr: 1.70e-03 +2022-05-03 15:01:02,120 INFO [train.py:715] (7/8) Epoch 0, batch 14800, loss[loss=0.2071, simple_loss=0.2592, pruned_loss=0.07746, over 4915.00 frames.], tot_loss[loss=0.2351, simple_loss=0.2836, pruned_loss=0.09337, over 972029.18 frames.], batch size: 29, lr: 1.70e-03 +2022-05-03 15:01:41,998 INFO [train.py:715] (7/8) Epoch 0, batch 14850, loss[loss=0.2361, simple_loss=0.2777, pruned_loss=0.09726, over 4880.00 frames.], tot_loss[loss=0.2328, simple_loss=0.2819, pruned_loss=0.09187, over 971456.73 frames.], batch size: 30, lr: 1.69e-03 +2022-05-03 15:02:22,717 INFO [train.py:715] (7/8) Epoch 0, batch 14900, loss[loss=0.2384, simple_loss=0.2837, pruned_loss=0.09652, over 4922.00 frames.], tot_loss[loss=0.2315, simple_loss=0.2814, pruned_loss=0.0908, over 971152.23 frames.], batch size: 29, lr: 1.69e-03 +2022-05-03 15:03:02,605 INFO [train.py:715] (7/8) Epoch 0, batch 14950, loss[loss=0.2479, simple_loss=0.3019, pruned_loss=0.09693, over 4803.00 frames.], tot_loss[loss=0.2305, simple_loss=0.2805, pruned_loss=0.09023, over 972067.48 frames.], batch size: 21, lr: 1.69e-03 +2022-05-03 15:03:42,037 INFO [train.py:715] (7/8) Epoch 0, batch 15000, loss[loss=0.2152, simple_loss=0.2683, pruned_loss=0.08111, over 4691.00 frames.], tot_loss[loss=0.2298, simple_loss=0.2799, pruned_loss=0.08983, over 971545.31 frames.], batch size: 15, lr: 1.69e-03 +2022-05-03 15:03:42,038 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 15:03:53,634 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1454, simple_loss=0.2314, pruned_loss=0.02968, over 914524.00 frames. 
+2022-05-03 15:04:32,999 INFO [train.py:715] (7/8) Epoch 0, batch 15050, loss[loss=0.2186, simple_loss=0.2703, pruned_loss=0.08349, over 4775.00 frames.], tot_loss[loss=0.2292, simple_loss=0.2794, pruned_loss=0.08949, over 971649.21 frames.], batch size: 12, lr: 1.68e-03 +2022-05-03 15:05:13,562 INFO [train.py:715] (7/8) Epoch 0, batch 15100, loss[loss=0.2417, simple_loss=0.2845, pruned_loss=0.09946, over 4920.00 frames.], tot_loss[loss=0.2293, simple_loss=0.28, pruned_loss=0.08926, over 971944.19 frames.], batch size: 23, lr: 1.68e-03 +2022-05-03 15:05:53,899 INFO [train.py:715] (7/8) Epoch 0, batch 15150, loss[loss=0.2017, simple_loss=0.2587, pruned_loss=0.07231, over 4954.00 frames.], tot_loss[loss=0.2291, simple_loss=0.2798, pruned_loss=0.08923, over 972047.82 frames.], batch size: 29, lr: 1.68e-03 +2022-05-03 15:06:33,820 INFO [train.py:715] (7/8) Epoch 0, batch 15200, loss[loss=0.1997, simple_loss=0.262, pruned_loss=0.06876, over 4817.00 frames.], tot_loss[loss=0.2289, simple_loss=0.2796, pruned_loss=0.08911, over 971620.39 frames.], batch size: 27, lr: 1.68e-03 +2022-05-03 15:07:13,394 INFO [train.py:715] (7/8) Epoch 0, batch 15250, loss[loss=0.1752, simple_loss=0.2309, pruned_loss=0.05981, over 4868.00 frames.], tot_loss[loss=0.2304, simple_loss=0.2809, pruned_loss=0.08992, over 971397.21 frames.], batch size: 13, lr: 1.67e-03 +2022-05-03 15:07:53,257 INFO [train.py:715] (7/8) Epoch 0, batch 15300, loss[loss=0.19, simple_loss=0.2398, pruned_loss=0.07009, over 4747.00 frames.], tot_loss[loss=0.2293, simple_loss=0.2799, pruned_loss=0.08938, over 971516.51 frames.], batch size: 16, lr: 1.67e-03 +2022-05-03 15:08:33,613 INFO [train.py:715] (7/8) Epoch 0, batch 15350, loss[loss=0.2433, simple_loss=0.2915, pruned_loss=0.09749, over 4906.00 frames.], tot_loss[loss=0.2308, simple_loss=0.2811, pruned_loss=0.09021, over 972562.71 frames.], batch size: 39, lr: 1.67e-03 +2022-05-03 15:09:13,457 INFO [train.py:715] (7/8) Epoch 0, batch 15400, loss[loss=0.2381, simple_loss=0.2853, pruned_loss=0.09551, over 4946.00 frames.], tot_loss[loss=0.2313, simple_loss=0.2814, pruned_loss=0.09055, over 971760.86 frames.], batch size: 29, lr: 1.67e-03 +2022-05-03 15:09:53,913 INFO [train.py:715] (7/8) Epoch 0, batch 15450, loss[loss=0.2453, simple_loss=0.2751, pruned_loss=0.1077, over 4989.00 frames.], tot_loss[loss=0.2313, simple_loss=0.2814, pruned_loss=0.09064, over 971119.60 frames.], batch size: 14, lr: 1.66e-03 +2022-05-03 15:10:33,374 INFO [train.py:715] (7/8) Epoch 0, batch 15500, loss[loss=0.2017, simple_loss=0.2429, pruned_loss=0.08028, over 4820.00 frames.], tot_loss[loss=0.2314, simple_loss=0.2814, pruned_loss=0.09074, over 971789.99 frames.], batch size: 12, lr: 1.66e-03 +2022-05-03 15:11:12,571 INFO [train.py:715] (7/8) Epoch 0, batch 15550, loss[loss=0.2293, simple_loss=0.2695, pruned_loss=0.09452, over 4919.00 frames.], tot_loss[loss=0.2308, simple_loss=0.281, pruned_loss=0.09029, over 971606.27 frames.], batch size: 29, lr: 1.66e-03 +2022-05-03 15:11:52,063 INFO [train.py:715] (7/8) Epoch 0, batch 15600, loss[loss=0.2432, simple_loss=0.2949, pruned_loss=0.09571, over 4806.00 frames.], tot_loss[loss=0.2294, simple_loss=0.2799, pruned_loss=0.08944, over 971383.32 frames.], batch size: 21, lr: 1.66e-03 +2022-05-03 15:12:31,510 INFO [train.py:715] (7/8) Epoch 0, batch 15650, loss[loss=0.2027, simple_loss=0.2719, pruned_loss=0.06674, over 4687.00 frames.], tot_loss[loss=0.2303, simple_loss=0.2809, pruned_loss=0.08982, over 971111.86 frames.], batch size: 15, lr: 1.65e-03 +2022-05-03 15:13:11,301 
INFO [train.py:715] (7/8) Epoch 0, batch 15700, loss[loss=0.2502, simple_loss=0.2913, pruned_loss=0.1045, over 4803.00 frames.], tot_loss[loss=0.231, simple_loss=0.2816, pruned_loss=0.09022, over 971989.93 frames.], batch size: 24, lr: 1.65e-03 +2022-05-03 15:13:50,903 INFO [train.py:715] (7/8) Epoch 0, batch 15750, loss[loss=0.2085, simple_loss=0.2597, pruned_loss=0.07867, over 4965.00 frames.], tot_loss[loss=0.2311, simple_loss=0.2817, pruned_loss=0.09021, over 972159.43 frames.], batch size: 24, lr: 1.65e-03 +2022-05-03 15:14:30,847 INFO [train.py:715] (7/8) Epoch 0, batch 15800, loss[loss=0.2192, simple_loss=0.2731, pruned_loss=0.08266, over 4781.00 frames.], tot_loss[loss=0.2322, simple_loss=0.2824, pruned_loss=0.09102, over 972248.33 frames.], batch size: 18, lr: 1.65e-03 +2022-05-03 15:15:10,665 INFO [train.py:715] (7/8) Epoch 0, batch 15850, loss[loss=0.273, simple_loss=0.3051, pruned_loss=0.1205, over 4808.00 frames.], tot_loss[loss=0.2314, simple_loss=0.2821, pruned_loss=0.09035, over 972873.23 frames.], batch size: 17, lr: 1.65e-03 +2022-05-03 15:15:50,242 INFO [train.py:715] (7/8) Epoch 0, batch 15900, loss[loss=0.2321, simple_loss=0.2954, pruned_loss=0.08443, over 4844.00 frames.], tot_loss[loss=0.2311, simple_loss=0.2819, pruned_loss=0.09019, over 972632.73 frames.], batch size: 15, lr: 1.64e-03 +2022-05-03 15:16:30,477 INFO [train.py:715] (7/8) Epoch 0, batch 15950, loss[loss=0.2561, simple_loss=0.2981, pruned_loss=0.107, over 4910.00 frames.], tot_loss[loss=0.2319, simple_loss=0.2825, pruned_loss=0.09061, over 972625.20 frames.], batch size: 19, lr: 1.64e-03 +2022-05-03 15:17:12,825 INFO [train.py:715] (7/8) Epoch 0, batch 16000, loss[loss=0.189, simple_loss=0.26, pruned_loss=0.05899, over 4823.00 frames.], tot_loss[loss=0.2304, simple_loss=0.2811, pruned_loss=0.08987, over 972596.84 frames.], batch size: 27, lr: 1.64e-03 +2022-05-03 15:17:52,708 INFO [train.py:715] (7/8) Epoch 0, batch 16050, loss[loss=0.2582, simple_loss=0.3007, pruned_loss=0.1078, over 4978.00 frames.], tot_loss[loss=0.2313, simple_loss=0.282, pruned_loss=0.09033, over 972394.42 frames.], batch size: 15, lr: 1.64e-03 +2022-05-03 15:18:33,258 INFO [train.py:715] (7/8) Epoch 0, batch 16100, loss[loss=0.2145, simple_loss=0.2734, pruned_loss=0.07776, over 4904.00 frames.], tot_loss[loss=0.2312, simple_loss=0.2818, pruned_loss=0.09028, over 972428.26 frames.], batch size: 23, lr: 1.63e-03 +2022-05-03 15:19:13,432 INFO [train.py:715] (7/8) Epoch 0, batch 16150, loss[loss=0.2672, simple_loss=0.3055, pruned_loss=0.1145, over 4798.00 frames.], tot_loss[loss=0.2327, simple_loss=0.2832, pruned_loss=0.09106, over 972530.92 frames.], batch size: 21, lr: 1.63e-03 +2022-05-03 15:19:52,899 INFO [train.py:715] (7/8) Epoch 0, batch 16200, loss[loss=0.2261, simple_loss=0.273, pruned_loss=0.08957, over 4804.00 frames.], tot_loss[loss=0.2307, simple_loss=0.282, pruned_loss=0.08972, over 972454.05 frames.], batch size: 21, lr: 1.63e-03 +2022-05-03 15:20:32,322 INFO [train.py:715] (7/8) Epoch 0, batch 16250, loss[loss=0.2151, simple_loss=0.2711, pruned_loss=0.07962, over 4881.00 frames.], tot_loss[loss=0.2307, simple_loss=0.2816, pruned_loss=0.0899, over 972187.09 frames.], batch size: 16, lr: 1.63e-03 +2022-05-03 15:21:12,243 INFO [train.py:715] (7/8) Epoch 0, batch 16300, loss[loss=0.2036, simple_loss=0.2626, pruned_loss=0.07223, over 4772.00 frames.], tot_loss[loss=0.2301, simple_loss=0.2812, pruned_loss=0.08946, over 972350.67 frames.], batch size: 18, lr: 1.62e-03 +2022-05-03 15:21:51,673 INFO [train.py:715] (7/8) Epoch 
0, batch 16350, loss[loss=0.282, simple_loss=0.3181, pruned_loss=0.1229, over 4828.00 frames.], tot_loss[loss=0.2304, simple_loss=0.2816, pruned_loss=0.08962, over 972875.27 frames.], batch size: 15, lr: 1.62e-03 +2022-05-03 15:22:31,101 INFO [train.py:715] (7/8) Epoch 0, batch 16400, loss[loss=0.2336, simple_loss=0.2845, pruned_loss=0.09131, over 4954.00 frames.], tot_loss[loss=0.2293, simple_loss=0.2804, pruned_loss=0.08904, over 972751.34 frames.], batch size: 15, lr: 1.62e-03 +2022-05-03 15:23:11,048 INFO [train.py:715] (7/8) Epoch 0, batch 16450, loss[loss=0.2582, simple_loss=0.302, pruned_loss=0.1073, over 4979.00 frames.], tot_loss[loss=0.2297, simple_loss=0.2806, pruned_loss=0.08937, over 973400.42 frames.], batch size: 24, lr: 1.62e-03 +2022-05-03 15:23:51,585 INFO [train.py:715] (7/8) Epoch 0, batch 16500, loss[loss=0.2214, simple_loss=0.2917, pruned_loss=0.07553, over 4881.00 frames.], tot_loss[loss=0.2285, simple_loss=0.2797, pruned_loss=0.08866, over 972321.10 frames.], batch size: 19, lr: 1.62e-03 +2022-05-03 15:24:31,535 INFO [train.py:715] (7/8) Epoch 0, batch 16550, loss[loss=0.3204, simple_loss=0.3434, pruned_loss=0.1487, over 4832.00 frames.], tot_loss[loss=0.2283, simple_loss=0.2797, pruned_loss=0.08852, over 973012.67 frames.], batch size: 15, lr: 1.61e-03 +2022-05-03 15:25:11,228 INFO [train.py:715] (7/8) Epoch 0, batch 16600, loss[loss=0.2144, simple_loss=0.2657, pruned_loss=0.0816, over 4829.00 frames.], tot_loss[loss=0.2259, simple_loss=0.2773, pruned_loss=0.08732, over 972029.47 frames.], batch size: 15, lr: 1.61e-03 +2022-05-03 15:25:50,679 INFO [train.py:715] (7/8) Epoch 0, batch 16650, loss[loss=0.2468, simple_loss=0.2981, pruned_loss=0.09774, over 4783.00 frames.], tot_loss[loss=0.2251, simple_loss=0.2768, pruned_loss=0.08675, over 972229.30 frames.], batch size: 18, lr: 1.61e-03 +2022-05-03 15:26:30,538 INFO [train.py:715] (7/8) Epoch 0, batch 16700, loss[loss=0.3149, simple_loss=0.3299, pruned_loss=0.15, over 4766.00 frames.], tot_loss[loss=0.2254, simple_loss=0.2769, pruned_loss=0.08699, over 971987.29 frames.], batch size: 14, lr: 1.61e-03 +2022-05-03 15:27:09,635 INFO [train.py:715] (7/8) Epoch 0, batch 16750, loss[loss=0.2717, simple_loss=0.3174, pruned_loss=0.113, over 4838.00 frames.], tot_loss[loss=0.2267, simple_loss=0.2781, pruned_loss=0.08762, over 971987.15 frames.], batch size: 15, lr: 1.60e-03 +2022-05-03 15:27:48,782 INFO [train.py:715] (7/8) Epoch 0, batch 16800, loss[loss=0.2053, simple_loss=0.26, pruned_loss=0.07526, over 4757.00 frames.], tot_loss[loss=0.2269, simple_loss=0.2783, pruned_loss=0.08772, over 971835.60 frames.], batch size: 19, lr: 1.60e-03 +2022-05-03 15:28:28,414 INFO [train.py:715] (7/8) Epoch 0, batch 16850, loss[loss=0.256, simple_loss=0.2979, pruned_loss=0.107, over 4882.00 frames.], tot_loss[loss=0.2274, simple_loss=0.2787, pruned_loss=0.08801, over 972382.10 frames.], batch size: 16, lr: 1.60e-03 +2022-05-03 15:29:08,023 INFO [train.py:715] (7/8) Epoch 0, batch 16900, loss[loss=0.2443, simple_loss=0.2919, pruned_loss=0.09842, over 4759.00 frames.], tot_loss[loss=0.2259, simple_loss=0.2778, pruned_loss=0.08703, over 972650.40 frames.], batch size: 16, lr: 1.60e-03 +2022-05-03 15:29:47,267 INFO [train.py:715] (7/8) Epoch 0, batch 16950, loss[loss=0.19, simple_loss=0.2529, pruned_loss=0.06356, over 4783.00 frames.], tot_loss[loss=0.2241, simple_loss=0.2765, pruned_loss=0.08588, over 972503.61 frames.], batch size: 12, lr: 1.60e-03 +2022-05-03 15:30:27,234 INFO [train.py:715] (7/8) Epoch 0, batch 17000, loss[loss=0.2094, 
simple_loss=0.2696, pruned_loss=0.07456, over 4933.00 frames.], tot_loss[loss=0.2239, simple_loss=0.2762, pruned_loss=0.08583, over 972160.83 frames.], batch size: 29, lr: 1.59e-03 +2022-05-03 15:31:07,732 INFO [train.py:715] (7/8) Epoch 0, batch 17050, loss[loss=0.1881, simple_loss=0.2565, pruned_loss=0.05987, over 4806.00 frames.], tot_loss[loss=0.2261, simple_loss=0.2779, pruned_loss=0.08713, over 972736.42 frames.], batch size: 25, lr: 1.59e-03 +2022-05-03 15:31:47,486 INFO [train.py:715] (7/8) Epoch 0, batch 17100, loss[loss=0.2079, simple_loss=0.2526, pruned_loss=0.08163, over 4688.00 frames.], tot_loss[loss=0.2262, simple_loss=0.2779, pruned_loss=0.08728, over 972700.51 frames.], batch size: 15, lr: 1.59e-03 +2022-05-03 15:32:26,655 INFO [train.py:715] (7/8) Epoch 0, batch 17150, loss[loss=0.2061, simple_loss=0.2614, pruned_loss=0.07543, over 4754.00 frames.], tot_loss[loss=0.2261, simple_loss=0.2779, pruned_loss=0.08714, over 973163.16 frames.], batch size: 18, lr: 1.59e-03 +2022-05-03 15:33:06,904 INFO [train.py:715] (7/8) Epoch 0, batch 17200, loss[loss=0.2754, simple_loss=0.3161, pruned_loss=0.1174, over 4928.00 frames.], tot_loss[loss=0.2273, simple_loss=0.2785, pruned_loss=0.08804, over 972884.69 frames.], batch size: 21, lr: 1.58e-03 +2022-05-03 15:33:46,682 INFO [train.py:715] (7/8) Epoch 0, batch 17250, loss[loss=0.2159, simple_loss=0.2836, pruned_loss=0.07405, over 4970.00 frames.], tot_loss[loss=0.2273, simple_loss=0.2787, pruned_loss=0.08797, over 971724.51 frames.], batch size: 24, lr: 1.58e-03 +2022-05-03 15:34:26,238 INFO [train.py:715] (7/8) Epoch 0, batch 17300, loss[loss=0.1931, simple_loss=0.2532, pruned_loss=0.06649, over 4985.00 frames.], tot_loss[loss=0.228, simple_loss=0.2793, pruned_loss=0.08834, over 971955.62 frames.], batch size: 28, lr: 1.58e-03 +2022-05-03 15:35:06,294 INFO [train.py:715] (7/8) Epoch 0, batch 17350, loss[loss=0.1958, simple_loss=0.2628, pruned_loss=0.06436, over 4971.00 frames.], tot_loss[loss=0.2252, simple_loss=0.2774, pruned_loss=0.08647, over 972638.97 frames.], batch size: 28, lr: 1.58e-03 +2022-05-03 15:35:46,526 INFO [train.py:715] (7/8) Epoch 0, batch 17400, loss[loss=0.2249, simple_loss=0.2729, pruned_loss=0.08846, over 4977.00 frames.], tot_loss[loss=0.2246, simple_loss=0.2768, pruned_loss=0.08623, over 972024.80 frames.], batch size: 28, lr: 1.58e-03 +2022-05-03 15:36:26,423 INFO [train.py:715] (7/8) Epoch 0, batch 17450, loss[loss=0.2242, simple_loss=0.2833, pruned_loss=0.08256, over 4817.00 frames.], tot_loss[loss=0.2241, simple_loss=0.2759, pruned_loss=0.08613, over 971682.46 frames.], batch size: 26, lr: 1.57e-03 +2022-05-03 15:37:07,035 INFO [train.py:715] (7/8) Epoch 0, batch 17500, loss[loss=0.2565, simple_loss=0.3027, pruned_loss=0.1051, over 4883.00 frames.], tot_loss[loss=0.2233, simple_loss=0.2753, pruned_loss=0.08572, over 970833.15 frames.], batch size: 16, lr: 1.57e-03 +2022-05-03 15:37:47,464 INFO [train.py:715] (7/8) Epoch 0, batch 17550, loss[loss=0.2114, simple_loss=0.272, pruned_loss=0.07538, over 4836.00 frames.], tot_loss[loss=0.2239, simple_loss=0.2758, pruned_loss=0.08595, over 970642.09 frames.], batch size: 30, lr: 1.57e-03 +2022-05-03 15:38:27,022 INFO [train.py:715] (7/8) Epoch 0, batch 17600, loss[loss=0.208, simple_loss=0.2667, pruned_loss=0.07468, over 4845.00 frames.], tot_loss[loss=0.2244, simple_loss=0.2764, pruned_loss=0.08622, over 970796.66 frames.], batch size: 30, lr: 1.57e-03 +2022-05-03 15:39:06,947 INFO [train.py:715] (7/8) Epoch 0, batch 17650, loss[loss=0.2475, simple_loss=0.2926, 
pruned_loss=0.1012, over 4816.00 frames.], tot_loss[loss=0.2253, simple_loss=0.277, pruned_loss=0.08676, over 971230.90 frames.], batch size: 21, lr: 1.57e-03 +2022-05-03 15:39:47,482 INFO [train.py:715] (7/8) Epoch 0, batch 17700, loss[loss=0.2101, simple_loss=0.265, pruned_loss=0.07754, over 4812.00 frames.], tot_loss[loss=0.2227, simple_loss=0.2748, pruned_loss=0.08526, over 970595.88 frames.], batch size: 25, lr: 1.56e-03 +2022-05-03 15:40:27,382 INFO [train.py:715] (7/8) Epoch 0, batch 17750, loss[loss=0.1869, simple_loss=0.2568, pruned_loss=0.05851, over 4986.00 frames.], tot_loss[loss=0.2238, simple_loss=0.2761, pruned_loss=0.08581, over 971330.16 frames.], batch size: 26, lr: 1.56e-03 +2022-05-03 15:41:07,057 INFO [train.py:715] (7/8) Epoch 0, batch 17800, loss[loss=0.2336, simple_loss=0.2872, pruned_loss=0.08997, over 4801.00 frames.], tot_loss[loss=0.2237, simple_loss=0.2756, pruned_loss=0.08591, over 971810.87 frames.], batch size: 21, lr: 1.56e-03 +2022-05-03 15:41:47,858 INFO [train.py:715] (7/8) Epoch 0, batch 17850, loss[loss=0.2193, simple_loss=0.2842, pruned_loss=0.07721, over 4869.00 frames.], tot_loss[loss=0.225, simple_loss=0.2767, pruned_loss=0.08661, over 971905.21 frames.], batch size: 22, lr: 1.56e-03 +2022-05-03 15:42:28,484 INFO [train.py:715] (7/8) Epoch 0, batch 17900, loss[loss=0.2625, simple_loss=0.2938, pruned_loss=0.1155, over 4926.00 frames.], tot_loss[loss=0.2255, simple_loss=0.2772, pruned_loss=0.08691, over 972509.60 frames.], batch size: 29, lr: 1.56e-03 +2022-05-03 15:43:07,991 INFO [train.py:715] (7/8) Epoch 0, batch 17950, loss[loss=0.2519, simple_loss=0.3025, pruned_loss=0.1006, over 4918.00 frames.], tot_loss[loss=0.2258, simple_loss=0.2774, pruned_loss=0.08711, over 972827.64 frames.], batch size: 23, lr: 1.55e-03 +2022-05-03 15:43:48,225 INFO [train.py:715] (7/8) Epoch 0, batch 18000, loss[loss=0.2117, simple_loss=0.2722, pruned_loss=0.07556, over 4761.00 frames.], tot_loss[loss=0.2252, simple_loss=0.2771, pruned_loss=0.08669, over 971772.08 frames.], batch size: 18, lr: 1.55e-03 +2022-05-03 15:43:48,226 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 15:43:57,827 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.141, simple_loss=0.228, pruned_loss=0.02706, over 914524.00 frames. 
+2022-05-03 15:44:38,094 INFO [train.py:715] (7/8) Epoch 0, batch 18050, loss[loss=0.1945, simple_loss=0.2538, pruned_loss=0.06761, over 4985.00 frames.], tot_loss[loss=0.2248, simple_loss=0.2767, pruned_loss=0.08643, over 972617.88 frames.], batch size: 35, lr: 1.55e-03 +2022-05-03 15:45:18,349 INFO [train.py:715] (7/8) Epoch 0, batch 18100, loss[loss=0.1843, simple_loss=0.258, pruned_loss=0.05531, over 4979.00 frames.], tot_loss[loss=0.2234, simple_loss=0.2758, pruned_loss=0.08555, over 972206.74 frames.], batch size: 28, lr: 1.55e-03 +2022-05-03 15:45:58,158 INFO [train.py:715] (7/8) Epoch 0, batch 18150, loss[loss=0.2089, simple_loss=0.2765, pruned_loss=0.07063, over 4864.00 frames.], tot_loss[loss=0.2213, simple_loss=0.2745, pruned_loss=0.08407, over 971440.87 frames.], batch size: 22, lr: 1.55e-03 +2022-05-03 15:46:37,573 INFO [train.py:715] (7/8) Epoch 0, batch 18200, loss[loss=0.2499, simple_loss=0.2955, pruned_loss=0.1022, over 4970.00 frames.], tot_loss[loss=0.2218, simple_loss=0.2748, pruned_loss=0.0844, over 971467.93 frames.], batch size: 24, lr: 1.54e-03 +2022-05-03 15:47:17,747 INFO [train.py:715] (7/8) Epoch 0, batch 18250, loss[loss=0.1925, simple_loss=0.2453, pruned_loss=0.0698, over 4834.00 frames.], tot_loss[loss=0.223, simple_loss=0.2752, pruned_loss=0.08539, over 971742.38 frames.], batch size: 12, lr: 1.54e-03 +2022-05-03 15:47:59,027 INFO [train.py:715] (7/8) Epoch 0, batch 18300, loss[loss=0.1809, simple_loss=0.2312, pruned_loss=0.0653, over 4814.00 frames.], tot_loss[loss=0.222, simple_loss=0.2746, pruned_loss=0.08474, over 972654.22 frames.], batch size: 13, lr: 1.54e-03 +2022-05-03 15:48:38,802 INFO [train.py:715] (7/8) Epoch 0, batch 18350, loss[loss=0.2508, simple_loss=0.3072, pruned_loss=0.09721, over 4978.00 frames.], tot_loss[loss=0.2227, simple_loss=0.2752, pruned_loss=0.08507, over 972472.09 frames.], batch size: 24, lr: 1.54e-03 +2022-05-03 15:49:19,079 INFO [train.py:715] (7/8) Epoch 0, batch 18400, loss[loss=0.2479, simple_loss=0.3007, pruned_loss=0.09758, over 4911.00 frames.], tot_loss[loss=0.2233, simple_loss=0.276, pruned_loss=0.08534, over 971947.02 frames.], batch size: 29, lr: 1.54e-03 +2022-05-03 15:49:59,580 INFO [train.py:715] (7/8) Epoch 0, batch 18450, loss[loss=0.2007, simple_loss=0.2629, pruned_loss=0.06924, over 4871.00 frames.], tot_loss[loss=0.2219, simple_loss=0.2753, pruned_loss=0.08418, over 971988.25 frames.], batch size: 20, lr: 1.53e-03 +2022-05-03 15:50:39,244 INFO [train.py:715] (7/8) Epoch 0, batch 18500, loss[loss=0.2146, simple_loss=0.2674, pruned_loss=0.08092, over 4931.00 frames.], tot_loss[loss=0.2211, simple_loss=0.2748, pruned_loss=0.0837, over 972089.87 frames.], batch size: 23, lr: 1.53e-03 +2022-05-03 15:51:19,777 INFO [train.py:715] (7/8) Epoch 0, batch 18550, loss[loss=0.2539, simple_loss=0.2957, pruned_loss=0.1061, over 4958.00 frames.], tot_loss[loss=0.2228, simple_loss=0.2759, pruned_loss=0.08482, over 971959.28 frames.], batch size: 35, lr: 1.53e-03 +2022-05-03 15:52:00,086 INFO [train.py:715] (7/8) Epoch 0, batch 18600, loss[loss=0.2561, simple_loss=0.2972, pruned_loss=0.1075, over 4834.00 frames.], tot_loss[loss=0.2225, simple_loss=0.2756, pruned_loss=0.08471, over 972692.84 frames.], batch size: 30, lr: 1.53e-03 +2022-05-03 15:52:40,196 INFO [train.py:715] (7/8) Epoch 0, batch 18650, loss[loss=0.2502, simple_loss=0.291, pruned_loss=0.1046, over 4833.00 frames.], tot_loss[loss=0.2234, simple_loss=0.276, pruned_loss=0.08534, over 973175.78 frames.], batch size: 13, lr: 1.53e-03 +2022-05-03 15:53:19,599 INFO 
[train.py:715] (7/8) Epoch 0, batch 18700, loss[loss=0.2281, simple_loss=0.2808, pruned_loss=0.08772, over 4895.00 frames.], tot_loss[loss=0.2226, simple_loss=0.2755, pruned_loss=0.08488, over 973630.85 frames.], batch size: 17, lr: 1.52e-03 +2022-05-03 15:53:59,908 INFO [train.py:715] (7/8) Epoch 0, batch 18750, loss[loss=0.2344, simple_loss=0.283, pruned_loss=0.09286, over 4750.00 frames.], tot_loss[loss=0.2218, simple_loss=0.2746, pruned_loss=0.08452, over 972828.99 frames.], batch size: 16, lr: 1.52e-03 +2022-05-03 15:54:41,179 INFO [train.py:715] (7/8) Epoch 0, batch 18800, loss[loss=0.1856, simple_loss=0.2433, pruned_loss=0.06394, over 4770.00 frames.], tot_loss[loss=0.221, simple_loss=0.2738, pruned_loss=0.0841, over 972951.20 frames.], batch size: 17, lr: 1.52e-03 +2022-05-03 15:55:20,400 INFO [train.py:715] (7/8) Epoch 0, batch 18850, loss[loss=0.224, simple_loss=0.2625, pruned_loss=0.0928, over 4825.00 frames.], tot_loss[loss=0.2214, simple_loss=0.274, pruned_loss=0.0844, over 972162.57 frames.], batch size: 13, lr: 1.52e-03 +2022-05-03 15:56:01,308 INFO [train.py:715] (7/8) Epoch 0, batch 18900, loss[loss=0.2081, simple_loss=0.2642, pruned_loss=0.07597, over 4944.00 frames.], tot_loss[loss=0.2235, simple_loss=0.2758, pruned_loss=0.08561, over 972357.15 frames.], batch size: 21, lr: 1.52e-03 +2022-05-03 15:56:41,742 INFO [train.py:715] (7/8) Epoch 0, batch 18950, loss[loss=0.2326, simple_loss=0.2809, pruned_loss=0.09211, over 4888.00 frames.], tot_loss[loss=0.2234, simple_loss=0.2758, pruned_loss=0.08547, over 972756.50 frames.], batch size: 16, lr: 1.52e-03 +2022-05-03 15:57:21,404 INFO [train.py:715] (7/8) Epoch 0, batch 19000, loss[loss=0.1747, simple_loss=0.2442, pruned_loss=0.05264, over 4880.00 frames.], tot_loss[loss=0.2224, simple_loss=0.2751, pruned_loss=0.08483, over 972473.19 frames.], batch size: 39, lr: 1.51e-03 +2022-05-03 15:58:01,851 INFO [train.py:715] (7/8) Epoch 0, batch 19050, loss[loss=0.1916, simple_loss=0.2551, pruned_loss=0.06406, over 4794.00 frames.], tot_loss[loss=0.2215, simple_loss=0.2744, pruned_loss=0.08435, over 971975.69 frames.], batch size: 21, lr: 1.51e-03 +2022-05-03 15:58:42,185 INFO [train.py:715] (7/8) Epoch 0, batch 19100, loss[loss=0.2167, simple_loss=0.292, pruned_loss=0.07064, over 4909.00 frames.], tot_loss[loss=0.2209, simple_loss=0.274, pruned_loss=0.08386, over 972505.27 frames.], batch size: 18, lr: 1.51e-03 +2022-05-03 15:59:22,506 INFO [train.py:715] (7/8) Epoch 0, batch 19150, loss[loss=0.2252, simple_loss=0.2856, pruned_loss=0.08242, over 4935.00 frames.], tot_loss[loss=0.2197, simple_loss=0.2733, pruned_loss=0.08311, over 972676.35 frames.], batch size: 39, lr: 1.51e-03 +2022-05-03 16:00:01,716 INFO [train.py:715] (7/8) Epoch 0, batch 19200, loss[loss=0.1897, simple_loss=0.246, pruned_loss=0.06672, over 4950.00 frames.], tot_loss[loss=0.2203, simple_loss=0.2737, pruned_loss=0.08346, over 972726.24 frames.], batch size: 29, lr: 1.51e-03 +2022-05-03 16:00:42,582 INFO [train.py:715] (7/8) Epoch 0, batch 19250, loss[loss=0.2505, simple_loss=0.2992, pruned_loss=0.1009, over 4916.00 frames.], tot_loss[loss=0.2214, simple_loss=0.2746, pruned_loss=0.0841, over 973008.92 frames.], batch size: 17, lr: 1.50e-03 +2022-05-03 16:01:23,360 INFO [train.py:715] (7/8) Epoch 0, batch 19300, loss[loss=0.1796, simple_loss=0.2427, pruned_loss=0.05827, over 4921.00 frames.], tot_loss[loss=0.2201, simple_loss=0.2736, pruned_loss=0.08332, over 973722.76 frames.], batch size: 29, lr: 1.50e-03 +2022-05-03 16:02:03,056 INFO [train.py:715] (7/8) Epoch 0, 
batch 19350, loss[loss=0.2038, simple_loss=0.2587, pruned_loss=0.07448, over 4817.00 frames.], tot_loss[loss=0.2211, simple_loss=0.2745, pruned_loss=0.08389, over 973948.92 frames.], batch size: 25, lr: 1.50e-03 +2022-05-03 16:02:43,216 INFO [train.py:715] (7/8) Epoch 0, batch 19400, loss[loss=0.2432, simple_loss=0.2885, pruned_loss=0.0989, over 4872.00 frames.], tot_loss[loss=0.2204, simple_loss=0.2739, pruned_loss=0.08342, over 973115.39 frames.], batch size: 32, lr: 1.50e-03 +2022-05-03 16:03:24,065 INFO [train.py:715] (7/8) Epoch 0, batch 19450, loss[loss=0.2842, simple_loss=0.302, pruned_loss=0.1332, over 4794.00 frames.], tot_loss[loss=0.2211, simple_loss=0.2745, pruned_loss=0.08382, over 973240.33 frames.], batch size: 18, lr: 1.50e-03 +2022-05-03 16:04:03,577 INFO [train.py:715] (7/8) Epoch 0, batch 19500, loss[loss=0.2173, simple_loss=0.2715, pruned_loss=0.08153, over 4885.00 frames.], tot_loss[loss=0.221, simple_loss=0.2745, pruned_loss=0.08375, over 972371.72 frames.], batch size: 22, lr: 1.50e-03 +2022-05-03 16:04:42,929 INFO [train.py:715] (7/8) Epoch 0, batch 19550, loss[loss=0.2309, simple_loss=0.2879, pruned_loss=0.08692, over 4980.00 frames.], tot_loss[loss=0.2198, simple_loss=0.2739, pruned_loss=0.08291, over 972166.23 frames.], batch size: 25, lr: 1.49e-03 +2022-05-03 16:05:23,274 INFO [train.py:715] (7/8) Epoch 0, batch 19600, loss[loss=0.2226, simple_loss=0.2924, pruned_loss=0.07637, over 4966.00 frames.], tot_loss[loss=0.22, simple_loss=0.2739, pruned_loss=0.08309, over 972511.06 frames.], batch size: 24, lr: 1.49e-03 +2022-05-03 16:06:03,065 INFO [train.py:715] (7/8) Epoch 0, batch 19650, loss[loss=0.2685, simple_loss=0.3142, pruned_loss=0.1113, over 4705.00 frames.], tot_loss[loss=0.22, simple_loss=0.2739, pruned_loss=0.08303, over 971998.72 frames.], batch size: 15, lr: 1.49e-03 +2022-05-03 16:06:42,550 INFO [train.py:715] (7/8) Epoch 0, batch 19700, loss[loss=0.2231, simple_loss=0.2697, pruned_loss=0.08828, over 4761.00 frames.], tot_loss[loss=0.2197, simple_loss=0.2737, pruned_loss=0.08285, over 972429.52 frames.], batch size: 18, lr: 1.49e-03 +2022-05-03 16:07:22,620 INFO [train.py:715] (7/8) Epoch 0, batch 19750, loss[loss=0.3049, simple_loss=0.3387, pruned_loss=0.1355, over 4896.00 frames.], tot_loss[loss=0.2202, simple_loss=0.2739, pruned_loss=0.08326, over 972416.13 frames.], batch size: 19, lr: 1.49e-03 +2022-05-03 16:08:02,296 INFO [train.py:715] (7/8) Epoch 0, batch 19800, loss[loss=0.1921, simple_loss=0.2405, pruned_loss=0.07183, over 4987.00 frames.], tot_loss[loss=0.2206, simple_loss=0.2743, pruned_loss=0.08349, over 972725.01 frames.], batch size: 14, lr: 1.48e-03 +2022-05-03 16:08:42,110 INFO [train.py:715] (7/8) Epoch 0, batch 19850, loss[loss=0.2022, simple_loss=0.2668, pruned_loss=0.06877, over 4777.00 frames.], tot_loss[loss=0.2209, simple_loss=0.2746, pruned_loss=0.08359, over 973058.96 frames.], batch size: 18, lr: 1.48e-03 +2022-05-03 16:09:21,344 INFO [train.py:715] (7/8) Epoch 0, batch 19900, loss[loss=0.2105, simple_loss=0.2764, pruned_loss=0.07229, over 4694.00 frames.], tot_loss[loss=0.22, simple_loss=0.274, pruned_loss=0.08297, over 972806.88 frames.], batch size: 15, lr: 1.48e-03 +2022-05-03 16:10:02,121 INFO [train.py:715] (7/8) Epoch 0, batch 19950, loss[loss=0.2115, simple_loss=0.2653, pruned_loss=0.07888, over 4790.00 frames.], tot_loss[loss=0.2184, simple_loss=0.2728, pruned_loss=0.08199, over 972423.00 frames.], batch size: 14, lr: 1.48e-03 +2022-05-03 16:10:42,171 INFO [train.py:715] (7/8) Epoch 0, batch 20000, loss[loss=0.2464, 
simple_loss=0.2917, pruned_loss=0.1006, over 4945.00 frames.], tot_loss[loss=0.2179, simple_loss=0.2721, pruned_loss=0.0819, over 972359.02 frames.], batch size: 21, lr: 1.48e-03 +2022-05-03 16:11:21,523 INFO [train.py:715] (7/8) Epoch 0, batch 20050, loss[loss=0.2386, simple_loss=0.284, pruned_loss=0.09658, over 4943.00 frames.], tot_loss[loss=0.2169, simple_loss=0.2713, pruned_loss=0.08122, over 972627.92 frames.], batch size: 29, lr: 1.48e-03 +2022-05-03 16:12:01,703 INFO [train.py:715] (7/8) Epoch 0, batch 20100, loss[loss=0.2239, simple_loss=0.2713, pruned_loss=0.08821, over 4799.00 frames.], tot_loss[loss=0.217, simple_loss=0.2713, pruned_loss=0.08132, over 971752.96 frames.], batch size: 14, lr: 1.47e-03 +2022-05-03 16:12:41,690 INFO [train.py:715] (7/8) Epoch 0, batch 20150, loss[loss=0.217, simple_loss=0.2828, pruned_loss=0.07562, over 4973.00 frames.], tot_loss[loss=0.2163, simple_loss=0.2707, pruned_loss=0.08094, over 971370.89 frames.], batch size: 24, lr: 1.47e-03 +2022-05-03 16:13:21,728 INFO [train.py:715] (7/8) Epoch 0, batch 20200, loss[loss=0.1733, simple_loss=0.2353, pruned_loss=0.05566, over 4696.00 frames.], tot_loss[loss=0.217, simple_loss=0.2715, pruned_loss=0.08118, over 971834.03 frames.], batch size: 15, lr: 1.47e-03 +2022-05-03 16:14:01,257 INFO [train.py:715] (7/8) Epoch 0, batch 20250, loss[loss=0.2397, simple_loss=0.2963, pruned_loss=0.09154, over 4946.00 frames.], tot_loss[loss=0.2168, simple_loss=0.2716, pruned_loss=0.08099, over 971497.90 frames.], batch size: 21, lr: 1.47e-03 +2022-05-03 16:14:42,003 INFO [train.py:715] (7/8) Epoch 0, batch 20300, loss[loss=0.2109, simple_loss=0.2758, pruned_loss=0.07301, over 4751.00 frames.], tot_loss[loss=0.2189, simple_loss=0.2736, pruned_loss=0.08211, over 971700.93 frames.], batch size: 19, lr: 1.47e-03 +2022-05-03 16:15:21,890 INFO [train.py:715] (7/8) Epoch 0, batch 20350, loss[loss=0.1979, simple_loss=0.2589, pruned_loss=0.06849, over 4849.00 frames.], tot_loss[loss=0.2164, simple_loss=0.2715, pruned_loss=0.08066, over 972115.30 frames.], batch size: 32, lr: 1.47e-03 +2022-05-03 16:16:00,951 INFO [train.py:715] (7/8) Epoch 0, batch 20400, loss[loss=0.2388, simple_loss=0.2876, pruned_loss=0.09496, over 4903.00 frames.], tot_loss[loss=0.2164, simple_loss=0.2714, pruned_loss=0.0807, over 972138.93 frames.], batch size: 17, lr: 1.46e-03 +2022-05-03 16:16:40,902 INFO [train.py:715] (7/8) Epoch 0, batch 20450, loss[loss=0.2401, simple_loss=0.3027, pruned_loss=0.08871, over 4837.00 frames.], tot_loss[loss=0.2165, simple_loss=0.2716, pruned_loss=0.08075, over 972037.27 frames.], batch size: 30, lr: 1.46e-03 +2022-05-03 16:17:20,440 INFO [train.py:715] (7/8) Epoch 0, batch 20500, loss[loss=0.213, simple_loss=0.2733, pruned_loss=0.07634, over 4914.00 frames.], tot_loss[loss=0.2158, simple_loss=0.2713, pruned_loss=0.0802, over 972111.83 frames.], batch size: 17, lr: 1.46e-03 +2022-05-03 16:18:00,502 INFO [train.py:715] (7/8) Epoch 0, batch 20550, loss[loss=0.2354, simple_loss=0.2952, pruned_loss=0.08786, over 4982.00 frames.], tot_loss[loss=0.2178, simple_loss=0.2729, pruned_loss=0.08133, over 972122.59 frames.], batch size: 24, lr: 1.46e-03 +2022-05-03 16:18:39,953 INFO [train.py:715] (7/8) Epoch 0, batch 20600, loss[loss=0.2788, simple_loss=0.3115, pruned_loss=0.123, over 4793.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2729, pruned_loss=0.08107, over 972730.71 frames.], batch size: 14, lr: 1.46e-03 +2022-05-03 16:19:19,652 INFO [train.py:715] (7/8) Epoch 0, batch 20650, loss[loss=0.2017, simple_loss=0.2451, 
pruned_loss=0.0792, over 4813.00 frames.], tot_loss[loss=0.218, simple_loss=0.2729, pruned_loss=0.08156, over 973184.66 frames.], batch size: 25, lr: 1.46e-03 +2022-05-03 16:20:00,381 INFO [train.py:715] (7/8) Epoch 0, batch 20700, loss[loss=0.22, simple_loss=0.2856, pruned_loss=0.07718, over 4785.00 frames.], tot_loss[loss=0.2172, simple_loss=0.2723, pruned_loss=0.08099, over 972553.39 frames.], batch size: 17, lr: 1.45e-03 +2022-05-03 16:20:39,700 INFO [train.py:715] (7/8) Epoch 0, batch 20750, loss[loss=0.1926, simple_loss=0.2488, pruned_loss=0.06825, over 4873.00 frames.], tot_loss[loss=0.2178, simple_loss=0.2729, pruned_loss=0.08133, over 972718.90 frames.], batch size: 32, lr: 1.45e-03 +2022-05-03 16:21:19,879 INFO [train.py:715] (7/8) Epoch 0, batch 20800, loss[loss=0.2249, simple_loss=0.2779, pruned_loss=0.08591, over 4968.00 frames.], tot_loss[loss=0.218, simple_loss=0.2734, pruned_loss=0.08132, over 973136.07 frames.], batch size: 15, lr: 1.45e-03 +2022-05-03 16:21:59,642 INFO [train.py:715] (7/8) Epoch 0, batch 20850, loss[loss=0.2217, simple_loss=0.2767, pruned_loss=0.08335, over 4830.00 frames.], tot_loss[loss=0.2169, simple_loss=0.2722, pruned_loss=0.08075, over 973245.48 frames.], batch size: 30, lr: 1.45e-03 +2022-05-03 16:22:39,125 INFO [train.py:715] (7/8) Epoch 0, batch 20900, loss[loss=0.194, simple_loss=0.2508, pruned_loss=0.06858, over 4863.00 frames.], tot_loss[loss=0.2171, simple_loss=0.2725, pruned_loss=0.08089, over 972657.92 frames.], batch size: 32, lr: 1.45e-03 +2022-05-03 16:23:19,651 INFO [train.py:715] (7/8) Epoch 0, batch 20950, loss[loss=0.1942, simple_loss=0.252, pruned_loss=0.06819, over 4889.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2706, pruned_loss=0.07973, over 972440.09 frames.], batch size: 16, lr: 1.45e-03 +2022-05-03 16:24:00,683 INFO [train.py:715] (7/8) Epoch 0, batch 21000, loss[loss=0.2185, simple_loss=0.2774, pruned_loss=0.07986, over 4755.00 frames.], tot_loss[loss=0.2168, simple_loss=0.2718, pruned_loss=0.08095, over 972882.30 frames.], batch size: 19, lr: 1.44e-03 +2022-05-03 16:24:00,684 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 16:24:16,220 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1386, simple_loss=0.2255, pruned_loss=0.02581, over 914524.00 frames. 
+2022-05-03 16:24:57,018 INFO [train.py:715] (7/8) Epoch 0, batch 21050, loss[loss=0.1499, simple_loss=0.2219, pruned_loss=0.03896, over 4763.00 frames.], tot_loss[loss=0.2194, simple_loss=0.274, pruned_loss=0.08243, over 972034.60 frames.], batch size: 19, lr: 1.44e-03 +2022-05-03 16:25:36,600 INFO [train.py:715] (7/8) Epoch 0, batch 21100, loss[loss=0.2036, simple_loss=0.2678, pruned_loss=0.06974, over 4781.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2724, pruned_loss=0.08135, over 971402.52 frames.], batch size: 18, lr: 1.44e-03 +2022-05-03 16:26:16,948 INFO [train.py:715] (7/8) Epoch 0, batch 21150, loss[loss=0.2027, simple_loss=0.2625, pruned_loss=0.07142, over 4987.00 frames.], tot_loss[loss=0.2167, simple_loss=0.272, pruned_loss=0.08071, over 972345.44 frames.], batch size: 28, lr: 1.44e-03 +2022-05-03 16:26:56,815 INFO [train.py:715] (7/8) Epoch 0, batch 21200, loss[loss=0.1615, simple_loss=0.2312, pruned_loss=0.04586, over 4965.00 frames.], tot_loss[loss=0.2161, simple_loss=0.2714, pruned_loss=0.0804, over 972612.49 frames.], batch size: 14, lr: 1.44e-03 +2022-05-03 16:27:37,356 INFO [train.py:715] (7/8) Epoch 0, batch 21250, loss[loss=0.3261, simple_loss=0.3494, pruned_loss=0.1514, over 4916.00 frames.], tot_loss[loss=0.2166, simple_loss=0.2717, pruned_loss=0.08078, over 972222.28 frames.], batch size: 18, lr: 1.44e-03 +2022-05-03 16:28:17,123 INFO [train.py:715] (7/8) Epoch 0, batch 21300, loss[loss=0.1818, simple_loss=0.2391, pruned_loss=0.06221, over 4798.00 frames.], tot_loss[loss=0.2156, simple_loss=0.2706, pruned_loss=0.08028, over 972106.20 frames.], batch size: 12, lr: 1.43e-03 +2022-05-03 16:28:57,542 INFO [train.py:715] (7/8) Epoch 0, batch 21350, loss[loss=0.2118, simple_loss=0.257, pruned_loss=0.08332, over 4753.00 frames.], tot_loss[loss=0.216, simple_loss=0.2709, pruned_loss=0.08053, over 971907.18 frames.], batch size: 16, lr: 1.43e-03 +2022-05-03 16:29:38,283 INFO [train.py:715] (7/8) Epoch 0, batch 21400, loss[loss=0.2202, simple_loss=0.272, pruned_loss=0.08425, over 4896.00 frames.], tot_loss[loss=0.2165, simple_loss=0.2713, pruned_loss=0.08088, over 972088.11 frames.], batch size: 22, lr: 1.43e-03 +2022-05-03 16:30:17,951 INFO [train.py:715] (7/8) Epoch 0, batch 21450, loss[loss=0.2076, simple_loss=0.2706, pruned_loss=0.07227, over 4816.00 frames.], tot_loss[loss=0.2161, simple_loss=0.2708, pruned_loss=0.08071, over 972549.79 frames.], batch size: 21, lr: 1.43e-03 +2022-05-03 16:30:57,791 INFO [train.py:715] (7/8) Epoch 0, batch 21500, loss[loss=0.1811, simple_loss=0.2403, pruned_loss=0.06094, over 4942.00 frames.], tot_loss[loss=0.2164, simple_loss=0.2711, pruned_loss=0.08081, over 972612.49 frames.], batch size: 21, lr: 1.43e-03 +2022-05-03 16:31:38,011 INFO [train.py:715] (7/8) Epoch 0, batch 21550, loss[loss=0.1892, simple_loss=0.2467, pruned_loss=0.06586, over 4752.00 frames.], tot_loss[loss=0.2154, simple_loss=0.2707, pruned_loss=0.08002, over 972645.07 frames.], batch size: 19, lr: 1.43e-03 +2022-05-03 16:32:18,475 INFO [train.py:715] (7/8) Epoch 0, batch 21600, loss[loss=0.2237, simple_loss=0.2752, pruned_loss=0.08604, over 4864.00 frames.], tot_loss[loss=0.2144, simple_loss=0.2699, pruned_loss=0.07943, over 972572.83 frames.], batch size: 20, lr: 1.42e-03 +2022-05-03 16:32:58,238 INFO [train.py:715] (7/8) Epoch 0, batch 21650, loss[loss=0.2427, simple_loss=0.293, pruned_loss=0.09619, over 4929.00 frames.], tot_loss[loss=0.2134, simple_loss=0.2693, pruned_loss=0.07874, over 972226.99 frames.], batch size: 23, lr: 1.42e-03 +2022-05-03 16:33:39,053 
INFO [train.py:715] (7/8) Epoch 0, batch 21700, loss[loss=0.2459, simple_loss=0.2867, pruned_loss=0.1026, over 4983.00 frames.], tot_loss[loss=0.2148, simple_loss=0.2702, pruned_loss=0.07969, over 972333.82 frames.], batch size: 25, lr: 1.42e-03 +2022-05-03 16:34:19,205 INFO [train.py:715] (7/8) Epoch 0, batch 21750, loss[loss=0.2343, simple_loss=0.2757, pruned_loss=0.09644, over 4751.00 frames.], tot_loss[loss=0.2154, simple_loss=0.2707, pruned_loss=0.08003, over 972588.06 frames.], batch size: 16, lr: 1.42e-03 +2022-05-03 16:34:58,792 INFO [train.py:715] (7/8) Epoch 0, batch 21800, loss[loss=0.2114, simple_loss=0.2651, pruned_loss=0.07879, over 4902.00 frames.], tot_loss[loss=0.2153, simple_loss=0.2702, pruned_loss=0.08018, over 972144.80 frames.], batch size: 18, lr: 1.42e-03 +2022-05-03 16:35:38,622 INFO [train.py:715] (7/8) Epoch 0, batch 21850, loss[loss=0.2374, simple_loss=0.2914, pruned_loss=0.09171, over 4759.00 frames.], tot_loss[loss=0.2163, simple_loss=0.2711, pruned_loss=0.08076, over 972301.91 frames.], batch size: 19, lr: 1.42e-03 +2022-05-03 16:36:19,095 INFO [train.py:715] (7/8) Epoch 0, batch 21900, loss[loss=0.1981, simple_loss=0.2598, pruned_loss=0.06824, over 4911.00 frames.], tot_loss[loss=0.2148, simple_loss=0.2698, pruned_loss=0.07994, over 972089.93 frames.], batch size: 17, lr: 1.42e-03 +2022-05-03 16:36:59,004 INFO [train.py:715] (7/8) Epoch 0, batch 21950, loss[loss=0.196, simple_loss=0.2536, pruned_loss=0.06921, over 4900.00 frames.], tot_loss[loss=0.2149, simple_loss=0.2698, pruned_loss=0.08003, over 972122.77 frames.], batch size: 18, lr: 1.41e-03 +2022-05-03 16:37:38,289 INFO [train.py:715] (7/8) Epoch 0, batch 22000, loss[loss=0.2229, simple_loss=0.2708, pruned_loss=0.08746, over 4902.00 frames.], tot_loss[loss=0.2156, simple_loss=0.2705, pruned_loss=0.08036, over 972082.87 frames.], batch size: 18, lr: 1.41e-03 +2022-05-03 16:38:18,447 INFO [train.py:715] (7/8) Epoch 0, batch 22050, loss[loss=0.2024, simple_loss=0.2567, pruned_loss=0.07401, over 4986.00 frames.], tot_loss[loss=0.2153, simple_loss=0.2705, pruned_loss=0.08009, over 972777.27 frames.], batch size: 15, lr: 1.41e-03 +2022-05-03 16:38:58,602 INFO [train.py:715] (7/8) Epoch 0, batch 22100, loss[loss=0.1912, simple_loss=0.2481, pruned_loss=0.06713, over 4818.00 frames.], tot_loss[loss=0.2144, simple_loss=0.2702, pruned_loss=0.07933, over 972315.08 frames.], batch size: 26, lr: 1.41e-03 +2022-05-03 16:39:38,121 INFO [train.py:715] (7/8) Epoch 0, batch 22150, loss[loss=0.1813, simple_loss=0.2391, pruned_loss=0.0618, over 4859.00 frames.], tot_loss[loss=0.2161, simple_loss=0.2716, pruned_loss=0.08034, over 972160.66 frames.], batch size: 20, lr: 1.41e-03 +2022-05-03 16:40:17,927 INFO [train.py:715] (7/8) Epoch 0, batch 22200, loss[loss=0.1631, simple_loss=0.226, pruned_loss=0.05003, over 4875.00 frames.], tot_loss[loss=0.2157, simple_loss=0.2717, pruned_loss=0.07988, over 971426.47 frames.], batch size: 32, lr: 1.41e-03 +2022-05-03 16:40:58,311 INFO [train.py:715] (7/8) Epoch 0, batch 22250, loss[loss=0.222, simple_loss=0.2676, pruned_loss=0.08822, over 4728.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2705, pruned_loss=0.07985, over 969975.00 frames.], batch size: 16, lr: 1.40e-03 +2022-05-03 16:41:38,380 INFO [train.py:715] (7/8) Epoch 0, batch 22300, loss[loss=0.1868, simple_loss=0.2483, pruned_loss=0.06268, over 4779.00 frames.], tot_loss[loss=0.2147, simple_loss=0.2698, pruned_loss=0.0798, over 971255.03 frames.], batch size: 18, lr: 1.40e-03 +2022-05-03 16:42:18,084 INFO [train.py:715] 
(7/8) Epoch 0, batch 22350, loss[loss=0.2056, simple_loss=0.2709, pruned_loss=0.0702, over 4907.00 frames.], tot_loss[loss=0.214, simple_loss=0.2689, pruned_loss=0.07954, over 971870.89 frames.], batch size: 17, lr: 1.40e-03 +2022-05-03 16:42:58,251 INFO [train.py:715] (7/8) Epoch 0, batch 22400, loss[loss=0.2674, simple_loss=0.3067, pruned_loss=0.1141, over 4851.00 frames.], tot_loss[loss=0.2137, simple_loss=0.2694, pruned_loss=0.07901, over 972386.97 frames.], batch size: 15, lr: 1.40e-03 +2022-05-03 16:43:38,085 INFO [train.py:715] (7/8) Epoch 0, batch 22450, loss[loss=0.2014, simple_loss=0.2585, pruned_loss=0.07214, over 4917.00 frames.], tot_loss[loss=0.2128, simple_loss=0.2689, pruned_loss=0.07834, over 972978.10 frames.], batch size: 18, lr: 1.40e-03 +2022-05-03 16:44:17,448 INFO [train.py:715] (7/8) Epoch 0, batch 22500, loss[loss=0.2452, simple_loss=0.2917, pruned_loss=0.09937, over 4741.00 frames.], tot_loss[loss=0.2148, simple_loss=0.2705, pruned_loss=0.07952, over 973451.26 frames.], batch size: 16, lr: 1.40e-03 +2022-05-03 16:44:57,230 INFO [train.py:715] (7/8) Epoch 0, batch 22550, loss[loss=0.1827, simple_loss=0.2465, pruned_loss=0.05945, over 4695.00 frames.], tot_loss[loss=0.2156, simple_loss=0.271, pruned_loss=0.08014, over 973104.83 frames.], batch size: 15, lr: 1.40e-03 +2022-05-03 16:45:37,441 INFO [train.py:715] (7/8) Epoch 0, batch 22600, loss[loss=0.2242, simple_loss=0.2829, pruned_loss=0.08272, over 4926.00 frames.], tot_loss[loss=0.214, simple_loss=0.2698, pruned_loss=0.07913, over 973439.34 frames.], batch size: 23, lr: 1.39e-03 +2022-05-03 16:46:18,094 INFO [train.py:715] (7/8) Epoch 0, batch 22650, loss[loss=0.2153, simple_loss=0.2713, pruned_loss=0.07969, over 4903.00 frames.], tot_loss[loss=0.2142, simple_loss=0.2698, pruned_loss=0.07932, over 973086.54 frames.], batch size: 19, lr: 1.39e-03 +2022-05-03 16:46:57,302 INFO [train.py:715] (7/8) Epoch 0, batch 22700, loss[loss=0.1703, simple_loss=0.2457, pruned_loss=0.04747, over 4894.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2705, pruned_loss=0.07991, over 973863.21 frames.], batch size: 19, lr: 1.39e-03 +2022-05-03 16:47:37,376 INFO [train.py:715] (7/8) Epoch 0, batch 22750, loss[loss=0.1609, simple_loss=0.235, pruned_loss=0.04336, over 4807.00 frames.], tot_loss[loss=0.2158, simple_loss=0.2715, pruned_loss=0.08003, over 973785.07 frames.], batch size: 21, lr: 1.39e-03 +2022-05-03 16:48:17,860 INFO [train.py:715] (7/8) Epoch 0, batch 22800, loss[loss=0.2129, simple_loss=0.2747, pruned_loss=0.07555, over 4794.00 frames.], tot_loss[loss=0.2146, simple_loss=0.2706, pruned_loss=0.07935, over 973316.40 frames.], batch size: 14, lr: 1.39e-03 +2022-05-03 16:48:57,456 INFO [train.py:715] (7/8) Epoch 0, batch 22850, loss[loss=0.2226, simple_loss=0.2666, pruned_loss=0.08933, over 4774.00 frames.], tot_loss[loss=0.2145, simple_loss=0.2703, pruned_loss=0.07932, over 972328.86 frames.], batch size: 18, lr: 1.39e-03 +2022-05-03 16:49:37,566 INFO [train.py:715] (7/8) Epoch 0, batch 22900, loss[loss=0.2035, simple_loss=0.2612, pruned_loss=0.07294, over 4774.00 frames.], tot_loss[loss=0.2123, simple_loss=0.2687, pruned_loss=0.07794, over 972003.91 frames.], batch size: 18, lr: 1.39e-03 +2022-05-03 16:50:17,835 INFO [train.py:715] (7/8) Epoch 0, batch 22950, loss[loss=0.2019, simple_loss=0.2634, pruned_loss=0.07018, over 4748.00 frames.], tot_loss[loss=0.2138, simple_loss=0.2697, pruned_loss=0.07896, over 971823.59 frames.], batch size: 12, lr: 1.38e-03 +2022-05-03 16:50:58,462 INFO [train.py:715] (7/8) Epoch 0, batch 
23000, loss[loss=0.2428, simple_loss=0.2864, pruned_loss=0.09966, over 4915.00 frames.], tot_loss[loss=0.2134, simple_loss=0.2695, pruned_loss=0.07872, over 972066.22 frames.], batch size: 39, lr: 1.38e-03 +2022-05-03 16:51:37,481 INFO [train.py:715] (7/8) Epoch 0, batch 23050, loss[loss=0.2311, simple_loss=0.3002, pruned_loss=0.08097, over 4893.00 frames.], tot_loss[loss=0.2145, simple_loss=0.27, pruned_loss=0.07953, over 973447.49 frames.], batch size: 19, lr: 1.38e-03 +2022-05-03 16:52:18,419 INFO [train.py:715] (7/8) Epoch 0, batch 23100, loss[loss=0.1768, simple_loss=0.2267, pruned_loss=0.06351, over 4816.00 frames.], tot_loss[loss=0.2128, simple_loss=0.2687, pruned_loss=0.07844, over 973688.75 frames.], batch size: 25, lr: 1.38e-03 +2022-05-03 16:52:59,441 INFO [train.py:715] (7/8) Epoch 0, batch 23150, loss[loss=0.2224, simple_loss=0.2818, pruned_loss=0.08151, over 4964.00 frames.], tot_loss[loss=0.2134, simple_loss=0.2691, pruned_loss=0.07883, over 973411.61 frames.], batch size: 29, lr: 1.38e-03 +2022-05-03 16:53:39,187 INFO [train.py:715] (7/8) Epoch 0, batch 23200, loss[loss=0.1847, simple_loss=0.245, pruned_loss=0.06219, over 4976.00 frames.], tot_loss[loss=0.2126, simple_loss=0.2683, pruned_loss=0.0784, over 974025.59 frames.], batch size: 15, lr: 1.38e-03 +2022-05-03 16:54:19,756 INFO [train.py:715] (7/8) Epoch 0, batch 23250, loss[loss=0.2251, simple_loss=0.2816, pruned_loss=0.08433, over 4875.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2679, pruned_loss=0.07785, over 972936.20 frames.], batch size: 16, lr: 1.38e-03 +2022-05-03 16:55:00,180 INFO [train.py:715] (7/8) Epoch 0, batch 23300, loss[loss=0.1988, simple_loss=0.2618, pruned_loss=0.06793, over 4817.00 frames.], tot_loss[loss=0.2117, simple_loss=0.2679, pruned_loss=0.07778, over 972790.87 frames.], batch size: 26, lr: 1.37e-03 +2022-05-03 16:55:40,656 INFO [train.py:715] (7/8) Epoch 0, batch 23350, loss[loss=0.1677, simple_loss=0.2364, pruned_loss=0.04952, over 4929.00 frames.], tot_loss[loss=0.2107, simple_loss=0.2669, pruned_loss=0.07724, over 973217.41 frames.], batch size: 21, lr: 1.37e-03 +2022-05-03 16:56:21,259 INFO [train.py:715] (7/8) Epoch 0, batch 23400, loss[loss=0.209, simple_loss=0.2618, pruned_loss=0.07811, over 4986.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2663, pruned_loss=0.07729, over 973624.04 frames.], batch size: 25, lr: 1.37e-03 +2022-05-03 16:57:02,271 INFO [train.py:715] (7/8) Epoch 0, batch 23450, loss[loss=0.2662, simple_loss=0.2901, pruned_loss=0.1211, over 4985.00 frames.], tot_loss[loss=0.2103, simple_loss=0.266, pruned_loss=0.07727, over 973081.18 frames.], batch size: 14, lr: 1.37e-03 +2022-05-03 16:57:43,372 INFO [train.py:715] (7/8) Epoch 0, batch 23500, loss[loss=0.2123, simple_loss=0.2718, pruned_loss=0.07641, over 4882.00 frames.], tot_loss[loss=0.2107, simple_loss=0.2663, pruned_loss=0.07757, over 972738.16 frames.], batch size: 22, lr: 1.37e-03 +2022-05-03 16:58:23,224 INFO [train.py:715] (7/8) Epoch 0, batch 23550, loss[loss=0.2133, simple_loss=0.2706, pruned_loss=0.07803, over 4809.00 frames.], tot_loss[loss=0.2117, simple_loss=0.2671, pruned_loss=0.07809, over 972103.86 frames.], batch size: 21, lr: 1.37e-03 +2022-05-03 16:59:04,085 INFO [train.py:715] (7/8) Epoch 0, batch 23600, loss[loss=0.1977, simple_loss=0.2713, pruned_loss=0.06204, over 4930.00 frames.], tot_loss[loss=0.2122, simple_loss=0.2681, pruned_loss=0.07817, over 971966.48 frames.], batch size: 18, lr: 1.37e-03 +2022-05-03 16:59:44,350 INFO [train.py:715] (7/8) Epoch 0, batch 23650, loss[loss=0.2226, 
simple_loss=0.2741, pruned_loss=0.0855, over 4912.00 frames.], tot_loss[loss=0.213, simple_loss=0.2687, pruned_loss=0.0787, over 972470.84 frames.], batch size: 19, lr: 1.36e-03 +2022-05-03 17:00:24,470 INFO [train.py:715] (7/8) Epoch 0, batch 23700, loss[loss=0.186, simple_loss=0.2482, pruned_loss=0.0619, over 4878.00 frames.], tot_loss[loss=0.2133, simple_loss=0.2689, pruned_loss=0.07887, over 972196.68 frames.], batch size: 22, lr: 1.36e-03 +2022-05-03 17:01:03,661 INFO [train.py:715] (7/8) Epoch 0, batch 23750, loss[loss=0.218, simple_loss=0.2734, pruned_loss=0.08126, over 4965.00 frames.], tot_loss[loss=0.2123, simple_loss=0.2679, pruned_loss=0.07834, over 972158.72 frames.], batch size: 15, lr: 1.36e-03 +2022-05-03 17:01:43,663 INFO [train.py:715] (7/8) Epoch 0, batch 23800, loss[loss=0.2061, simple_loss=0.2782, pruned_loss=0.06703, over 4863.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2679, pruned_loss=0.07782, over 971925.67 frames.], batch size: 20, lr: 1.36e-03 +2022-05-03 17:02:24,152 INFO [train.py:715] (7/8) Epoch 0, batch 23850, loss[loss=0.2277, simple_loss=0.2792, pruned_loss=0.08804, over 4956.00 frames.], tot_loss[loss=0.2135, simple_loss=0.2691, pruned_loss=0.07899, over 972681.73 frames.], batch size: 39, lr: 1.36e-03 +2022-05-03 17:03:03,309 INFO [train.py:715] (7/8) Epoch 0, batch 23900, loss[loss=0.1633, simple_loss=0.228, pruned_loss=0.04934, over 4977.00 frames.], tot_loss[loss=0.2139, simple_loss=0.2692, pruned_loss=0.07925, over 972846.83 frames.], batch size: 15, lr: 1.36e-03 +2022-05-03 17:03:43,456 INFO [train.py:715] (7/8) Epoch 0, batch 23950, loss[loss=0.1971, simple_loss=0.2468, pruned_loss=0.07367, over 4787.00 frames.], tot_loss[loss=0.2122, simple_loss=0.268, pruned_loss=0.07824, over 972875.79 frames.], batch size: 14, lr: 1.36e-03 +2022-05-03 17:04:26,569 INFO [train.py:715] (7/8) Epoch 0, batch 24000, loss[loss=0.2668, simple_loss=0.3201, pruned_loss=0.1067, over 4872.00 frames.], tot_loss[loss=0.2125, simple_loss=0.2684, pruned_loss=0.07826, over 972500.92 frames.], batch size: 16, lr: 1.35e-03 +2022-05-03 17:04:26,569 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 17:04:40,850 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1357, simple_loss=0.2226, pruned_loss=0.02435, over 914524.00 frames. 
+2022-05-03 17:05:21,170 INFO [train.py:715] (7/8) Epoch 0, batch 24050, loss[loss=0.2193, simple_loss=0.2698, pruned_loss=0.08439, over 4749.00 frames.], tot_loss[loss=0.2114, simple_loss=0.2673, pruned_loss=0.07778, over 972123.09 frames.], batch size: 19, lr: 1.35e-03 +2022-05-03 17:06:00,599 INFO [train.py:715] (7/8) Epoch 0, batch 24100, loss[loss=0.2465, simple_loss=0.2908, pruned_loss=0.1011, over 4958.00 frames.], tot_loss[loss=0.2115, simple_loss=0.2672, pruned_loss=0.07787, over 971807.55 frames.], batch size: 35, lr: 1.35e-03 +2022-05-03 17:06:40,582 INFO [train.py:715] (7/8) Epoch 0, batch 24150, loss[loss=0.1943, simple_loss=0.2548, pruned_loss=0.06686, over 4664.00 frames.], tot_loss[loss=0.2117, simple_loss=0.2678, pruned_loss=0.07776, over 972225.04 frames.], batch size: 13, lr: 1.35e-03 +2022-05-03 17:07:20,606 INFO [train.py:715] (7/8) Epoch 0, batch 24200, loss[loss=0.2685, simple_loss=0.3049, pruned_loss=0.1161, over 4906.00 frames.], tot_loss[loss=0.2115, simple_loss=0.2673, pruned_loss=0.07787, over 972459.42 frames.], batch size: 23, lr: 1.35e-03 +2022-05-03 17:08:01,229 INFO [train.py:715] (7/8) Epoch 0, batch 24250, loss[loss=0.2052, simple_loss=0.2739, pruned_loss=0.06828, over 4810.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2679, pruned_loss=0.07767, over 971630.36 frames.], batch size: 24, lr: 1.35e-03 +2022-05-03 17:08:40,837 INFO [train.py:715] (7/8) Epoch 0, batch 24300, loss[loss=0.1788, simple_loss=0.2464, pruned_loss=0.05562, over 4946.00 frames.], tot_loss[loss=0.2119, simple_loss=0.2679, pruned_loss=0.07795, over 972129.28 frames.], batch size: 21, lr: 1.35e-03 +2022-05-03 17:09:21,013 INFO [train.py:715] (7/8) Epoch 0, batch 24350, loss[loss=0.1569, simple_loss=0.2256, pruned_loss=0.04409, over 4741.00 frames.], tot_loss[loss=0.2112, simple_loss=0.2677, pruned_loss=0.07739, over 972510.62 frames.], batch size: 16, lr: 1.35e-03 +2022-05-03 17:10:01,415 INFO [train.py:715] (7/8) Epoch 0, batch 24400, loss[loss=0.1907, simple_loss=0.2565, pruned_loss=0.06246, over 4730.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2673, pruned_loss=0.0768, over 972605.38 frames.], batch size: 16, lr: 1.34e-03 +2022-05-03 17:10:40,940 INFO [train.py:715] (7/8) Epoch 0, batch 24450, loss[loss=0.246, simple_loss=0.2986, pruned_loss=0.09672, over 4826.00 frames.], tot_loss[loss=0.2111, simple_loss=0.2676, pruned_loss=0.07726, over 972868.78 frames.], batch size: 15, lr: 1.34e-03 +2022-05-03 17:11:21,051 INFO [train.py:715] (7/8) Epoch 0, batch 24500, loss[loss=0.2072, simple_loss=0.2497, pruned_loss=0.08233, over 4879.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2668, pruned_loss=0.077, over 971928.87 frames.], batch size: 32, lr: 1.34e-03 +2022-05-03 17:12:01,320 INFO [train.py:715] (7/8) Epoch 0, batch 24550, loss[loss=0.1734, simple_loss=0.2449, pruned_loss=0.05097, over 4817.00 frames.], tot_loss[loss=0.2117, simple_loss=0.2676, pruned_loss=0.07794, over 972588.47 frames.], batch size: 25, lr: 1.34e-03 +2022-05-03 17:12:41,517 INFO [train.py:715] (7/8) Epoch 0, batch 24600, loss[loss=0.2112, simple_loss=0.2811, pruned_loss=0.07065, over 4794.00 frames.], tot_loss[loss=0.2128, simple_loss=0.2685, pruned_loss=0.07852, over 970973.51 frames.], batch size: 24, lr: 1.34e-03 +2022-05-03 17:13:20,994 INFO [train.py:715] (7/8) Epoch 0, batch 24650, loss[loss=0.1912, simple_loss=0.249, pruned_loss=0.06673, over 4927.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2676, pruned_loss=0.07801, over 971486.45 frames.], batch size: 35, lr: 1.34e-03 +2022-05-03 17:14:01,416 
INFO [train.py:715] (7/8) Epoch 0, batch 24700, loss[loss=0.2496, simple_loss=0.2851, pruned_loss=0.107, over 4940.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2677, pruned_loss=0.07795, over 972303.91 frames.], batch size: 35, lr: 1.34e-03 +2022-05-03 17:14:42,121 INFO [train.py:715] (7/8) Epoch 0, batch 24750, loss[loss=0.1612, simple_loss=0.2239, pruned_loss=0.04919, over 4770.00 frames.], tot_loss[loss=0.2112, simple_loss=0.2674, pruned_loss=0.07746, over 972539.90 frames.], batch size: 18, lr: 1.33e-03 +2022-05-03 17:15:21,178 INFO [train.py:715] (7/8) Epoch 0, batch 24800, loss[loss=0.2202, simple_loss=0.2731, pruned_loss=0.08362, over 4838.00 frames.], tot_loss[loss=0.21, simple_loss=0.2666, pruned_loss=0.07673, over 972385.92 frames.], batch size: 30, lr: 1.33e-03 +2022-05-03 17:16:01,310 INFO [train.py:715] (7/8) Epoch 0, batch 24850, loss[loss=0.1979, simple_loss=0.2597, pruned_loss=0.06802, over 4981.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2671, pruned_loss=0.0769, over 971759.87 frames.], batch size: 25, lr: 1.33e-03 +2022-05-03 17:16:41,590 INFO [train.py:715] (7/8) Epoch 0, batch 24900, loss[loss=0.2675, simple_loss=0.289, pruned_loss=0.123, over 4982.00 frames.], tot_loss[loss=0.2106, simple_loss=0.267, pruned_loss=0.07711, over 971895.97 frames.], batch size: 25, lr: 1.33e-03 +2022-05-03 17:17:21,631 INFO [train.py:715] (7/8) Epoch 0, batch 24950, loss[loss=0.1896, simple_loss=0.2492, pruned_loss=0.06496, over 4967.00 frames.], tot_loss[loss=0.2097, simple_loss=0.266, pruned_loss=0.07671, over 971473.24 frames.], batch size: 28, lr: 1.33e-03 +2022-05-03 17:18:01,152 INFO [train.py:715] (7/8) Epoch 0, batch 25000, loss[loss=0.2006, simple_loss=0.2576, pruned_loss=0.07183, over 4778.00 frames.], tot_loss[loss=0.2089, simple_loss=0.2655, pruned_loss=0.0761, over 971900.11 frames.], batch size: 18, lr: 1.33e-03 +2022-05-03 17:18:41,404 INFO [train.py:715] (7/8) Epoch 0, batch 25050, loss[loss=0.1899, simple_loss=0.2547, pruned_loss=0.0625, over 4895.00 frames.], tot_loss[loss=0.2097, simple_loss=0.2665, pruned_loss=0.07642, over 971451.37 frames.], batch size: 19, lr: 1.33e-03 +2022-05-03 17:19:21,103 INFO [train.py:715] (7/8) Epoch 0, batch 25100, loss[loss=0.2751, simple_loss=0.3184, pruned_loss=0.1159, over 4735.00 frames.], tot_loss[loss=0.2098, simple_loss=0.2664, pruned_loss=0.0766, over 971542.27 frames.], batch size: 16, lr: 1.33e-03 +2022-05-03 17:20:00,600 INFO [train.py:715] (7/8) Epoch 0, batch 25150, loss[loss=0.2175, simple_loss=0.2801, pruned_loss=0.07744, over 4776.00 frames.], tot_loss[loss=0.2091, simple_loss=0.2661, pruned_loss=0.07608, over 971747.94 frames.], batch size: 17, lr: 1.32e-03 +2022-05-03 17:20:41,134 INFO [train.py:715] (7/8) Epoch 0, batch 25200, loss[loss=0.202, simple_loss=0.2589, pruned_loss=0.07254, over 4874.00 frames.], tot_loss[loss=0.2088, simple_loss=0.2663, pruned_loss=0.07567, over 972360.47 frames.], batch size: 22, lr: 1.32e-03 +2022-05-03 17:21:21,702 INFO [train.py:715] (7/8) Epoch 0, batch 25250, loss[loss=0.2004, simple_loss=0.2532, pruned_loss=0.0738, over 4927.00 frames.], tot_loss[loss=0.2087, simple_loss=0.2661, pruned_loss=0.07571, over 972352.14 frames.], batch size: 29, lr: 1.32e-03 +2022-05-03 17:22:02,263 INFO [train.py:715] (7/8) Epoch 0, batch 25300, loss[loss=0.1835, simple_loss=0.2495, pruned_loss=0.05874, over 4933.00 frames.], tot_loss[loss=0.2079, simple_loss=0.2652, pruned_loss=0.07528, over 971841.30 frames.], batch size: 21, lr: 1.32e-03 +2022-05-03 17:22:42,091 INFO [train.py:715] (7/8) Epoch 0, 
batch 25350, loss[loss=0.2413, simple_loss=0.2834, pruned_loss=0.09957, over 4918.00 frames.], tot_loss[loss=0.2095, simple_loss=0.2663, pruned_loss=0.07637, over 972410.93 frames.], batch size: 17, lr: 1.32e-03 +2022-05-03 17:23:22,554 INFO [train.py:715] (7/8) Epoch 0, batch 25400, loss[loss=0.242, simple_loss=0.2876, pruned_loss=0.09815, over 4877.00 frames.], tot_loss[loss=0.2094, simple_loss=0.2667, pruned_loss=0.07609, over 972150.52 frames.], batch size: 38, lr: 1.32e-03 +2022-05-03 17:24:02,724 INFO [train.py:715] (7/8) Epoch 0, batch 25450, loss[loss=0.1948, simple_loss=0.2522, pruned_loss=0.06866, over 4922.00 frames.], tot_loss[loss=0.2083, simple_loss=0.2661, pruned_loss=0.07525, over 972974.62 frames.], batch size: 18, lr: 1.32e-03 +2022-05-03 17:24:41,710 INFO [train.py:715] (7/8) Epoch 0, batch 25500, loss[loss=0.2157, simple_loss=0.2598, pruned_loss=0.08577, over 4778.00 frames.], tot_loss[loss=0.2078, simple_loss=0.2657, pruned_loss=0.07494, over 971993.84 frames.], batch size: 14, lr: 1.32e-03 +2022-05-03 17:25:22,424 INFO [train.py:715] (7/8) Epoch 0, batch 25550, loss[loss=0.2409, simple_loss=0.2863, pruned_loss=0.09774, over 4940.00 frames.], tot_loss[loss=0.2096, simple_loss=0.2671, pruned_loss=0.07606, over 972184.64 frames.], batch size: 39, lr: 1.31e-03 +2022-05-03 17:26:02,028 INFO [train.py:715] (7/8) Epoch 0, batch 25600, loss[loss=0.1944, simple_loss=0.2569, pruned_loss=0.066, over 4878.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2669, pruned_loss=0.07571, over 972421.76 frames.], batch size: 38, lr: 1.31e-03 +2022-05-03 17:26:41,738 INFO [train.py:715] (7/8) Epoch 0, batch 25650, loss[loss=0.2173, simple_loss=0.279, pruned_loss=0.07776, over 4754.00 frames.], tot_loss[loss=0.2103, simple_loss=0.2673, pruned_loss=0.07665, over 972923.61 frames.], batch size: 19, lr: 1.31e-03 +2022-05-03 17:27:21,449 INFO [train.py:715] (7/8) Epoch 0, batch 25700, loss[loss=0.1888, simple_loss=0.2641, pruned_loss=0.0568, over 4768.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2662, pruned_loss=0.07614, over 972922.88 frames.], batch size: 19, lr: 1.31e-03 +2022-05-03 17:28:01,736 INFO [train.py:715] (7/8) Epoch 0, batch 25750, loss[loss=0.2179, simple_loss=0.2722, pruned_loss=0.08186, over 4800.00 frames.], tot_loss[loss=0.2079, simple_loss=0.2652, pruned_loss=0.0753, over 971827.59 frames.], batch size: 14, lr: 1.31e-03 +2022-05-03 17:28:41,515 INFO [train.py:715] (7/8) Epoch 0, batch 25800, loss[loss=0.2044, simple_loss=0.2615, pruned_loss=0.07367, over 4986.00 frames.], tot_loss[loss=0.2083, simple_loss=0.2653, pruned_loss=0.07566, over 971895.40 frames.], batch size: 20, lr: 1.31e-03 +2022-05-03 17:29:20,757 INFO [train.py:715] (7/8) Epoch 0, batch 25850, loss[loss=0.1784, simple_loss=0.2432, pruned_loss=0.05681, over 4901.00 frames.], tot_loss[loss=0.2075, simple_loss=0.2643, pruned_loss=0.07532, over 971088.44 frames.], batch size: 13, lr: 1.31e-03 +2022-05-03 17:30:01,475 INFO [train.py:715] (7/8) Epoch 0, batch 25900, loss[loss=0.1937, simple_loss=0.259, pruned_loss=0.06419, over 4824.00 frames.], tot_loss[loss=0.2065, simple_loss=0.2642, pruned_loss=0.07437, over 970929.33 frames.], batch size: 26, lr: 1.31e-03 +2022-05-03 17:30:41,233 INFO [train.py:715] (7/8) Epoch 0, batch 25950, loss[loss=0.2634, simple_loss=0.3059, pruned_loss=0.1104, over 4832.00 frames.], tot_loss[loss=0.2082, simple_loss=0.2655, pruned_loss=0.07541, over 971094.02 frames.], batch size: 13, lr: 1.30e-03 +2022-05-03 17:31:21,228 INFO [train.py:715] (7/8) Epoch 0, batch 26000, 
loss[loss=0.239, simple_loss=0.2819, pruned_loss=0.09811, over 4795.00 frames.], tot_loss[loss=0.2074, simple_loss=0.265, pruned_loss=0.07494, over 971653.87 frames.], batch size: 14, lr: 1.30e-03 +2022-05-03 17:32:01,172 INFO [train.py:715] (7/8) Epoch 0, batch 26050, loss[loss=0.2709, simple_loss=0.3213, pruned_loss=0.1102, over 4735.00 frames.], tot_loss[loss=0.208, simple_loss=0.2658, pruned_loss=0.07513, over 971588.93 frames.], batch size: 16, lr: 1.30e-03 +2022-05-03 17:32:41,633 INFO [train.py:715] (7/8) Epoch 0, batch 26100, loss[loss=0.2099, simple_loss=0.27, pruned_loss=0.07489, over 4888.00 frames.], tot_loss[loss=0.208, simple_loss=0.2658, pruned_loss=0.07516, over 971983.95 frames.], batch size: 22, lr: 1.30e-03 +2022-05-03 17:33:21,955 INFO [train.py:715] (7/8) Epoch 0, batch 26150, loss[loss=0.2432, simple_loss=0.2913, pruned_loss=0.09758, over 4934.00 frames.], tot_loss[loss=0.2085, simple_loss=0.2663, pruned_loss=0.07537, over 972108.45 frames.], batch size: 18, lr: 1.30e-03 +2022-05-03 17:34:00,859 INFO [train.py:715] (7/8) Epoch 0, batch 26200, loss[loss=0.2147, simple_loss=0.2753, pruned_loss=0.07699, over 4908.00 frames.], tot_loss[loss=0.2084, simple_loss=0.2658, pruned_loss=0.0755, over 972120.80 frames.], batch size: 22, lr: 1.30e-03 +2022-05-03 17:34:41,490 INFO [train.py:715] (7/8) Epoch 0, batch 26250, loss[loss=0.1413, simple_loss=0.2111, pruned_loss=0.03568, over 4829.00 frames.], tot_loss[loss=0.2084, simple_loss=0.2659, pruned_loss=0.07546, over 972349.25 frames.], batch size: 12, lr: 1.30e-03 +2022-05-03 17:35:21,435 INFO [train.py:715] (7/8) Epoch 0, batch 26300, loss[loss=0.1905, simple_loss=0.2426, pruned_loss=0.06927, over 4982.00 frames.], tot_loss[loss=0.2073, simple_loss=0.2644, pruned_loss=0.07507, over 972092.64 frames.], batch size: 14, lr: 1.30e-03 +2022-05-03 17:36:01,281 INFO [train.py:715] (7/8) Epoch 0, batch 26350, loss[loss=0.1578, simple_loss=0.2266, pruned_loss=0.04455, over 4826.00 frames.], tot_loss[loss=0.2071, simple_loss=0.2646, pruned_loss=0.07477, over 973009.30 frames.], batch size: 25, lr: 1.30e-03 +2022-05-03 17:36:41,220 INFO [train.py:715] (7/8) Epoch 0, batch 26400, loss[loss=0.2012, simple_loss=0.251, pruned_loss=0.07567, over 4773.00 frames.], tot_loss[loss=0.2077, simple_loss=0.2653, pruned_loss=0.07508, over 971847.42 frames.], batch size: 14, lr: 1.29e-03 +2022-05-03 17:37:21,344 INFO [train.py:715] (7/8) Epoch 0, batch 26450, loss[loss=0.2133, simple_loss=0.2761, pruned_loss=0.07524, over 4881.00 frames.], tot_loss[loss=0.2079, simple_loss=0.2655, pruned_loss=0.07515, over 971837.18 frames.], batch size: 19, lr: 1.29e-03 +2022-05-03 17:38:02,049 INFO [train.py:715] (7/8) Epoch 0, batch 26500, loss[loss=0.1902, simple_loss=0.26, pruned_loss=0.0602, over 4821.00 frames.], tot_loss[loss=0.2071, simple_loss=0.2648, pruned_loss=0.07467, over 972096.10 frames.], batch size: 26, lr: 1.29e-03 +2022-05-03 17:38:41,413 INFO [train.py:715] (7/8) Epoch 0, batch 26550, loss[loss=0.2154, simple_loss=0.2699, pruned_loss=0.08039, over 4846.00 frames.], tot_loss[loss=0.2063, simple_loss=0.264, pruned_loss=0.07432, over 971470.30 frames.], batch size: 32, lr: 1.29e-03 +2022-05-03 17:39:21,085 INFO [train.py:715] (7/8) Epoch 0, batch 26600, loss[loss=0.1785, simple_loss=0.2534, pruned_loss=0.05181, over 4947.00 frames.], tot_loss[loss=0.2065, simple_loss=0.264, pruned_loss=0.07454, over 972383.37 frames.], batch size: 21, lr: 1.29e-03 +2022-05-03 17:40:01,335 INFO [train.py:715] (7/8) Epoch 0, batch 26650, loss[loss=0.214, 
simple_loss=0.2584, pruned_loss=0.08484, over 4803.00 frames.], tot_loss[loss=0.2064, simple_loss=0.2638, pruned_loss=0.07445, over 971830.74 frames.], batch size: 14, lr: 1.29e-03 +2022-05-03 17:40:40,798 INFO [train.py:715] (7/8) Epoch 0, batch 26700, loss[loss=0.2582, simple_loss=0.3135, pruned_loss=0.1015, over 4837.00 frames.], tot_loss[loss=0.2071, simple_loss=0.2646, pruned_loss=0.0748, over 971645.78 frames.], batch size: 30, lr: 1.29e-03 +2022-05-03 17:41:20,825 INFO [train.py:715] (7/8) Epoch 0, batch 26750, loss[loss=0.1765, simple_loss=0.2461, pruned_loss=0.05347, over 4928.00 frames.], tot_loss[loss=0.2065, simple_loss=0.264, pruned_loss=0.07451, over 971064.28 frames.], batch size: 23, lr: 1.29e-03 +2022-05-03 17:42:01,252 INFO [train.py:715] (7/8) Epoch 0, batch 26800, loss[loss=0.2668, simple_loss=0.3156, pruned_loss=0.109, over 4913.00 frames.], tot_loss[loss=0.2073, simple_loss=0.265, pruned_loss=0.07478, over 970509.14 frames.], batch size: 18, lr: 1.28e-03 +2022-05-03 17:42:41,671 INFO [train.py:715] (7/8) Epoch 0, batch 26850, loss[loss=0.2596, simple_loss=0.316, pruned_loss=0.1016, over 4924.00 frames.], tot_loss[loss=0.2076, simple_loss=0.2651, pruned_loss=0.07501, over 970347.50 frames.], batch size: 18, lr: 1.28e-03 +2022-05-03 17:43:21,536 INFO [train.py:715] (7/8) Epoch 0, batch 26900, loss[loss=0.2112, simple_loss=0.2703, pruned_loss=0.07601, over 4754.00 frames.], tot_loss[loss=0.208, simple_loss=0.2651, pruned_loss=0.07548, over 970703.63 frames.], batch size: 19, lr: 1.28e-03 +2022-05-03 17:44:02,264 INFO [train.py:715] (7/8) Epoch 0, batch 26950, loss[loss=0.2505, simple_loss=0.3094, pruned_loss=0.0958, over 4831.00 frames.], tot_loss[loss=0.21, simple_loss=0.2665, pruned_loss=0.07676, over 970664.71 frames.], batch size: 26, lr: 1.28e-03 +2022-05-03 17:44:42,421 INFO [train.py:715] (7/8) Epoch 0, batch 27000, loss[loss=0.1878, simple_loss=0.2524, pruned_loss=0.06158, over 4810.00 frames.], tot_loss[loss=0.21, simple_loss=0.2669, pruned_loss=0.0766, over 970914.01 frames.], batch size: 25, lr: 1.28e-03 +2022-05-03 17:44:42,422 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 17:44:51,201 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1338, simple_loss=0.2208, pruned_loss=0.02337, over 914524.00 frames. 
+2022-05-03 17:45:31,275 INFO [train.py:715] (7/8) Epoch 0, batch 27050, loss[loss=0.1506, simple_loss=0.2217, pruned_loss=0.03977, over 4775.00 frames.], tot_loss[loss=0.2095, simple_loss=0.2664, pruned_loss=0.0763, over 970192.40 frames.], batch size: 18, lr: 1.28e-03 +2022-05-03 17:46:10,747 INFO [train.py:715] (7/8) Epoch 0, batch 27100, loss[loss=0.2275, simple_loss=0.2719, pruned_loss=0.09155, over 4778.00 frames.], tot_loss[loss=0.2094, simple_loss=0.2665, pruned_loss=0.07614, over 970729.21 frames.], batch size: 17, lr: 1.28e-03 +2022-05-03 17:46:51,329 INFO [train.py:715] (7/8) Epoch 0, batch 27150, loss[loss=0.2333, simple_loss=0.2825, pruned_loss=0.09208, over 4820.00 frames.], tot_loss[loss=0.2108, simple_loss=0.2675, pruned_loss=0.07709, over 970828.26 frames.], batch size: 12, lr: 1.28e-03 +2022-05-03 17:47:31,715 INFO [train.py:715] (7/8) Epoch 0, batch 27200, loss[loss=0.2079, simple_loss=0.2559, pruned_loss=0.0799, over 4839.00 frames.], tot_loss[loss=0.2096, simple_loss=0.2665, pruned_loss=0.07637, over 971563.09 frames.], batch size: 13, lr: 1.28e-03 +2022-05-03 17:48:11,815 INFO [train.py:715] (7/8) Epoch 0, batch 27250, loss[loss=0.1884, simple_loss=0.2548, pruned_loss=0.06101, over 4946.00 frames.], tot_loss[loss=0.2086, simple_loss=0.2659, pruned_loss=0.07562, over 971965.89 frames.], batch size: 21, lr: 1.27e-03 +2022-05-03 17:48:51,961 INFO [train.py:715] (7/8) Epoch 0, batch 27300, loss[loss=0.1885, simple_loss=0.2523, pruned_loss=0.06235, over 4866.00 frames.], tot_loss[loss=0.2083, simple_loss=0.2656, pruned_loss=0.07554, over 971942.21 frames.], batch size: 30, lr: 1.27e-03 +2022-05-03 17:49:31,861 INFO [train.py:715] (7/8) Epoch 0, batch 27350, loss[loss=0.1902, simple_loss=0.2593, pruned_loss=0.06053, over 4860.00 frames.], tot_loss[loss=0.2066, simple_loss=0.2643, pruned_loss=0.07449, over 972576.49 frames.], batch size: 20, lr: 1.27e-03 +2022-05-03 17:50:11,824 INFO [train.py:715] (7/8) Epoch 0, batch 27400, loss[loss=0.2344, simple_loss=0.2859, pruned_loss=0.09141, over 4806.00 frames.], tot_loss[loss=0.2066, simple_loss=0.2645, pruned_loss=0.07432, over 972364.03 frames.], batch size: 13, lr: 1.27e-03 +2022-05-03 17:50:51,096 INFO [train.py:715] (7/8) Epoch 0, batch 27450, loss[loss=0.2181, simple_loss=0.2734, pruned_loss=0.08136, over 4777.00 frames.], tot_loss[loss=0.2066, simple_loss=0.2645, pruned_loss=0.0743, over 971783.84 frames.], batch size: 18, lr: 1.27e-03 +2022-05-03 17:51:31,242 INFO [train.py:715] (7/8) Epoch 0, batch 27500, loss[loss=0.2152, simple_loss=0.2636, pruned_loss=0.08334, over 4918.00 frames.], tot_loss[loss=0.2055, simple_loss=0.2634, pruned_loss=0.07375, over 972475.34 frames.], batch size: 39, lr: 1.27e-03 +2022-05-03 17:52:11,053 INFO [train.py:715] (7/8) Epoch 0, batch 27550, loss[loss=0.1824, simple_loss=0.2313, pruned_loss=0.06677, over 4835.00 frames.], tot_loss[loss=0.2061, simple_loss=0.264, pruned_loss=0.07415, over 972376.51 frames.], batch size: 13, lr: 1.27e-03 +2022-05-03 17:52:50,537 INFO [train.py:715] (7/8) Epoch 0, batch 27600, loss[loss=0.2055, simple_loss=0.275, pruned_loss=0.06802, over 4775.00 frames.], tot_loss[loss=0.2062, simple_loss=0.2642, pruned_loss=0.07412, over 971660.47 frames.], batch size: 17, lr: 1.27e-03 +2022-05-03 17:53:29,970 INFO [train.py:715] (7/8) Epoch 0, batch 27650, loss[loss=0.2382, simple_loss=0.2965, pruned_loss=0.08991, over 4974.00 frames.], tot_loss[loss=0.2062, simple_loss=0.2642, pruned_loss=0.07407, over 971954.92 frames.], batch size: 24, lr: 1.27e-03 +2022-05-03 
17:54:09,974 INFO [train.py:715] (7/8) Epoch 0, batch 27700, loss[loss=0.3163, simple_loss=0.3518, pruned_loss=0.1404, over 4920.00 frames.], tot_loss[loss=0.206, simple_loss=0.2638, pruned_loss=0.07413, over 972866.95 frames.], batch size: 18, lr: 1.26e-03 +2022-05-03 17:54:50,344 INFO [train.py:715] (7/8) Epoch 0, batch 27750, loss[loss=0.2025, simple_loss=0.2587, pruned_loss=0.0731, over 4990.00 frames.], tot_loss[loss=0.2063, simple_loss=0.2639, pruned_loss=0.07433, over 972792.06 frames.], batch size: 14, lr: 1.26e-03 +2022-05-03 17:55:30,115 INFO [train.py:715] (7/8) Epoch 0, batch 27800, loss[loss=0.1904, simple_loss=0.2582, pruned_loss=0.06128, over 4705.00 frames.], tot_loss[loss=0.205, simple_loss=0.263, pruned_loss=0.07349, over 973224.32 frames.], batch size: 15, lr: 1.26e-03 +2022-05-03 17:56:10,385 INFO [train.py:715] (7/8) Epoch 0, batch 27850, loss[loss=0.2542, simple_loss=0.3048, pruned_loss=0.1018, over 4749.00 frames.], tot_loss[loss=0.2064, simple_loss=0.2639, pruned_loss=0.07451, over 973150.58 frames.], batch size: 16, lr: 1.26e-03 +2022-05-03 17:56:49,945 INFO [train.py:715] (7/8) Epoch 0, batch 27900, loss[loss=0.2263, simple_loss=0.2837, pruned_loss=0.0845, over 4990.00 frames.], tot_loss[loss=0.2068, simple_loss=0.2641, pruned_loss=0.07473, over 973089.12 frames.], batch size: 28, lr: 1.26e-03 +2022-05-03 17:57:29,410 INFO [train.py:715] (7/8) Epoch 0, batch 27950, loss[loss=0.2248, simple_loss=0.2653, pruned_loss=0.09214, over 4791.00 frames.], tot_loss[loss=0.2067, simple_loss=0.2638, pruned_loss=0.07486, over 972993.42 frames.], batch size: 12, lr: 1.26e-03 +2022-05-03 17:58:09,429 INFO [train.py:715] (7/8) Epoch 0, batch 28000, loss[loss=0.1655, simple_loss=0.2287, pruned_loss=0.05118, over 4888.00 frames.], tot_loss[loss=0.2058, simple_loss=0.2636, pruned_loss=0.074, over 972646.66 frames.], batch size: 19, lr: 1.26e-03 +2022-05-03 17:58:49,661 INFO [train.py:715] (7/8) Epoch 0, batch 28050, loss[loss=0.2142, simple_loss=0.2792, pruned_loss=0.0746, over 4849.00 frames.], tot_loss[loss=0.2051, simple_loss=0.2634, pruned_loss=0.07338, over 973051.56 frames.], batch size: 20, lr: 1.26e-03 +2022-05-03 17:59:29,710 INFO [train.py:715] (7/8) Epoch 0, batch 28100, loss[loss=0.2272, simple_loss=0.2841, pruned_loss=0.08513, over 4930.00 frames.], tot_loss[loss=0.2076, simple_loss=0.2653, pruned_loss=0.07495, over 972928.06 frames.], batch size: 23, lr: 1.26e-03 +2022-05-03 18:00:08,962 INFO [train.py:715] (7/8) Epoch 0, batch 28150, loss[loss=0.2404, simple_loss=0.2936, pruned_loss=0.09365, over 4957.00 frames.], tot_loss[loss=0.2081, simple_loss=0.2658, pruned_loss=0.07519, over 973004.35 frames.], batch size: 21, lr: 1.25e-03 +2022-05-03 18:00:49,205 INFO [train.py:715] (7/8) Epoch 0, batch 28200, loss[loss=0.207, simple_loss=0.2673, pruned_loss=0.07331, over 4987.00 frames.], tot_loss[loss=0.2072, simple_loss=0.2644, pruned_loss=0.07498, over 972140.88 frames.], batch size: 28, lr: 1.25e-03 +2022-05-03 18:01:28,911 INFO [train.py:715] (7/8) Epoch 0, batch 28250, loss[loss=0.1938, simple_loss=0.2466, pruned_loss=0.07057, over 4984.00 frames.], tot_loss[loss=0.2076, simple_loss=0.2648, pruned_loss=0.0752, over 972572.01 frames.], batch size: 15, lr: 1.25e-03 +2022-05-03 18:02:07,674 INFO [train.py:715] (7/8) Epoch 0, batch 28300, loss[loss=0.1747, simple_loss=0.2451, pruned_loss=0.05217, over 4955.00 frames.], tot_loss[loss=0.2069, simple_loss=0.2645, pruned_loss=0.07471, over 972431.71 frames.], batch size: 21, lr: 1.25e-03 +2022-05-03 18:02:48,212 INFO 
[train.py:715] (7/8) Epoch 0, batch 28350, loss[loss=0.2576, simple_loss=0.3038, pruned_loss=0.1057, over 4800.00 frames.], tot_loss[loss=0.207, simple_loss=0.2645, pruned_loss=0.07477, over 971939.82 frames.], batch size: 24, lr: 1.25e-03 +2022-05-03 18:03:27,717 INFO [train.py:715] (7/8) Epoch 0, batch 28400, loss[loss=0.1836, simple_loss=0.2236, pruned_loss=0.07184, over 4978.00 frames.], tot_loss[loss=0.2071, simple_loss=0.2642, pruned_loss=0.07503, over 971811.64 frames.], batch size: 28, lr: 1.25e-03 +2022-05-03 18:04:07,957 INFO [train.py:715] (7/8) Epoch 0, batch 28450, loss[loss=0.1984, simple_loss=0.2685, pruned_loss=0.06416, over 4774.00 frames.], tot_loss[loss=0.2071, simple_loss=0.2642, pruned_loss=0.07503, over 971871.02 frames.], batch size: 18, lr: 1.25e-03 +2022-05-03 18:04:47,635 INFO [train.py:715] (7/8) Epoch 0, batch 28500, loss[loss=0.2017, simple_loss=0.2606, pruned_loss=0.07142, over 4823.00 frames.], tot_loss[loss=0.206, simple_loss=0.2637, pruned_loss=0.0742, over 972608.67 frames.], batch size: 26, lr: 1.25e-03 +2022-05-03 18:05:28,106 INFO [train.py:715] (7/8) Epoch 0, batch 28550, loss[loss=0.2573, simple_loss=0.2907, pruned_loss=0.112, over 4954.00 frames.], tot_loss[loss=0.208, simple_loss=0.2651, pruned_loss=0.0754, over 972971.83 frames.], batch size: 14, lr: 1.25e-03 +2022-05-03 18:06:07,733 INFO [train.py:715] (7/8) Epoch 0, batch 28600, loss[loss=0.2262, simple_loss=0.2732, pruned_loss=0.08959, over 4816.00 frames.], tot_loss[loss=0.2083, simple_loss=0.2656, pruned_loss=0.07554, over 973901.85 frames.], batch size: 12, lr: 1.24e-03 +2022-05-03 18:06:46,966 INFO [train.py:715] (7/8) Epoch 0, batch 28650, loss[loss=0.223, simple_loss=0.273, pruned_loss=0.08652, over 4843.00 frames.], tot_loss[loss=0.21, simple_loss=0.2668, pruned_loss=0.07664, over 973753.82 frames.], batch size: 32, lr: 1.24e-03 +2022-05-03 18:07:26,845 INFO [train.py:715] (7/8) Epoch 0, batch 28700, loss[loss=0.1653, simple_loss=0.2414, pruned_loss=0.04459, over 4823.00 frames.], tot_loss[loss=0.2093, simple_loss=0.2663, pruned_loss=0.07613, over 972968.61 frames.], batch size: 13, lr: 1.24e-03 +2022-05-03 18:08:06,490 INFO [train.py:715] (7/8) Epoch 0, batch 28750, loss[loss=0.1814, simple_loss=0.2488, pruned_loss=0.05702, over 4966.00 frames.], tot_loss[loss=0.2077, simple_loss=0.2651, pruned_loss=0.07511, over 973071.93 frames.], batch size: 24, lr: 1.24e-03 +2022-05-03 18:08:46,806 INFO [train.py:715] (7/8) Epoch 0, batch 28800, loss[loss=0.1736, simple_loss=0.2447, pruned_loss=0.05124, over 4761.00 frames.], tot_loss[loss=0.2076, simple_loss=0.2651, pruned_loss=0.07501, over 972858.90 frames.], batch size: 19, lr: 1.24e-03 +2022-05-03 18:09:25,927 INFO [train.py:715] (7/8) Epoch 0, batch 28850, loss[loss=0.1939, simple_loss=0.2604, pruned_loss=0.06366, over 4937.00 frames.], tot_loss[loss=0.2073, simple_loss=0.2648, pruned_loss=0.07488, over 972787.74 frames.], batch size: 21, lr: 1.24e-03 +2022-05-03 18:10:05,954 INFO [train.py:715] (7/8) Epoch 0, batch 28900, loss[loss=0.2339, simple_loss=0.2829, pruned_loss=0.09246, over 4867.00 frames.], tot_loss[loss=0.2066, simple_loss=0.2644, pruned_loss=0.07442, over 972062.46 frames.], batch size: 20, lr: 1.24e-03 +2022-05-03 18:10:45,832 INFO [train.py:715] (7/8) Epoch 0, batch 28950, loss[loss=0.2047, simple_loss=0.2667, pruned_loss=0.0713, over 4744.00 frames.], tot_loss[loss=0.2061, simple_loss=0.2644, pruned_loss=0.07384, over 972202.70 frames.], batch size: 19, lr: 1.24e-03 +2022-05-03 18:11:24,710 INFO [train.py:715] (7/8) Epoch 0, 
batch 29000, loss[loss=0.2441, simple_loss=0.2907, pruned_loss=0.09879, over 4823.00 frames.], tot_loss[loss=0.2059, simple_loss=0.2646, pruned_loss=0.07358, over 972608.21 frames.], batch size: 15, lr: 1.24e-03 +2022-05-03 18:12:05,313 INFO [train.py:715] (7/8) Epoch 0, batch 29050, loss[loss=0.1655, simple_loss=0.2354, pruned_loss=0.04781, over 4904.00 frames.], tot_loss[loss=0.2051, simple_loss=0.264, pruned_loss=0.07308, over 972902.16 frames.], batch size: 19, lr: 1.24e-03 +2022-05-03 18:12:45,442 INFO [train.py:715] (7/8) Epoch 0, batch 29100, loss[loss=0.2705, simple_loss=0.3271, pruned_loss=0.1069, over 4797.00 frames.], tot_loss[loss=0.2052, simple_loss=0.2636, pruned_loss=0.07337, over 972845.70 frames.], batch size: 21, lr: 1.23e-03 +2022-05-03 18:13:25,064 INFO [train.py:715] (7/8) Epoch 0, batch 29150, loss[loss=0.1912, simple_loss=0.2567, pruned_loss=0.06283, over 4934.00 frames.], tot_loss[loss=0.2054, simple_loss=0.2634, pruned_loss=0.0737, over 973408.86 frames.], batch size: 23, lr: 1.23e-03 +2022-05-03 18:14:04,271 INFO [train.py:715] (7/8) Epoch 0, batch 29200, loss[loss=0.1751, simple_loss=0.2205, pruned_loss=0.06492, over 4825.00 frames.], tot_loss[loss=0.204, simple_loss=0.2618, pruned_loss=0.07317, over 972543.72 frames.], batch size: 12, lr: 1.23e-03 +2022-05-03 18:14:44,213 INFO [train.py:715] (7/8) Epoch 0, batch 29250, loss[loss=0.203, simple_loss=0.2661, pruned_loss=0.06999, over 4948.00 frames.], tot_loss[loss=0.2043, simple_loss=0.2625, pruned_loss=0.07301, over 972399.45 frames.], batch size: 23, lr: 1.23e-03 +2022-05-03 18:15:24,232 INFO [train.py:715] (7/8) Epoch 0, batch 29300, loss[loss=0.1663, simple_loss=0.2266, pruned_loss=0.05301, over 4875.00 frames.], tot_loss[loss=0.2035, simple_loss=0.2618, pruned_loss=0.07254, over 972891.54 frames.], batch size: 16, lr: 1.23e-03 +2022-05-03 18:16:04,639 INFO [train.py:715] (7/8) Epoch 0, batch 29350, loss[loss=0.1936, simple_loss=0.251, pruned_loss=0.06814, over 4831.00 frames.], tot_loss[loss=0.2029, simple_loss=0.2617, pruned_loss=0.07206, over 972543.82 frames.], batch size: 26, lr: 1.23e-03 +2022-05-03 18:16:44,084 INFO [train.py:715] (7/8) Epoch 0, batch 29400, loss[loss=0.1773, simple_loss=0.2552, pruned_loss=0.04965, over 4864.00 frames.], tot_loss[loss=0.2044, simple_loss=0.2628, pruned_loss=0.073, over 972681.70 frames.], batch size: 16, lr: 1.23e-03 +2022-05-03 18:17:23,555 INFO [train.py:715] (7/8) Epoch 0, batch 29450, loss[loss=0.2134, simple_loss=0.269, pruned_loss=0.07896, over 4757.00 frames.], tot_loss[loss=0.2042, simple_loss=0.2629, pruned_loss=0.07273, over 972580.20 frames.], batch size: 19, lr: 1.23e-03 +2022-05-03 18:18:03,750 INFO [train.py:715] (7/8) Epoch 0, batch 29500, loss[loss=0.2077, simple_loss=0.2669, pruned_loss=0.0743, over 4938.00 frames.], tot_loss[loss=0.2044, simple_loss=0.263, pruned_loss=0.07285, over 972713.11 frames.], batch size: 18, lr: 1.23e-03 +2022-05-03 18:18:42,857 INFO [train.py:715] (7/8) Epoch 0, batch 29550, loss[loss=0.1912, simple_loss=0.2546, pruned_loss=0.06383, over 4689.00 frames.], tot_loss[loss=0.205, simple_loss=0.2635, pruned_loss=0.07326, over 972784.87 frames.], batch size: 15, lr: 1.23e-03 +2022-05-03 18:19:23,021 INFO [train.py:715] (7/8) Epoch 0, batch 29600, loss[loss=0.1959, simple_loss=0.2481, pruned_loss=0.07184, over 4825.00 frames.], tot_loss[loss=0.2068, simple_loss=0.2649, pruned_loss=0.07433, over 972603.23 frames.], batch size: 30, lr: 1.22e-03 +2022-05-03 18:20:02,965 INFO [train.py:715] (7/8) Epoch 0, batch 29650, loss[loss=0.2234, 
simple_loss=0.2801, pruned_loss=0.08335, over 4878.00 frames.], tot_loss[loss=0.2064, simple_loss=0.2645, pruned_loss=0.0742, over 972618.31 frames.], batch size: 22, lr: 1.22e-03 +2022-05-03 18:20:42,828 INFO [train.py:715] (7/8) Epoch 0, batch 29700, loss[loss=0.1852, simple_loss=0.2577, pruned_loss=0.05637, over 4814.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2633, pruned_loss=0.07366, over 971483.02 frames.], batch size: 25, lr: 1.22e-03 +2022-05-03 18:21:23,327 INFO [train.py:715] (7/8) Epoch 0, batch 29750, loss[loss=0.1773, simple_loss=0.2332, pruned_loss=0.06064, over 4819.00 frames.], tot_loss[loss=0.2049, simple_loss=0.2631, pruned_loss=0.0734, over 970531.35 frames.], batch size: 25, lr: 1.22e-03 +2022-05-03 18:22:03,154 INFO [train.py:715] (7/8) Epoch 0, batch 29800, loss[loss=0.2292, simple_loss=0.2609, pruned_loss=0.09876, over 4832.00 frames.], tot_loss[loss=0.2042, simple_loss=0.2627, pruned_loss=0.07288, over 971471.64 frames.], batch size: 13, lr: 1.22e-03 +2022-05-03 18:22:44,063 INFO [train.py:715] (7/8) Epoch 0, batch 29850, loss[loss=0.2034, simple_loss=0.2569, pruned_loss=0.07495, over 4857.00 frames.], tot_loss[loss=0.2045, simple_loss=0.2629, pruned_loss=0.073, over 971889.58 frames.], batch size: 30, lr: 1.22e-03 +2022-05-03 18:23:23,992 INFO [train.py:715] (7/8) Epoch 0, batch 29900, loss[loss=0.2057, simple_loss=0.2566, pruned_loss=0.07738, over 4906.00 frames.], tot_loss[loss=0.2051, simple_loss=0.2633, pruned_loss=0.07346, over 970653.56 frames.], batch size: 17, lr: 1.22e-03 +2022-05-03 18:24:03,890 INFO [train.py:715] (7/8) Epoch 0, batch 29950, loss[loss=0.2369, simple_loss=0.2967, pruned_loss=0.08856, over 4823.00 frames.], tot_loss[loss=0.2069, simple_loss=0.2649, pruned_loss=0.07441, over 970943.87 frames.], batch size: 26, lr: 1.22e-03 +2022-05-03 18:24:43,766 INFO [train.py:715] (7/8) Epoch 0, batch 30000, loss[loss=0.1628, simple_loss=0.2277, pruned_loss=0.04895, over 4814.00 frames.], tot_loss[loss=0.2065, simple_loss=0.2642, pruned_loss=0.07444, over 971537.77 frames.], batch size: 25, lr: 1.22e-03 +2022-05-03 18:24:43,766 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 18:25:00,381 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1316, simple_loss=0.2189, pruned_loss=0.02213, over 914524.00 frames. 
+2022-05-03 18:25:40,686 INFO [train.py:715] (7/8) Epoch 0, batch 30050, loss[loss=0.2241, simple_loss=0.276, pruned_loss=0.08611, over 4884.00 frames.], tot_loss[loss=0.2061, simple_loss=0.2637, pruned_loss=0.07419, over 971340.46 frames.], batch size: 22, lr: 1.22e-03 +2022-05-03 18:26:21,237 INFO [train.py:715] (7/8) Epoch 0, batch 30100, loss[loss=0.2246, simple_loss=0.2788, pruned_loss=0.08521, over 4681.00 frames.], tot_loss[loss=0.2069, simple_loss=0.2643, pruned_loss=0.0747, over 971251.64 frames.], batch size: 15, lr: 1.21e-03 +2022-05-03 18:27:01,917 INFO [train.py:715] (7/8) Epoch 0, batch 30150, loss[loss=0.1844, simple_loss=0.2444, pruned_loss=0.0622, over 4744.00 frames.], tot_loss[loss=0.2066, simple_loss=0.2643, pruned_loss=0.07446, over 971125.51 frames.], batch size: 16, lr: 1.21e-03 +2022-05-03 18:27:42,054 INFO [train.py:715] (7/8) Epoch 0, batch 30200, loss[loss=0.2266, simple_loss=0.2697, pruned_loss=0.09174, over 4649.00 frames.], tot_loss[loss=0.2054, simple_loss=0.2638, pruned_loss=0.07355, over 971829.66 frames.], batch size: 13, lr: 1.21e-03 +2022-05-03 18:28:22,545 INFO [train.py:715] (7/8) Epoch 0, batch 30250, loss[loss=0.2352, simple_loss=0.281, pruned_loss=0.09468, over 4963.00 frames.], tot_loss[loss=0.205, simple_loss=0.263, pruned_loss=0.07344, over 972546.34 frames.], batch size: 28, lr: 1.21e-03 +2022-05-03 18:29:02,646 INFO [train.py:715] (7/8) Epoch 0, batch 30300, loss[loss=0.1921, simple_loss=0.2665, pruned_loss=0.05885, over 4817.00 frames.], tot_loss[loss=0.204, simple_loss=0.2625, pruned_loss=0.0727, over 972824.02 frames.], batch size: 27, lr: 1.21e-03 +2022-05-03 18:29:43,073 INFO [train.py:715] (7/8) Epoch 0, batch 30350, loss[loss=0.2059, simple_loss=0.2428, pruned_loss=0.08449, over 4829.00 frames.], tot_loss[loss=0.2025, simple_loss=0.2614, pruned_loss=0.07179, over 972665.27 frames.], batch size: 30, lr: 1.21e-03 +2022-05-03 18:30:23,203 INFO [train.py:715] (7/8) Epoch 0, batch 30400, loss[loss=0.1724, simple_loss=0.2337, pruned_loss=0.05551, over 4962.00 frames.], tot_loss[loss=0.2009, simple_loss=0.2601, pruned_loss=0.07081, over 972881.89 frames.], batch size: 24, lr: 1.21e-03 +2022-05-03 18:31:02,970 INFO [train.py:715] (7/8) Epoch 0, batch 30450, loss[loss=0.2057, simple_loss=0.2735, pruned_loss=0.06891, over 4816.00 frames.], tot_loss[loss=0.2021, simple_loss=0.2612, pruned_loss=0.07153, over 971820.77 frames.], batch size: 25, lr: 1.21e-03 +2022-05-03 18:31:42,726 INFO [train.py:715] (7/8) Epoch 0, batch 30500, loss[loss=0.2112, simple_loss=0.2753, pruned_loss=0.07351, over 4769.00 frames.], tot_loss[loss=0.2015, simple_loss=0.2605, pruned_loss=0.07129, over 971622.51 frames.], batch size: 17, lr: 1.21e-03 +2022-05-03 18:32:22,644 INFO [train.py:715] (7/8) Epoch 0, batch 30550, loss[loss=0.2121, simple_loss=0.2673, pruned_loss=0.07845, over 4751.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2593, pruned_loss=0.07085, over 971076.08 frames.], batch size: 19, lr: 1.21e-03 +2022-05-03 18:33:01,764 INFO [train.py:715] (7/8) Epoch 0, batch 30600, loss[loss=0.2178, simple_loss=0.2745, pruned_loss=0.08059, over 4842.00 frames.], tot_loss[loss=0.1996, simple_loss=0.2582, pruned_loss=0.0705, over 971444.12 frames.], batch size: 15, lr: 1.20e-03 +2022-05-03 18:33:41,707 INFO [train.py:715] (7/8) Epoch 0, batch 30650, loss[loss=0.211, simple_loss=0.2742, pruned_loss=0.07391, over 4877.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2592, pruned_loss=0.07066, over 971916.72 frames.], batch size: 38, lr: 1.20e-03 +2022-05-03 18:34:21,524 
INFO [train.py:715] (7/8) Epoch 0, batch 30700, loss[loss=0.2101, simple_loss=0.2661, pruned_loss=0.07705, over 4983.00 frames.], tot_loss[loss=0.2, simple_loss=0.2589, pruned_loss=0.07054, over 972028.58 frames.], batch size: 15, lr: 1.20e-03 +2022-05-03 18:35:01,624 INFO [train.py:715] (7/8) Epoch 0, batch 30750, loss[loss=0.2112, simple_loss=0.2696, pruned_loss=0.07635, over 4755.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2594, pruned_loss=0.07045, over 971121.54 frames.], batch size: 19, lr: 1.20e-03 +2022-05-03 18:35:40,973 INFO [train.py:715] (7/8) Epoch 0, batch 30800, loss[loss=0.1936, simple_loss=0.2619, pruned_loss=0.06264, over 4917.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2594, pruned_loss=0.07066, over 971324.46 frames.], batch size: 18, lr: 1.20e-03 +2022-05-03 18:36:21,309 INFO [train.py:715] (7/8) Epoch 0, batch 30850, loss[loss=0.2194, simple_loss=0.2825, pruned_loss=0.0781, over 4765.00 frames.], tot_loss[loss=0.2027, simple_loss=0.2618, pruned_loss=0.07183, over 971079.71 frames.], batch size: 18, lr: 1.20e-03 +2022-05-03 18:37:01,155 INFO [train.py:715] (7/8) Epoch 0, batch 30900, loss[loss=0.1722, simple_loss=0.2428, pruned_loss=0.05085, over 4824.00 frames.], tot_loss[loss=0.2021, simple_loss=0.2612, pruned_loss=0.07151, over 971629.15 frames.], batch size: 26, lr: 1.20e-03 +2022-05-03 18:37:40,863 INFO [train.py:715] (7/8) Epoch 0, batch 30950, loss[loss=0.2051, simple_loss=0.269, pruned_loss=0.07062, over 4756.00 frames.], tot_loss[loss=0.2032, simple_loss=0.2619, pruned_loss=0.07231, over 971754.54 frames.], batch size: 16, lr: 1.20e-03 +2022-05-03 18:38:20,957 INFO [train.py:715] (7/8) Epoch 0, batch 31000, loss[loss=0.1942, simple_loss=0.2677, pruned_loss=0.06035, over 4920.00 frames.], tot_loss[loss=0.2031, simple_loss=0.2617, pruned_loss=0.07224, over 972764.26 frames.], batch size: 23, lr: 1.20e-03 +2022-05-03 18:39:00,970 INFO [train.py:715] (7/8) Epoch 0, batch 31050, loss[loss=0.1756, simple_loss=0.2323, pruned_loss=0.0595, over 4785.00 frames.], tot_loss[loss=0.2033, simple_loss=0.2614, pruned_loss=0.07261, over 972354.37 frames.], batch size: 18, lr: 1.20e-03 +2022-05-03 18:39:40,373 INFO [train.py:715] (7/8) Epoch 0, batch 31100, loss[loss=0.1804, simple_loss=0.2542, pruned_loss=0.05332, over 4802.00 frames.], tot_loss[loss=0.2039, simple_loss=0.2619, pruned_loss=0.07299, over 971900.33 frames.], batch size: 13, lr: 1.20e-03 +2022-05-03 18:40:19,543 INFO [train.py:715] (7/8) Epoch 0, batch 31150, loss[loss=0.1814, simple_loss=0.2448, pruned_loss=0.05901, over 4839.00 frames.], tot_loss[loss=0.2041, simple_loss=0.2623, pruned_loss=0.07293, over 972086.31 frames.], batch size: 15, lr: 1.19e-03 +2022-05-03 18:40:59,617 INFO [train.py:715] (7/8) Epoch 0, batch 31200, loss[loss=0.2197, simple_loss=0.2746, pruned_loss=0.08236, over 4787.00 frames.], tot_loss[loss=0.2026, simple_loss=0.261, pruned_loss=0.07209, over 972709.28 frames.], batch size: 18, lr: 1.19e-03 +2022-05-03 18:41:39,411 INFO [train.py:715] (7/8) Epoch 0, batch 31250, loss[loss=0.2048, simple_loss=0.275, pruned_loss=0.06728, over 4979.00 frames.], tot_loss[loss=0.2025, simple_loss=0.2611, pruned_loss=0.072, over 972485.24 frames.], batch size: 25, lr: 1.19e-03 +2022-05-03 18:42:18,887 INFO [train.py:715] (7/8) Epoch 0, batch 31300, loss[loss=0.1568, simple_loss=0.219, pruned_loss=0.04733, over 4847.00 frames.], tot_loss[loss=0.202, simple_loss=0.2606, pruned_loss=0.07171, over 972809.10 frames.], batch size: 13, lr: 1.19e-03 +2022-05-03 18:42:59,223 INFO [train.py:715] (7/8) 
Epoch 0, batch 31350, loss[loss=0.2167, simple_loss=0.2821, pruned_loss=0.07565, over 4796.00 frames.], tot_loss[loss=0.2019, simple_loss=0.2608, pruned_loss=0.07148, over 972821.57 frames.], batch size: 14, lr: 1.19e-03 +2022-05-03 18:43:38,898 INFO [train.py:715] (7/8) Epoch 0, batch 31400, loss[loss=0.256, simple_loss=0.3123, pruned_loss=0.09979, over 4950.00 frames.], tot_loss[loss=0.2025, simple_loss=0.2612, pruned_loss=0.07187, over 972821.33 frames.], batch size: 39, lr: 1.19e-03 +2022-05-03 18:44:18,171 INFO [train.py:715] (7/8) Epoch 0, batch 31450, loss[loss=0.1677, simple_loss=0.2355, pruned_loss=0.04998, over 4986.00 frames.], tot_loss[loss=0.2026, simple_loss=0.2613, pruned_loss=0.07191, over 972396.84 frames.], batch size: 35, lr: 1.19e-03 +2022-05-03 18:44:57,278 INFO [train.py:715] (7/8) Epoch 0, batch 31500, loss[loss=0.1926, simple_loss=0.2511, pruned_loss=0.06701, over 4759.00 frames.], tot_loss[loss=0.202, simple_loss=0.2609, pruned_loss=0.07153, over 972765.74 frames.], batch size: 19, lr: 1.19e-03 +2022-05-03 18:45:37,326 INFO [train.py:715] (7/8) Epoch 0, batch 31550, loss[loss=0.193, simple_loss=0.2544, pruned_loss=0.06585, over 4958.00 frames.], tot_loss[loss=0.2023, simple_loss=0.2613, pruned_loss=0.0717, over 973235.35 frames.], batch size: 14, lr: 1.19e-03 +2022-05-03 18:46:17,102 INFO [train.py:715] (7/8) Epoch 0, batch 31600, loss[loss=0.1726, simple_loss=0.2281, pruned_loss=0.05856, over 4864.00 frames.], tot_loss[loss=0.2027, simple_loss=0.2615, pruned_loss=0.07191, over 972845.87 frames.], batch size: 22, lr: 1.19e-03 +2022-05-03 18:46:56,337 INFO [train.py:715] (7/8) Epoch 0, batch 31650, loss[loss=0.2066, simple_loss=0.2677, pruned_loss=0.07277, over 4908.00 frames.], tot_loss[loss=0.2021, simple_loss=0.2614, pruned_loss=0.07136, over 973345.13 frames.], batch size: 17, lr: 1.19e-03 +2022-05-03 18:47:36,249 INFO [train.py:715] (7/8) Epoch 0, batch 31700, loss[loss=0.1673, simple_loss=0.2314, pruned_loss=0.05164, over 4779.00 frames.], tot_loss[loss=0.2015, simple_loss=0.261, pruned_loss=0.07104, over 973231.85 frames.], batch size: 17, lr: 1.18e-03 +2022-05-03 18:48:16,474 INFO [train.py:715] (7/8) Epoch 0, batch 31750, loss[loss=0.1965, simple_loss=0.2556, pruned_loss=0.06872, over 4913.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2611, pruned_loss=0.07079, over 973619.29 frames.], batch size: 17, lr: 1.18e-03 +2022-05-03 18:48:56,201 INFO [train.py:715] (7/8) Epoch 0, batch 31800, loss[loss=0.2217, simple_loss=0.2752, pruned_loss=0.08406, over 4980.00 frames.], tot_loss[loss=0.2009, simple_loss=0.2604, pruned_loss=0.07073, over 973812.67 frames.], batch size: 20, lr: 1.18e-03 +2022-05-03 18:49:35,471 INFO [train.py:715] (7/8) Epoch 0, batch 31850, loss[loss=0.2118, simple_loss=0.2656, pruned_loss=0.07898, over 4954.00 frames.], tot_loss[loss=0.1994, simple_loss=0.2593, pruned_loss=0.06976, over 972894.01 frames.], batch size: 23, lr: 1.18e-03 +2022-05-03 18:50:15,969 INFO [train.py:715] (7/8) Epoch 0, batch 31900, loss[loss=0.2124, simple_loss=0.2801, pruned_loss=0.07234, over 4888.00 frames.], tot_loss[loss=0.1995, simple_loss=0.2592, pruned_loss=0.06988, over 971735.54 frames.], batch size: 22, lr: 1.18e-03 +2022-05-03 18:50:55,676 INFO [train.py:715] (7/8) Epoch 0, batch 31950, loss[loss=0.1629, simple_loss=0.2317, pruned_loss=0.04702, over 4954.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2604, pruned_loss=0.0703, over 972944.78 frames.], batch size: 14, lr: 1.18e-03 +2022-05-03 18:51:37,237 INFO [train.py:715] (7/8) Epoch 0, batch 32000, 
loss[loss=0.2205, simple_loss=0.2708, pruned_loss=0.08511, over 4987.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2613, pruned_loss=0.07077, over 973242.38 frames.], batch size: 14, lr: 1.18e-03 +2022-05-03 18:52:17,389 INFO [train.py:715] (7/8) Epoch 0, batch 32050, loss[loss=0.1453, simple_loss=0.2045, pruned_loss=0.04307, over 4772.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2607, pruned_loss=0.07091, over 972669.50 frames.], batch size: 12, lr: 1.18e-03 +2022-05-03 18:52:57,286 INFO [train.py:715] (7/8) Epoch 0, batch 32100, loss[loss=0.202, simple_loss=0.2681, pruned_loss=0.06796, over 4757.00 frames.], tot_loss[loss=0.201, simple_loss=0.2606, pruned_loss=0.07066, over 972405.98 frames.], batch size: 19, lr: 1.18e-03 +2022-05-03 18:53:36,630 INFO [train.py:715] (7/8) Epoch 0, batch 32150, loss[loss=0.1731, simple_loss=0.246, pruned_loss=0.05013, over 4813.00 frames.], tot_loss[loss=0.2007, simple_loss=0.2601, pruned_loss=0.07065, over 973343.91 frames.], batch size: 26, lr: 1.18e-03 +2022-05-03 18:54:15,806 INFO [train.py:715] (7/8) Epoch 0, batch 32200, loss[loss=0.2051, simple_loss=0.2588, pruned_loss=0.07566, over 4957.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2602, pruned_loss=0.07128, over 973296.09 frames.], batch size: 29, lr: 1.18e-03 +2022-05-03 18:54:55,965 INFO [train.py:715] (7/8) Epoch 0, batch 32250, loss[loss=0.1834, simple_loss=0.2478, pruned_loss=0.05946, over 4908.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2603, pruned_loss=0.07112, over 973434.93 frames.], batch size: 17, lr: 1.17e-03 +2022-05-03 18:55:35,812 INFO [train.py:715] (7/8) Epoch 0, batch 32300, loss[loss=0.2321, simple_loss=0.274, pruned_loss=0.09514, over 4789.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2605, pruned_loss=0.07106, over 973565.49 frames.], batch size: 17, lr: 1.17e-03 +2022-05-03 18:56:15,320 INFO [train.py:715] (7/8) Epoch 0, batch 32350, loss[loss=0.1876, simple_loss=0.2322, pruned_loss=0.07144, over 4758.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2601, pruned_loss=0.0704, over 972778.15 frames.], batch size: 19, lr: 1.17e-03 +2022-05-03 18:56:55,316 INFO [train.py:715] (7/8) Epoch 0, batch 32400, loss[loss=0.1998, simple_loss=0.2395, pruned_loss=0.08007, over 4775.00 frames.], tot_loss[loss=0.1993, simple_loss=0.2587, pruned_loss=0.06992, over 973353.57 frames.], batch size: 12, lr: 1.17e-03 +2022-05-03 18:57:35,390 INFO [train.py:715] (7/8) Epoch 0, batch 32450, loss[loss=0.1934, simple_loss=0.2507, pruned_loss=0.06809, over 4950.00 frames.], tot_loss[loss=0.1997, simple_loss=0.259, pruned_loss=0.07022, over 972803.68 frames.], batch size: 21, lr: 1.17e-03 +2022-05-03 18:58:15,187 INFO [train.py:715] (7/8) Epoch 0, batch 32500, loss[loss=0.1958, simple_loss=0.2472, pruned_loss=0.07217, over 4889.00 frames.], tot_loss[loss=0.2008, simple_loss=0.2598, pruned_loss=0.0709, over 972710.93 frames.], batch size: 17, lr: 1.17e-03 +2022-05-03 18:58:54,510 INFO [train.py:715] (7/8) Epoch 0, batch 32550, loss[loss=0.1551, simple_loss=0.2295, pruned_loss=0.04032, over 4990.00 frames.], tot_loss[loss=0.2009, simple_loss=0.2601, pruned_loss=0.07091, over 973107.28 frames.], batch size: 14, lr: 1.17e-03 +2022-05-03 18:59:34,023 INFO [train.py:715] (7/8) Epoch 0, batch 32600, loss[loss=0.2051, simple_loss=0.2674, pruned_loss=0.07137, over 4974.00 frames.], tot_loss[loss=0.2007, simple_loss=0.2603, pruned_loss=0.07053, over 973125.07 frames.], batch size: 15, lr: 1.17e-03 +2022-05-03 19:00:13,284 INFO [train.py:715] (7/8) Epoch 0, batch 32650, loss[loss=0.1649, 
simple_loss=0.2298, pruned_loss=0.05001, over 4792.00 frames.], tot_loss[loss=0.1995, simple_loss=0.2594, pruned_loss=0.06983, over 973623.22 frames.], batch size: 12, lr: 1.17e-03 +2022-05-03 19:00:52,622 INFO [train.py:715] (7/8) Epoch 0, batch 32700, loss[loss=0.2047, simple_loss=0.2519, pruned_loss=0.0787, over 4806.00 frames.], tot_loss[loss=0.1993, simple_loss=0.2591, pruned_loss=0.06975, over 973103.59 frames.], batch size: 25, lr: 1.17e-03 +2022-05-03 19:01:32,100 INFO [train.py:715] (7/8) Epoch 0, batch 32750, loss[loss=0.1806, simple_loss=0.2416, pruned_loss=0.05986, over 4831.00 frames.], tot_loss[loss=0.1981, simple_loss=0.2579, pruned_loss=0.06913, over 973229.95 frames.], batch size: 25, lr: 1.17e-03 +2022-05-03 19:02:12,131 INFO [train.py:715] (7/8) Epoch 0, batch 32800, loss[loss=0.2157, simple_loss=0.282, pruned_loss=0.0747, over 4900.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2587, pruned_loss=0.0693, over 973148.45 frames.], batch size: 19, lr: 1.16e-03 +2022-05-03 19:02:51,639 INFO [train.py:715] (7/8) Epoch 0, batch 32850, loss[loss=0.1866, simple_loss=0.2517, pruned_loss=0.06073, over 4858.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2585, pruned_loss=0.06916, over 973551.14 frames.], batch size: 20, lr: 1.16e-03 +2022-05-03 19:03:31,124 INFO [train.py:715] (7/8) Epoch 0, batch 32900, loss[loss=0.2215, simple_loss=0.278, pruned_loss=0.08255, over 4927.00 frames.], tot_loss[loss=0.1978, simple_loss=0.2579, pruned_loss=0.06887, over 973264.49 frames.], batch size: 17, lr: 1.16e-03 +2022-05-03 19:04:11,184 INFO [train.py:715] (7/8) Epoch 0, batch 32950, loss[loss=0.1818, simple_loss=0.2523, pruned_loss=0.05559, over 4972.00 frames.], tot_loss[loss=0.1986, simple_loss=0.2583, pruned_loss=0.06941, over 972723.16 frames.], batch size: 15, lr: 1.16e-03 +2022-05-03 19:04:50,688 INFO [train.py:715] (7/8) Epoch 0, batch 33000, loss[loss=0.1902, simple_loss=0.2551, pruned_loss=0.06262, over 4883.00 frames.], tot_loss[loss=0.1978, simple_loss=0.2574, pruned_loss=0.06909, over 973027.50 frames.], batch size: 16, lr: 1.16e-03 +2022-05-03 19:04:50,689 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 19:05:00,798 INFO [train.py:742] (7/8) Epoch 0, validation: loss=0.1303, simple_loss=0.2174, pruned_loss=0.02158, over 914524.00 frames. 
+2022-05-03 19:05:40,739 INFO [train.py:715] (7/8) Epoch 0, batch 33050, loss[loss=0.2019, simple_loss=0.2683, pruned_loss=0.06777, over 4935.00 frames.], tot_loss[loss=0.1977, simple_loss=0.2575, pruned_loss=0.06893, over 972973.62 frames.], batch size: 23, lr: 1.16e-03 +2022-05-03 19:06:20,348 INFO [train.py:715] (7/8) Epoch 0, batch 33100, loss[loss=0.2104, simple_loss=0.2747, pruned_loss=0.07309, over 4831.00 frames.], tot_loss[loss=0.1977, simple_loss=0.2574, pruned_loss=0.06898, over 972052.62 frames.], batch size: 30, lr: 1.16e-03 +2022-05-03 19:07:01,022 INFO [train.py:715] (7/8) Epoch 0, batch 33150, loss[loss=0.1942, simple_loss=0.2599, pruned_loss=0.06422, over 4856.00 frames.], tot_loss[loss=0.1987, simple_loss=0.258, pruned_loss=0.06971, over 971769.94 frames.], batch size: 20, lr: 1.16e-03 +2022-05-03 19:07:41,363 INFO [train.py:715] (7/8) Epoch 0, batch 33200, loss[loss=0.1971, simple_loss=0.2592, pruned_loss=0.06749, over 4825.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2592, pruned_loss=0.07057, over 972583.69 frames.], batch size: 26, lr: 1.16e-03 +2022-05-03 19:08:21,598 INFO [train.py:715] (7/8) Epoch 0, batch 33250, loss[loss=0.1739, simple_loss=0.2419, pruned_loss=0.05295, over 4835.00 frames.], tot_loss[loss=0.1996, simple_loss=0.2591, pruned_loss=0.07011, over 972175.89 frames.], batch size: 15, lr: 1.16e-03 +2022-05-03 19:09:01,809 INFO [train.py:715] (7/8) Epoch 0, batch 33300, loss[loss=0.2125, simple_loss=0.2627, pruned_loss=0.08118, over 4858.00 frames.], tot_loss[loss=0.2009, simple_loss=0.2603, pruned_loss=0.07078, over 972361.16 frames.], batch size: 32, lr: 1.16e-03 +2022-05-03 19:09:42,529 INFO [train.py:715] (7/8) Epoch 0, batch 33350, loss[loss=0.2073, simple_loss=0.2689, pruned_loss=0.07285, over 4909.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2598, pruned_loss=0.07034, over 972477.67 frames.], batch size: 17, lr: 1.16e-03 +2022-05-03 19:10:22,678 INFO [train.py:715] (7/8) Epoch 0, batch 33400, loss[loss=0.2055, simple_loss=0.2591, pruned_loss=0.07589, over 4947.00 frames.], tot_loss[loss=0.1994, simple_loss=0.2585, pruned_loss=0.07011, over 972123.66 frames.], batch size: 35, lr: 1.15e-03 +2022-05-03 19:11:02,702 INFO [train.py:715] (7/8) Epoch 0, batch 33450, loss[loss=0.2291, simple_loss=0.292, pruned_loss=0.08312, over 4878.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2594, pruned_loss=0.0702, over 971780.75 frames.], batch size: 16, lr: 1.15e-03 +2022-05-03 19:11:43,357 INFO [train.py:715] (7/8) Epoch 0, batch 33500, loss[loss=0.2336, simple_loss=0.2915, pruned_loss=0.08786, over 4777.00 frames.], tot_loss[loss=0.2006, simple_loss=0.2599, pruned_loss=0.07061, over 971108.69 frames.], batch size: 17, lr: 1.15e-03 +2022-05-03 19:12:23,717 INFO [train.py:715] (7/8) Epoch 0, batch 33550, loss[loss=0.2261, simple_loss=0.282, pruned_loss=0.08507, over 4934.00 frames.], tot_loss[loss=0.1993, simple_loss=0.2586, pruned_loss=0.06998, over 971198.42 frames.], batch size: 23, lr: 1.15e-03 +2022-05-03 19:13:02,898 INFO [train.py:715] (7/8) Epoch 0, batch 33600, loss[loss=0.2172, simple_loss=0.2678, pruned_loss=0.08331, over 4786.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2591, pruned_loss=0.07029, over 970865.83 frames.], batch size: 14, lr: 1.15e-03 +2022-05-03 19:13:43,471 INFO [train.py:715] (7/8) Epoch 0, batch 33650, loss[loss=0.1988, simple_loss=0.2625, pruned_loss=0.0676, over 4916.00 frames.], tot_loss[loss=0.2001, simple_loss=0.2593, pruned_loss=0.0704, over 971264.03 frames.], batch size: 23, lr: 1.15e-03 +2022-05-03 19:14:23,808 
INFO [train.py:715] (7/8) Epoch 0, batch 33700, loss[loss=0.2078, simple_loss=0.2512, pruned_loss=0.08217, over 4875.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2595, pruned_loss=0.07047, over 971020.61 frames.], batch size: 32, lr: 1.15e-03 +2022-05-03 19:15:03,033 INFO [train.py:715] (7/8) Epoch 0, batch 33750, loss[loss=0.1646, simple_loss=0.2385, pruned_loss=0.0454, over 4886.00 frames.], tot_loss[loss=0.201, simple_loss=0.2602, pruned_loss=0.07092, over 971045.45 frames.], batch size: 22, lr: 1.15e-03 +2022-05-03 19:15:42,519 INFO [train.py:715] (7/8) Epoch 0, batch 33800, loss[loss=0.2005, simple_loss=0.2534, pruned_loss=0.07375, over 4694.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2593, pruned_loss=0.07012, over 971598.66 frames.], batch size: 15, lr: 1.15e-03 +2022-05-03 19:16:22,770 INFO [train.py:715] (7/8) Epoch 0, batch 33850, loss[loss=0.2663, simple_loss=0.3079, pruned_loss=0.1124, over 4928.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2593, pruned_loss=0.07014, over 971395.34 frames.], batch size: 39, lr: 1.15e-03 +2022-05-03 19:17:02,061 INFO [train.py:715] (7/8) Epoch 0, batch 33900, loss[loss=0.1994, simple_loss=0.2554, pruned_loss=0.07173, over 4822.00 frames.], tot_loss[loss=0.1985, simple_loss=0.2581, pruned_loss=0.0695, over 970455.19 frames.], batch size: 25, lr: 1.15e-03 +2022-05-03 19:17:41,117 INFO [train.py:715] (7/8) Epoch 0, batch 33950, loss[loss=0.2411, simple_loss=0.3048, pruned_loss=0.08865, over 4957.00 frames.], tot_loss[loss=0.1989, simple_loss=0.2583, pruned_loss=0.0697, over 970709.79 frames.], batch size: 15, lr: 1.15e-03 +2022-05-03 19:18:21,086 INFO [train.py:715] (7/8) Epoch 0, batch 34000, loss[loss=0.2712, simple_loss=0.3124, pruned_loss=0.115, over 4936.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2592, pruned_loss=0.07029, over 971135.61 frames.], batch size: 29, lr: 1.14e-03 +2022-05-03 19:19:00,966 INFO [train.py:715] (7/8) Epoch 0, batch 34050, loss[loss=0.2021, simple_loss=0.2728, pruned_loss=0.06575, over 4787.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2592, pruned_loss=0.07015, over 971655.77 frames.], batch size: 18, lr: 1.14e-03 +2022-05-03 19:19:40,632 INFO [train.py:715] (7/8) Epoch 0, batch 34100, loss[loss=0.1879, simple_loss=0.2449, pruned_loss=0.06545, over 4851.00 frames.], tot_loss[loss=0.2011, simple_loss=0.2605, pruned_loss=0.07085, over 971302.39 frames.], batch size: 13, lr: 1.14e-03 +2022-05-03 19:20:19,824 INFO [train.py:715] (7/8) Epoch 0, batch 34150, loss[loss=0.1931, simple_loss=0.2524, pruned_loss=0.06688, over 4963.00 frames.], tot_loss[loss=0.2007, simple_loss=0.2603, pruned_loss=0.07054, over 971321.67 frames.], batch size: 35, lr: 1.14e-03 +2022-05-03 19:20:59,755 INFO [train.py:715] (7/8) Epoch 0, batch 34200, loss[loss=0.2183, simple_loss=0.27, pruned_loss=0.08333, over 4912.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2595, pruned_loss=0.07017, over 971930.69 frames.], batch size: 17, lr: 1.14e-03 +2022-05-03 19:21:39,300 INFO [train.py:715] (7/8) Epoch 0, batch 34250, loss[loss=0.1917, simple_loss=0.2426, pruned_loss=0.07039, over 4815.00 frames.], tot_loss[loss=0.1992, simple_loss=0.2589, pruned_loss=0.06975, over 970886.45 frames.], batch size: 25, lr: 1.14e-03 +2022-05-03 19:22:18,599 INFO [train.py:715] (7/8) Epoch 0, batch 34300, loss[loss=0.1941, simple_loss=0.2583, pruned_loss=0.06501, over 4779.00 frames.], tot_loss[loss=0.1976, simple_loss=0.2576, pruned_loss=0.06879, over 970508.54 frames.], batch size: 17, lr: 1.14e-03 +2022-05-03 19:22:58,858 INFO [train.py:715] (7/8) 
Epoch 0, batch 34350, loss[loss=0.1916, simple_loss=0.2502, pruned_loss=0.06657, over 4843.00 frames.], tot_loss[loss=0.1976, simple_loss=0.2574, pruned_loss=0.06892, over 971082.49 frames.], batch size: 30, lr: 1.14e-03 +2022-05-03 19:23:39,060 INFO [train.py:715] (7/8) Epoch 0, batch 34400, loss[loss=0.1978, simple_loss=0.2571, pruned_loss=0.06925, over 4885.00 frames.], tot_loss[loss=0.1996, simple_loss=0.259, pruned_loss=0.07014, over 971796.40 frames.], batch size: 22, lr: 1.14e-03 +2022-05-03 19:24:18,633 INFO [train.py:715] (7/8) Epoch 0, batch 34450, loss[loss=0.1954, simple_loss=0.2572, pruned_loss=0.06685, over 4822.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2593, pruned_loss=0.07011, over 972527.35 frames.], batch size: 14, lr: 1.14e-03 +2022-05-03 19:24:57,904 INFO [train.py:715] (7/8) Epoch 0, batch 34500, loss[loss=0.2209, simple_loss=0.2745, pruned_loss=0.08365, over 4931.00 frames.], tot_loss[loss=0.1995, simple_loss=0.259, pruned_loss=0.07001, over 972697.72 frames.], batch size: 21, lr: 1.14e-03 +2022-05-03 19:25:38,244 INFO [train.py:715] (7/8) Epoch 0, batch 34550, loss[loss=0.239, simple_loss=0.2832, pruned_loss=0.09741, over 4854.00 frames.], tot_loss[loss=0.2, simple_loss=0.2596, pruned_loss=0.07025, over 972556.64 frames.], batch size: 32, lr: 1.14e-03 +2022-05-03 19:26:17,983 INFO [train.py:715] (7/8) Epoch 0, batch 34600, loss[loss=0.1678, simple_loss=0.2425, pruned_loss=0.04659, over 4980.00 frames.], tot_loss[loss=0.2, simple_loss=0.2593, pruned_loss=0.07035, over 973244.10 frames.], batch size: 28, lr: 1.13e-03 +2022-05-03 19:26:57,214 INFO [train.py:715] (7/8) Epoch 0, batch 34650, loss[loss=0.1758, simple_loss=0.2408, pruned_loss=0.05539, over 4774.00 frames.], tot_loss[loss=0.2006, simple_loss=0.2594, pruned_loss=0.07084, over 972812.93 frames.], batch size: 18, lr: 1.13e-03 +2022-05-03 19:27:37,741 INFO [train.py:715] (7/8) Epoch 0, batch 34700, loss[loss=0.1624, simple_loss=0.2317, pruned_loss=0.04651, over 4825.00 frames.], tot_loss[loss=0.1997, simple_loss=0.2589, pruned_loss=0.07022, over 971742.38 frames.], batch size: 13, lr: 1.13e-03 +2022-05-03 19:28:15,924 INFO [train.py:715] (7/8) Epoch 0, batch 34750, loss[loss=0.1755, simple_loss=0.2438, pruned_loss=0.05362, over 4917.00 frames.], tot_loss[loss=0.1984, simple_loss=0.258, pruned_loss=0.06944, over 970836.25 frames.], batch size: 29, lr: 1.13e-03 +2022-05-03 19:28:53,216 INFO [train.py:715] (7/8) Epoch 0, batch 34800, loss[loss=0.1962, simple_loss=0.267, pruned_loss=0.0627, over 4912.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2578, pruned_loss=0.06954, over 971262.43 frames.], batch size: 18, lr: 1.13e-03 +2022-05-03 19:29:42,573 INFO [train.py:715] (7/8) Epoch 1, batch 0, loss[loss=0.1743, simple_loss=0.2283, pruned_loss=0.06013, over 4839.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2283, pruned_loss=0.06013, over 4839.00 frames.], batch size: 30, lr: 1.11e-03 +2022-05-03 19:30:21,874 INFO [train.py:715] (7/8) Epoch 1, batch 50, loss[loss=0.2409, simple_loss=0.2856, pruned_loss=0.0981, over 4869.00 frames.], tot_loss[loss=0.1982, simple_loss=0.257, pruned_loss=0.06969, over 219695.04 frames.], batch size: 32, lr: 1.11e-03 +2022-05-03 19:31:01,845 INFO [train.py:715] (7/8) Epoch 1, batch 100, loss[loss=0.1876, simple_loss=0.2536, pruned_loss=0.06082, over 4768.00 frames.], tot_loss[loss=0.1976, simple_loss=0.2571, pruned_loss=0.06905, over 386896.19 frames.], batch size: 18, lr: 1.11e-03 +2022-05-03 19:31:41,283 INFO [train.py:715] (7/8) Epoch 1, batch 150, loss[loss=0.1596, 
simple_loss=0.235, pruned_loss=0.04206, over 4812.00 frames.], tot_loss[loss=0.1981, simple_loss=0.2575, pruned_loss=0.06935, over 515728.04 frames.], batch size: 25, lr: 1.11e-03 +2022-05-03 19:32:20,521 INFO [train.py:715] (7/8) Epoch 1, batch 200, loss[loss=0.1827, simple_loss=0.2439, pruned_loss=0.06069, over 4889.00 frames.], tot_loss[loss=0.1956, simple_loss=0.255, pruned_loss=0.06809, over 617183.41 frames.], batch size: 19, lr: 1.11e-03 +2022-05-03 19:33:00,058 INFO [train.py:715] (7/8) Epoch 1, batch 250, loss[loss=0.2222, simple_loss=0.2837, pruned_loss=0.08036, over 4883.00 frames.], tot_loss[loss=0.1971, simple_loss=0.2569, pruned_loss=0.06866, over 695800.81 frames.], batch size: 16, lr: 1.11e-03 +2022-05-03 19:33:40,743 INFO [train.py:715] (7/8) Epoch 1, batch 300, loss[loss=0.1903, simple_loss=0.2434, pruned_loss=0.06859, over 4910.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2571, pruned_loss=0.06838, over 756407.36 frames.], batch size: 19, lr: 1.11e-03 +2022-05-03 19:34:21,110 INFO [train.py:715] (7/8) Epoch 1, batch 350, loss[loss=0.191, simple_loss=0.2655, pruned_loss=0.05827, over 4739.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2553, pruned_loss=0.06755, over 803259.96 frames.], batch size: 16, lr: 1.11e-03 +2022-05-03 19:35:01,382 INFO [train.py:715] (7/8) Epoch 1, batch 400, loss[loss=0.1904, simple_loss=0.2433, pruned_loss=0.06875, over 4797.00 frames.], tot_loss[loss=0.1959, simple_loss=0.2561, pruned_loss=0.06782, over 841496.42 frames.], batch size: 25, lr: 1.11e-03 +2022-05-03 19:35:42,057 INFO [train.py:715] (7/8) Epoch 1, batch 450, loss[loss=0.1801, simple_loss=0.2512, pruned_loss=0.05447, over 4977.00 frames.], tot_loss[loss=0.1954, simple_loss=0.2558, pruned_loss=0.0675, over 871110.20 frames.], batch size: 15, lr: 1.11e-03 +2022-05-03 19:36:22,767 INFO [train.py:715] (7/8) Epoch 1, batch 500, loss[loss=0.1889, simple_loss=0.248, pruned_loss=0.06486, over 4972.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2558, pruned_loss=0.06701, over 894039.31 frames.], batch size: 14, lr: 1.11e-03 +2022-05-03 19:37:03,290 INFO [train.py:715] (7/8) Epoch 1, batch 550, loss[loss=0.1739, simple_loss=0.2394, pruned_loss=0.05415, over 4770.00 frames.], tot_loss[loss=0.1962, simple_loss=0.2565, pruned_loss=0.06801, over 911660.33 frames.], batch size: 18, lr: 1.11e-03 +2022-05-03 19:37:43,268 INFO [train.py:715] (7/8) Epoch 1, batch 600, loss[loss=0.1777, simple_loss=0.2537, pruned_loss=0.05081, over 4810.00 frames.], tot_loss[loss=0.1976, simple_loss=0.2578, pruned_loss=0.06872, over 924478.12 frames.], batch size: 26, lr: 1.10e-03 +2022-05-03 19:38:23,974 INFO [train.py:715] (7/8) Epoch 1, batch 650, loss[loss=0.1755, simple_loss=0.2473, pruned_loss=0.05186, over 4812.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2575, pruned_loss=0.06848, over 935504.46 frames.], batch size: 25, lr: 1.10e-03 +2022-05-03 19:39:04,142 INFO [train.py:715] (7/8) Epoch 1, batch 700, loss[loss=0.1836, simple_loss=0.2471, pruned_loss=0.06005, over 4834.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2568, pruned_loss=0.06843, over 944078.61 frames.], batch size: 32, lr: 1.10e-03 +2022-05-03 19:39:44,120 INFO [train.py:715] (7/8) Epoch 1, batch 750, loss[loss=0.2306, simple_loss=0.2969, pruned_loss=0.08217, over 4972.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2573, pruned_loss=0.06821, over 950828.17 frames.], batch size: 15, lr: 1.10e-03 +2022-05-03 19:40:24,217 INFO [train.py:715] (7/8) Epoch 1, batch 800, loss[loss=0.1765, simple_loss=0.24, pruned_loss=0.05645, over 
4954.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2586, pruned_loss=0.06944, over 955484.51 frames.], batch size: 21, lr: 1.10e-03 +2022-05-03 19:41:04,460 INFO [train.py:715] (7/8) Epoch 1, batch 850, loss[loss=0.2051, simple_loss=0.2494, pruned_loss=0.08039, over 4794.00 frames.], tot_loss[loss=0.1979, simple_loss=0.2579, pruned_loss=0.06894, over 960139.64 frames.], batch size: 17, lr: 1.10e-03 +2022-05-03 19:41:43,692 INFO [train.py:715] (7/8) Epoch 1, batch 900, loss[loss=0.1972, simple_loss=0.2565, pruned_loss=0.06895, over 4890.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2577, pruned_loss=0.0684, over 962747.98 frames.], batch size: 19, lr: 1.10e-03 +2022-05-03 19:42:22,972 INFO [train.py:715] (7/8) Epoch 1, batch 950, loss[loss=0.1922, simple_loss=0.256, pruned_loss=0.06417, over 4800.00 frames.], tot_loss[loss=0.197, simple_loss=0.2576, pruned_loss=0.06824, over 964842.01 frames.], batch size: 21, lr: 1.10e-03 +2022-05-03 19:43:02,562 INFO [train.py:715] (7/8) Epoch 1, batch 1000, loss[loss=0.1468, simple_loss=0.2152, pruned_loss=0.03921, over 4833.00 frames.], tot_loss[loss=0.1966, simple_loss=0.257, pruned_loss=0.06806, over 966217.80 frames.], batch size: 13, lr: 1.10e-03 +2022-05-03 19:43:41,898 INFO [train.py:715] (7/8) Epoch 1, batch 1050, loss[loss=0.1849, simple_loss=0.2458, pruned_loss=0.06204, over 4690.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2561, pruned_loss=0.06766, over 967668.84 frames.], batch size: 15, lr: 1.10e-03 +2022-05-03 19:44:20,965 INFO [train.py:715] (7/8) Epoch 1, batch 1100, loss[loss=0.1718, simple_loss=0.2367, pruned_loss=0.05349, over 4901.00 frames.], tot_loss[loss=0.1944, simple_loss=0.2555, pruned_loss=0.0667, over 967628.73 frames.], batch size: 17, lr: 1.10e-03 +2022-05-03 19:45:00,275 INFO [train.py:715] (7/8) Epoch 1, batch 1150, loss[loss=0.1993, simple_loss=0.2598, pruned_loss=0.06942, over 4780.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2562, pruned_loss=0.06683, over 968562.30 frames.], batch size: 18, lr: 1.10e-03 +2022-05-03 19:45:40,272 INFO [train.py:715] (7/8) Epoch 1, batch 1200, loss[loss=0.183, simple_loss=0.2416, pruned_loss=0.06224, over 4954.00 frames.], tot_loss[loss=0.1968, simple_loss=0.2569, pruned_loss=0.06836, over 969253.86 frames.], batch size: 24, lr: 1.10e-03 +2022-05-03 19:46:19,428 INFO [train.py:715] (7/8) Epoch 1, batch 1250, loss[loss=0.1699, simple_loss=0.2415, pruned_loss=0.04914, over 4839.00 frames.], tot_loss[loss=0.1962, simple_loss=0.2563, pruned_loss=0.06808, over 969745.45 frames.], batch size: 27, lr: 1.10e-03 +2022-05-03 19:46:58,958 INFO [train.py:715] (7/8) Epoch 1, batch 1300, loss[loss=0.2675, simple_loss=0.2973, pruned_loss=0.1188, over 4834.00 frames.], tot_loss[loss=0.1963, simple_loss=0.2563, pruned_loss=0.06815, over 970994.81 frames.], batch size: 15, lr: 1.09e-03 +2022-05-03 19:47:39,270 INFO [train.py:715] (7/8) Epoch 1, batch 1350, loss[loss=0.1665, simple_loss=0.2208, pruned_loss=0.05614, over 4792.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2567, pruned_loss=0.06856, over 971421.24 frames.], batch size: 17, lr: 1.09e-03 +2022-05-03 19:48:18,895 INFO [train.py:715] (7/8) Epoch 1, batch 1400, loss[loss=0.1969, simple_loss=0.2658, pruned_loss=0.06406, over 4979.00 frames.], tot_loss[loss=0.1967, simple_loss=0.2568, pruned_loss=0.06831, over 972430.37 frames.], batch size: 28, lr: 1.09e-03 +2022-05-03 19:48:58,744 INFO [train.py:715] (7/8) Epoch 1, batch 1450, loss[loss=0.173, simple_loss=0.2582, pruned_loss=0.04389, over 4749.00 frames.], tot_loss[loss=0.1969, 
simple_loss=0.2569, pruned_loss=0.06846, over 972082.06 frames.], batch size: 16, lr: 1.09e-03 +2022-05-03 19:49:38,350 INFO [train.py:715] (7/8) Epoch 1, batch 1500, loss[loss=0.1742, simple_loss=0.2504, pruned_loss=0.04899, over 4871.00 frames.], tot_loss[loss=0.1986, simple_loss=0.2579, pruned_loss=0.06961, over 971240.27 frames.], batch size: 16, lr: 1.09e-03 +2022-05-03 19:50:17,875 INFO [train.py:715] (7/8) Epoch 1, batch 1550, loss[loss=0.163, simple_loss=0.2243, pruned_loss=0.05087, over 4797.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2596, pruned_loss=0.07052, over 971574.27 frames.], batch size: 13, lr: 1.09e-03 +2022-05-03 19:50:57,100 INFO [train.py:715] (7/8) Epoch 1, batch 1600, loss[loss=0.1943, simple_loss=0.2511, pruned_loss=0.06877, over 4988.00 frames.], tot_loss[loss=0.1992, simple_loss=0.2589, pruned_loss=0.06979, over 972165.79 frames.], batch size: 25, lr: 1.09e-03 +2022-05-03 19:51:36,400 INFO [train.py:715] (7/8) Epoch 1, batch 1650, loss[loss=0.1612, simple_loss=0.241, pruned_loss=0.04066, over 4838.00 frames.], tot_loss[loss=0.1988, simple_loss=0.2589, pruned_loss=0.06936, over 971942.98 frames.], batch size: 13, lr: 1.09e-03 +2022-05-03 19:52:16,982 INFO [train.py:715] (7/8) Epoch 1, batch 1700, loss[loss=0.1891, simple_loss=0.2408, pruned_loss=0.06867, over 4874.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2572, pruned_loss=0.06859, over 971796.52 frames.], batch size: 16, lr: 1.09e-03 +2022-05-03 19:52:56,161 INFO [train.py:715] (7/8) Epoch 1, batch 1750, loss[loss=0.1737, simple_loss=0.2351, pruned_loss=0.0562, over 4824.00 frames.], tot_loss[loss=0.1967, simple_loss=0.257, pruned_loss=0.06823, over 971507.66 frames.], batch size: 30, lr: 1.09e-03 +2022-05-03 19:53:35,893 INFO [train.py:715] (7/8) Epoch 1, batch 1800, loss[loss=0.1571, simple_loss=0.2181, pruned_loss=0.04807, over 4798.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2554, pruned_loss=0.06721, over 970621.92 frames.], batch size: 21, lr: 1.09e-03 +2022-05-03 19:54:15,258 INFO [train.py:715] (7/8) Epoch 1, batch 1850, loss[loss=0.1643, simple_loss=0.2167, pruned_loss=0.05594, over 4754.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2556, pruned_loss=0.06766, over 969581.14 frames.], batch size: 19, lr: 1.09e-03 +2022-05-03 19:54:54,778 INFO [train.py:715] (7/8) Epoch 1, batch 1900, loss[loss=0.2098, simple_loss=0.2754, pruned_loss=0.07208, over 4900.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2556, pruned_loss=0.06745, over 970331.00 frames.], batch size: 22, lr: 1.09e-03 +2022-05-03 19:55:34,083 INFO [train.py:715] (7/8) Epoch 1, batch 1950, loss[loss=0.207, simple_loss=0.2599, pruned_loss=0.07709, over 4964.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2552, pruned_loss=0.06716, over 970858.26 frames.], batch size: 35, lr: 1.08e-03 +2022-05-03 19:56:14,079 INFO [train.py:715] (7/8) Epoch 1, batch 2000, loss[loss=0.213, simple_loss=0.2637, pruned_loss=0.08112, over 4854.00 frames.], tot_loss[loss=0.1938, simple_loss=0.2546, pruned_loss=0.06653, over 971645.91 frames.], batch size: 20, lr: 1.08e-03 +2022-05-03 19:56:53,564 INFO [train.py:715] (7/8) Epoch 1, batch 2050, loss[loss=0.1777, simple_loss=0.2412, pruned_loss=0.05709, over 4790.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2548, pruned_loss=0.06668, over 971918.55 frames.], batch size: 14, lr: 1.08e-03 +2022-05-03 19:57:33,041 INFO [train.py:715] (7/8) Epoch 1, batch 2100, loss[loss=0.1835, simple_loss=0.2566, pruned_loss=0.05524, over 4839.00 frames.], tot_loss[loss=0.1954, simple_loss=0.256, 
pruned_loss=0.06743, over 972259.23 frames.], batch size: 26, lr: 1.08e-03 +2022-05-03 19:58:12,725 INFO [train.py:715] (7/8) Epoch 1, batch 2150, loss[loss=0.182, simple_loss=0.2402, pruned_loss=0.06191, over 4783.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2577, pruned_loss=0.06849, over 972414.65 frames.], batch size: 14, lr: 1.08e-03 +2022-05-03 19:58:52,403 INFO [train.py:715] (7/8) Epoch 1, batch 2200, loss[loss=0.2056, simple_loss=0.2602, pruned_loss=0.07554, over 4889.00 frames.], tot_loss[loss=0.1974, simple_loss=0.2579, pruned_loss=0.06847, over 972271.93 frames.], batch size: 19, lr: 1.08e-03 +2022-05-03 19:59:32,131 INFO [train.py:715] (7/8) Epoch 1, batch 2250, loss[loss=0.1928, simple_loss=0.2514, pruned_loss=0.06711, over 4881.00 frames.], tot_loss[loss=0.198, simple_loss=0.2583, pruned_loss=0.06889, over 973095.05 frames.], batch size: 22, lr: 1.08e-03 +2022-05-03 20:00:11,173 INFO [train.py:715] (7/8) Epoch 1, batch 2300, loss[loss=0.2422, simple_loss=0.2895, pruned_loss=0.09745, over 4954.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2585, pruned_loss=0.06915, over 972648.78 frames.], batch size: 15, lr: 1.08e-03 +2022-05-03 20:00:51,310 INFO [train.py:715] (7/8) Epoch 1, batch 2350, loss[loss=0.2243, simple_loss=0.2717, pruned_loss=0.08841, over 4837.00 frames.], tot_loss[loss=0.198, simple_loss=0.258, pruned_loss=0.069, over 972139.57 frames.], batch size: 13, lr: 1.08e-03 +2022-05-03 20:01:30,589 INFO [train.py:715] (7/8) Epoch 1, batch 2400, loss[loss=0.1896, simple_loss=0.255, pruned_loss=0.06204, over 4812.00 frames.], tot_loss[loss=0.1963, simple_loss=0.257, pruned_loss=0.06783, over 971125.22 frames.], batch size: 27, lr: 1.08e-03 +2022-05-03 20:02:09,732 INFO [train.py:715] (7/8) Epoch 1, batch 2450, loss[loss=0.2002, simple_loss=0.2585, pruned_loss=0.07093, over 4828.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2562, pruned_loss=0.06741, over 971261.60 frames.], batch size: 26, lr: 1.08e-03 +2022-05-03 20:02:48,984 INFO [train.py:715] (7/8) Epoch 1, batch 2500, loss[loss=0.1861, simple_loss=0.2551, pruned_loss=0.05857, over 4766.00 frames.], tot_loss[loss=0.1954, simple_loss=0.2559, pruned_loss=0.06739, over 971518.63 frames.], batch size: 14, lr: 1.08e-03 +2022-05-03 20:03:28,533 INFO [train.py:715] (7/8) Epoch 1, batch 2550, loss[loss=0.215, simple_loss=0.2671, pruned_loss=0.08141, over 4781.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2557, pruned_loss=0.06698, over 971916.05 frames.], batch size: 14, lr: 1.08e-03 +2022-05-03 20:04:08,260 INFO [train.py:715] (7/8) Epoch 1, batch 2600, loss[loss=0.2089, simple_loss=0.2713, pruned_loss=0.0732, over 4852.00 frames.], tot_loss[loss=0.1939, simple_loss=0.2549, pruned_loss=0.0665, over 972177.97 frames.], batch size: 20, lr: 1.08e-03 +2022-05-03 20:04:47,474 INFO [train.py:715] (7/8) Epoch 1, batch 2650, loss[loss=0.1903, simple_loss=0.2548, pruned_loss=0.06292, over 4924.00 frames.], tot_loss[loss=0.194, simple_loss=0.2548, pruned_loss=0.0666, over 972345.90 frames.], batch size: 23, lr: 1.07e-03 +2022-05-03 20:05:27,542 INFO [train.py:715] (7/8) Epoch 1, batch 2700, loss[loss=0.1964, simple_loss=0.2613, pruned_loss=0.06574, over 4910.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2558, pruned_loss=0.06702, over 972191.27 frames.], batch size: 39, lr: 1.07e-03 +2022-05-03 20:06:06,957 INFO [train.py:715] (7/8) Epoch 1, batch 2750, loss[loss=0.1607, simple_loss=0.2269, pruned_loss=0.04728, over 4980.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2563, pruned_loss=0.06735, over 972692.93 
frames.], batch size: 26, lr: 1.07e-03 +2022-05-03 20:06:45,688 INFO [train.py:715] (7/8) Epoch 1, batch 2800, loss[loss=0.1921, simple_loss=0.2552, pruned_loss=0.06445, over 4869.00 frames.], tot_loss[loss=0.1958, simple_loss=0.2562, pruned_loss=0.06765, over 972587.91 frames.], batch size: 20, lr: 1.07e-03 +2022-05-03 20:07:25,352 INFO [train.py:715] (7/8) Epoch 1, batch 2850, loss[loss=0.1852, simple_loss=0.2411, pruned_loss=0.06466, over 4925.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2561, pruned_loss=0.06756, over 972510.93 frames.], batch size: 39, lr: 1.07e-03 +2022-05-03 20:08:05,010 INFO [train.py:715] (7/8) Epoch 1, batch 2900, loss[loss=0.1793, simple_loss=0.2393, pruned_loss=0.05968, over 4934.00 frames.], tot_loss[loss=0.1954, simple_loss=0.2559, pruned_loss=0.06739, over 972482.56 frames.], batch size: 35, lr: 1.07e-03 +2022-05-03 20:08:44,123 INFO [train.py:715] (7/8) Epoch 1, batch 2950, loss[loss=0.2108, simple_loss=0.2658, pruned_loss=0.07791, over 4968.00 frames.], tot_loss[loss=0.195, simple_loss=0.2555, pruned_loss=0.06723, over 972689.51 frames.], batch size: 15, lr: 1.07e-03 +2022-05-03 20:09:22,836 INFO [train.py:715] (7/8) Epoch 1, batch 3000, loss[loss=0.194, simple_loss=0.2523, pruned_loss=0.06783, over 4816.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2554, pruned_loss=0.06722, over 971849.03 frames.], batch size: 25, lr: 1.07e-03 +2022-05-03 20:09:22,836 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 20:09:34,565 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1276, simple_loss=0.2149, pruned_loss=0.0201, over 914524.00 frames. +2022-05-03 20:10:13,442 INFO [train.py:715] (7/8) Epoch 1, batch 3050, loss[loss=0.1579, simple_loss=0.2261, pruned_loss=0.04488, over 4780.00 frames.], tot_loss[loss=0.1959, simple_loss=0.2565, pruned_loss=0.06763, over 971617.32 frames.], batch size: 12, lr: 1.07e-03 +2022-05-03 20:10:53,451 INFO [train.py:715] (7/8) Epoch 1, batch 3100, loss[loss=0.1406, simple_loss=0.2115, pruned_loss=0.03487, over 4748.00 frames.], tot_loss[loss=0.1946, simple_loss=0.2554, pruned_loss=0.06692, over 971437.25 frames.], batch size: 12, lr: 1.07e-03 +2022-05-03 20:11:32,602 INFO [train.py:715] (7/8) Epoch 1, batch 3150, loss[loss=0.2392, simple_loss=0.2917, pruned_loss=0.09338, over 4802.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2548, pruned_loss=0.06664, over 972018.65 frames.], batch size: 14, lr: 1.07e-03 +2022-05-03 20:12:11,817 INFO [train.py:715] (7/8) Epoch 1, batch 3200, loss[loss=0.2075, simple_loss=0.2695, pruned_loss=0.07273, over 4869.00 frames.], tot_loss[loss=0.1932, simple_loss=0.254, pruned_loss=0.06622, over 972129.97 frames.], batch size: 20, lr: 1.07e-03 +2022-05-03 20:12:51,457 INFO [train.py:715] (7/8) Epoch 1, batch 3250, loss[loss=0.2215, simple_loss=0.2792, pruned_loss=0.08195, over 4841.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2557, pruned_loss=0.06736, over 972157.44 frames.], batch size: 32, lr: 1.07e-03 +2022-05-03 20:13:31,210 INFO [train.py:715] (7/8) Epoch 1, batch 3300, loss[loss=0.1632, simple_loss=0.2265, pruned_loss=0.04996, over 4976.00 frames.], tot_loss[loss=0.195, simple_loss=0.2558, pruned_loss=0.06713, over 971373.16 frames.], batch size: 28, lr: 1.07e-03 +2022-05-03 20:14:10,770 INFO [train.py:715] (7/8) Epoch 1, batch 3350, loss[loss=0.1903, simple_loss=0.2565, pruned_loss=0.06203, over 4771.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2566, pruned_loss=0.06734, over 972262.65 frames.], batch size: 18, lr: 1.07e-03 +2022-05-03 20:14:50,049 INFO 
[train.py:715] (7/8) Epoch 1, batch 3400, loss[loss=0.2098, simple_loss=0.2544, pruned_loss=0.08265, over 4862.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2557, pruned_loss=0.06663, over 972274.43 frames.], batch size: 30, lr: 1.06e-03 +2022-05-03 20:15:30,669 INFO [train.py:715] (7/8) Epoch 1, batch 3450, loss[loss=0.2045, simple_loss=0.2495, pruned_loss=0.07978, over 4915.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2561, pruned_loss=0.06714, over 971897.66 frames.], batch size: 17, lr: 1.06e-03 +2022-05-03 20:16:09,594 INFO [train.py:715] (7/8) Epoch 1, batch 3500, loss[loss=0.2027, simple_loss=0.258, pruned_loss=0.07366, over 4923.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2563, pruned_loss=0.06736, over 972694.53 frames.], batch size: 19, lr: 1.06e-03 +2022-05-03 20:16:48,618 INFO [train.py:715] (7/8) Epoch 1, batch 3550, loss[loss=0.2162, simple_loss=0.2747, pruned_loss=0.0788, over 4981.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2566, pruned_loss=0.0673, over 972884.92 frames.], batch size: 25, lr: 1.06e-03 +2022-05-03 20:17:28,376 INFO [train.py:715] (7/8) Epoch 1, batch 3600, loss[loss=0.2207, simple_loss=0.2668, pruned_loss=0.08733, over 4837.00 frames.], tot_loss[loss=0.1951, simple_loss=0.2565, pruned_loss=0.06682, over 973617.07 frames.], batch size: 32, lr: 1.06e-03 +2022-05-03 20:18:08,018 INFO [train.py:715] (7/8) Epoch 1, batch 3650, loss[loss=0.187, simple_loss=0.2478, pruned_loss=0.06309, over 4859.00 frames.], tot_loss[loss=0.1937, simple_loss=0.255, pruned_loss=0.06618, over 973527.80 frames.], batch size: 20, lr: 1.06e-03 +2022-05-03 20:18:46,985 INFO [train.py:715] (7/8) Epoch 1, batch 3700, loss[loss=0.178, simple_loss=0.2362, pruned_loss=0.05994, over 4752.00 frames.], tot_loss[loss=0.1937, simple_loss=0.255, pruned_loss=0.0662, over 973290.08 frames.], batch size: 16, lr: 1.06e-03 +2022-05-03 20:19:25,661 INFO [train.py:715] (7/8) Epoch 1, batch 3750, loss[loss=0.2145, simple_loss=0.2726, pruned_loss=0.07822, over 4827.00 frames.], tot_loss[loss=0.1938, simple_loss=0.255, pruned_loss=0.06632, over 972375.88 frames.], batch size: 26, lr: 1.06e-03 +2022-05-03 20:20:05,933 INFO [train.py:715] (7/8) Epoch 1, batch 3800, loss[loss=0.1534, simple_loss=0.2212, pruned_loss=0.04278, over 4792.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2539, pruned_loss=0.06593, over 972611.88 frames.], batch size: 12, lr: 1.06e-03 +2022-05-03 20:20:44,904 INFO [train.py:715] (7/8) Epoch 1, batch 3850, loss[loss=0.2595, simple_loss=0.3035, pruned_loss=0.1078, over 4896.00 frames.], tot_loss[loss=0.1926, simple_loss=0.2536, pruned_loss=0.06578, over 972758.91 frames.], batch size: 39, lr: 1.06e-03 +2022-05-03 20:21:23,757 INFO [train.py:715] (7/8) Epoch 1, batch 3900, loss[loss=0.1972, simple_loss=0.2513, pruned_loss=0.07149, over 4708.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2531, pruned_loss=0.06519, over 972692.07 frames.], batch size: 15, lr: 1.06e-03 +2022-05-03 20:22:03,284 INFO [train.py:715] (7/8) Epoch 1, batch 3950, loss[loss=0.1866, simple_loss=0.2326, pruned_loss=0.07027, over 4974.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2532, pruned_loss=0.06574, over 972320.46 frames.], batch size: 35, lr: 1.06e-03 +2022-05-03 20:22:42,796 INFO [train.py:715] (7/8) Epoch 1, batch 4000, loss[loss=0.1934, simple_loss=0.2533, pruned_loss=0.0668, over 4931.00 frames.], tot_loss[loss=0.193, simple_loss=0.2541, pruned_loss=0.06599, over 972722.94 frames.], batch size: 39, lr: 1.06e-03 +2022-05-03 20:23:21,458 INFO [train.py:715] (7/8) Epoch 1, batch 4050, 
loss[loss=0.3412, simple_loss=0.3801, pruned_loss=0.1512, over 4963.00 frames.], tot_loss[loss=0.1953, simple_loss=0.2558, pruned_loss=0.06737, over 972801.04 frames.], batch size: 35, lr: 1.06e-03 +2022-05-03 20:24:00,890 INFO [train.py:715] (7/8) Epoch 1, batch 4100, loss[loss=0.1888, simple_loss=0.2582, pruned_loss=0.05971, over 4804.00 frames.], tot_loss[loss=0.1954, simple_loss=0.256, pruned_loss=0.06738, over 971336.12 frames.], batch size: 21, lr: 1.05e-03 +2022-05-03 20:24:40,539 INFO [train.py:715] (7/8) Epoch 1, batch 4150, loss[loss=0.1891, simple_loss=0.2557, pruned_loss=0.06119, over 4957.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2562, pruned_loss=0.06749, over 971271.11 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:25:19,586 INFO [train.py:715] (7/8) Epoch 1, batch 4200, loss[loss=0.2122, simple_loss=0.2634, pruned_loss=0.08047, over 4779.00 frames.], tot_loss[loss=0.1951, simple_loss=0.2553, pruned_loss=0.06741, over 971285.69 frames.], batch size: 14, lr: 1.05e-03 +2022-05-03 20:25:58,625 INFO [train.py:715] (7/8) Epoch 1, batch 4250, loss[loss=0.2014, simple_loss=0.2632, pruned_loss=0.06985, over 4977.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2557, pruned_loss=0.0676, over 972315.84 frames.], batch size: 31, lr: 1.05e-03 +2022-05-03 20:26:38,142 INFO [train.py:715] (7/8) Epoch 1, batch 4300, loss[loss=0.1735, simple_loss=0.2316, pruned_loss=0.05774, over 4766.00 frames.], tot_loss[loss=0.1958, simple_loss=0.2563, pruned_loss=0.06765, over 971692.39 frames.], batch size: 12, lr: 1.05e-03 +2022-05-03 20:27:17,805 INFO [train.py:715] (7/8) Epoch 1, batch 4350, loss[loss=0.2061, simple_loss=0.2609, pruned_loss=0.0756, over 4975.00 frames.], tot_loss[loss=0.196, simple_loss=0.2568, pruned_loss=0.06756, over 971931.71 frames.], batch size: 25, lr: 1.05e-03 +2022-05-03 20:27:56,253 INFO [train.py:715] (7/8) Epoch 1, batch 4400, loss[loss=0.2294, simple_loss=0.2817, pruned_loss=0.08859, over 4702.00 frames.], tot_loss[loss=0.1958, simple_loss=0.2568, pruned_loss=0.0674, over 971830.24 frames.], batch size: 15, lr: 1.05e-03 +2022-05-03 20:28:35,842 INFO [train.py:715] (7/8) Epoch 1, batch 4450, loss[loss=0.2453, simple_loss=0.2939, pruned_loss=0.09835, over 4833.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2563, pruned_loss=0.0671, over 971037.19 frames.], batch size: 15, lr: 1.05e-03 +2022-05-03 20:29:15,596 INFO [train.py:715] (7/8) Epoch 1, batch 4500, loss[loss=0.1919, simple_loss=0.2377, pruned_loss=0.07305, over 4982.00 frames.], tot_loss[loss=0.1953, simple_loss=0.256, pruned_loss=0.06731, over 971522.03 frames.], batch size: 14, lr: 1.05e-03 +2022-05-03 20:29:54,820 INFO [train.py:715] (7/8) Epoch 1, batch 4550, loss[loss=0.1769, simple_loss=0.2479, pruned_loss=0.05298, over 4918.00 frames.], tot_loss[loss=0.195, simple_loss=0.2558, pruned_loss=0.06706, over 971662.36 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:30:33,521 INFO [train.py:715] (7/8) Epoch 1, batch 4600, loss[loss=0.199, simple_loss=0.2532, pruned_loss=0.07244, over 4780.00 frames.], tot_loss[loss=0.1934, simple_loss=0.2547, pruned_loss=0.0661, over 972013.70 frames.], batch size: 18, lr: 1.05e-03 +2022-05-03 20:31:13,056 INFO [train.py:715] (7/8) Epoch 1, batch 4650, loss[loss=0.179, simple_loss=0.2332, pruned_loss=0.06236, over 4870.00 frames.], tot_loss[loss=0.1948, simple_loss=0.2555, pruned_loss=0.06699, over 971613.04 frames.], batch size: 32, lr: 1.05e-03 +2022-05-03 20:31:52,505 INFO [train.py:715] (7/8) Epoch 1, batch 4700, loss[loss=0.1718, simple_loss=0.2328, 
pruned_loss=0.05535, over 4924.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2544, pruned_loss=0.06607, over 971327.60 frames.], batch size: 29, lr: 1.05e-03 +2022-05-03 20:32:31,323 INFO [train.py:715] (7/8) Epoch 1, batch 4750, loss[loss=0.2196, simple_loss=0.2812, pruned_loss=0.07899, over 4969.00 frames.], tot_loss[loss=0.1939, simple_loss=0.2549, pruned_loss=0.06643, over 971583.71 frames.], batch size: 15, lr: 1.05e-03 +2022-05-03 20:33:11,345 INFO [train.py:715] (7/8) Epoch 1, batch 4800, loss[loss=0.2008, simple_loss=0.2581, pruned_loss=0.0718, over 4850.00 frames.], tot_loss[loss=0.1942, simple_loss=0.2551, pruned_loss=0.06665, over 971957.62 frames.], batch size: 30, lr: 1.05e-03 +2022-05-03 20:33:51,182 INFO [train.py:715] (7/8) Epoch 1, batch 4850, loss[loss=0.2266, simple_loss=0.2885, pruned_loss=0.08237, over 4827.00 frames.], tot_loss[loss=0.1943, simple_loss=0.2551, pruned_loss=0.0667, over 971814.92 frames.], batch size: 25, lr: 1.05e-03 +2022-05-03 20:34:30,468 INFO [train.py:715] (7/8) Epoch 1, batch 4900, loss[loss=0.2326, simple_loss=0.2825, pruned_loss=0.09138, over 4977.00 frames.], tot_loss[loss=0.1939, simple_loss=0.2548, pruned_loss=0.06654, over 972394.83 frames.], batch size: 28, lr: 1.04e-03 +2022-05-03 20:35:09,821 INFO [train.py:715] (7/8) Epoch 1, batch 4950, loss[loss=0.1784, simple_loss=0.2379, pruned_loss=0.05943, over 4774.00 frames.], tot_loss[loss=0.1939, simple_loss=0.2546, pruned_loss=0.06656, over 971536.20 frames.], batch size: 17, lr: 1.04e-03 +2022-05-03 20:35:50,161 INFO [train.py:715] (7/8) Epoch 1, batch 5000, loss[loss=0.2434, simple_loss=0.3038, pruned_loss=0.09148, over 4792.00 frames.], tot_loss[loss=0.1943, simple_loss=0.2553, pruned_loss=0.06666, over 970836.20 frames.], batch size: 17, lr: 1.04e-03 +2022-05-03 20:36:29,720 INFO [train.py:715] (7/8) Epoch 1, batch 5050, loss[loss=0.1617, simple_loss=0.2372, pruned_loss=0.04308, over 4785.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2543, pruned_loss=0.06617, over 971392.87 frames.], batch size: 14, lr: 1.04e-03 +2022-05-03 20:37:08,715 INFO [train.py:715] (7/8) Epoch 1, batch 5100, loss[loss=0.1755, simple_loss=0.2355, pruned_loss=0.05777, over 4786.00 frames.], tot_loss[loss=0.194, simple_loss=0.2548, pruned_loss=0.06659, over 971113.20 frames.], batch size: 18, lr: 1.04e-03 +2022-05-03 20:37:48,747 INFO [train.py:715] (7/8) Epoch 1, batch 5150, loss[loss=0.2352, simple_loss=0.2804, pruned_loss=0.09505, over 4842.00 frames.], tot_loss[loss=0.1932, simple_loss=0.2542, pruned_loss=0.06605, over 971119.20 frames.], batch size: 30, lr: 1.04e-03 +2022-05-03 20:38:30,133 INFO [train.py:715] (7/8) Epoch 1, batch 5200, loss[loss=0.2062, simple_loss=0.2726, pruned_loss=0.0699, over 4977.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2552, pruned_loss=0.06651, over 971269.21 frames.], batch size: 25, lr: 1.04e-03 +2022-05-03 20:39:09,107 INFO [train.py:715] (7/8) Epoch 1, batch 5250, loss[loss=0.1606, simple_loss=0.2314, pruned_loss=0.0449, over 4805.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2545, pruned_loss=0.06608, over 971004.12 frames.], batch size: 21, lr: 1.04e-03 +2022-05-03 20:39:48,465 INFO [train.py:715] (7/8) Epoch 1, batch 5300, loss[loss=0.1957, simple_loss=0.2604, pruned_loss=0.06551, over 4934.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2539, pruned_loss=0.06538, over 970985.95 frames.], batch size: 23, lr: 1.04e-03 +2022-05-03 20:40:28,103 INFO [train.py:715] (7/8) Epoch 1, batch 5350, loss[loss=0.2054, simple_loss=0.2687, pruned_loss=0.07101, over 4784.00 
frames.], tot_loss[loss=0.1927, simple_loss=0.254, pruned_loss=0.06571, over 972095.79 frames.], batch size: 17, lr: 1.04e-03 +2022-05-03 20:41:07,645 INFO [train.py:715] (7/8) Epoch 1, batch 5400, loss[loss=0.1439, simple_loss=0.2131, pruned_loss=0.03735, over 4830.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2535, pruned_loss=0.065, over 972228.42 frames.], batch size: 13, lr: 1.04e-03 +2022-05-03 20:41:46,696 INFO [train.py:715] (7/8) Epoch 1, batch 5450, loss[loss=0.1628, simple_loss=0.2339, pruned_loss=0.04584, over 4959.00 frames.], tot_loss[loss=0.1916, simple_loss=0.2531, pruned_loss=0.06504, over 972160.81 frames.], batch size: 24, lr: 1.04e-03 +2022-05-03 20:42:26,577 INFO [train.py:715] (7/8) Epoch 1, batch 5500, loss[loss=0.1403, simple_loss=0.2012, pruned_loss=0.03969, over 4778.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2533, pruned_loss=0.06509, over 970416.07 frames.], batch size: 12, lr: 1.04e-03 +2022-05-03 20:43:06,481 INFO [train.py:715] (7/8) Epoch 1, batch 5550, loss[loss=0.2139, simple_loss=0.268, pruned_loss=0.07988, over 4840.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2544, pruned_loss=0.06533, over 971145.22 frames.], batch size: 20, lr: 1.04e-03 +2022-05-03 20:43:45,488 INFO [train.py:715] (7/8) Epoch 1, batch 5600, loss[loss=0.2364, simple_loss=0.278, pruned_loss=0.09743, over 4805.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2543, pruned_loss=0.06568, over 971675.19 frames.], batch size: 15, lr: 1.04e-03 +2022-05-03 20:44:24,787 INFO [train.py:715] (7/8) Epoch 1, batch 5650, loss[loss=0.1513, simple_loss=0.2109, pruned_loss=0.04583, over 4819.00 frames.], tot_loss[loss=0.192, simple_loss=0.2536, pruned_loss=0.06514, over 972881.38 frames.], batch size: 13, lr: 1.03e-03 +2022-05-03 20:45:04,551 INFO [train.py:715] (7/8) Epoch 1, batch 5700, loss[loss=0.1609, simple_loss=0.2244, pruned_loss=0.04864, over 4983.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2541, pruned_loss=0.06523, over 973276.10 frames.], batch size: 25, lr: 1.03e-03 +2022-05-03 20:45:44,080 INFO [train.py:715] (7/8) Epoch 1, batch 5750, loss[loss=0.184, simple_loss=0.2452, pruned_loss=0.06146, over 4866.00 frames.], tot_loss[loss=0.192, simple_loss=0.2538, pruned_loss=0.06512, over 973052.79 frames.], batch size: 39, lr: 1.03e-03 +2022-05-03 20:46:23,087 INFO [train.py:715] (7/8) Epoch 1, batch 5800, loss[loss=0.1898, simple_loss=0.2571, pruned_loss=0.06125, over 4783.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2536, pruned_loss=0.06487, over 973513.73 frames.], batch size: 17, lr: 1.03e-03 +2022-05-03 20:47:03,044 INFO [train.py:715] (7/8) Epoch 1, batch 5850, loss[loss=0.1761, simple_loss=0.2368, pruned_loss=0.05775, over 4798.00 frames.], tot_loss[loss=0.1919, simple_loss=0.2537, pruned_loss=0.06504, over 972823.58 frames.], batch size: 14, lr: 1.03e-03 +2022-05-03 20:47:42,849 INFO [train.py:715] (7/8) Epoch 1, batch 5900, loss[loss=0.1733, simple_loss=0.2441, pruned_loss=0.05129, over 4949.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2531, pruned_loss=0.06472, over 972343.33 frames.], batch size: 14, lr: 1.03e-03 +2022-05-03 20:48:21,957 INFO [train.py:715] (7/8) Epoch 1, batch 5950, loss[loss=0.1842, simple_loss=0.235, pruned_loss=0.06665, over 4860.00 frames.], tot_loss[loss=0.1908, simple_loss=0.2527, pruned_loss=0.06444, over 971888.33 frames.], batch size: 32, lr: 1.03e-03 +2022-05-03 20:49:01,788 INFO [train.py:715] (7/8) Epoch 1, batch 6000, loss[loss=0.1928, simple_loss=0.2491, pruned_loss=0.06822, over 4736.00 frames.], tot_loss[loss=0.1904, 
simple_loss=0.2523, pruned_loss=0.06421, over 971583.48 frames.], batch size: 16, lr: 1.03e-03 +2022-05-03 20:49:01,789 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 20:49:14,259 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1267, simple_loss=0.2135, pruned_loss=0.01993, over 914524.00 frames. +2022-05-03 20:49:53,685 INFO [train.py:715] (7/8) Epoch 1, batch 6050, loss[loss=0.202, simple_loss=0.2505, pruned_loss=0.07676, over 4879.00 frames.], tot_loss[loss=0.1909, simple_loss=0.2526, pruned_loss=0.06466, over 971887.53 frames.], batch size: 32, lr: 1.03e-03 +2022-05-03 20:50:33,755 INFO [train.py:715] (7/8) Epoch 1, batch 6100, loss[loss=0.192, simple_loss=0.262, pruned_loss=0.06095, over 4750.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2538, pruned_loss=0.06594, over 972070.46 frames.], batch size: 19, lr: 1.03e-03 +2022-05-03 20:51:13,278 INFO [train.py:715] (7/8) Epoch 1, batch 6150, loss[loss=0.1923, simple_loss=0.246, pruned_loss=0.06931, over 4993.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2534, pruned_loss=0.06551, over 972464.34 frames.], batch size: 16, lr: 1.03e-03 +2022-05-03 20:51:51,977 INFO [train.py:715] (7/8) Epoch 1, batch 6200, loss[loss=0.1814, simple_loss=0.2493, pruned_loss=0.05676, over 4916.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2539, pruned_loss=0.06629, over 972007.44 frames.], batch size: 39, lr: 1.03e-03 +2022-05-03 20:52:32,166 INFO [train.py:715] (7/8) Epoch 1, batch 6250, loss[loss=0.2236, simple_loss=0.2793, pruned_loss=0.08395, over 4845.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2543, pruned_loss=0.06615, over 971737.67 frames.], batch size: 15, lr: 1.03e-03 +2022-05-03 20:53:11,879 INFO [train.py:715] (7/8) Epoch 1, batch 6300, loss[loss=0.1884, simple_loss=0.2497, pruned_loss=0.06352, over 4814.00 frames.], tot_loss[loss=0.1935, simple_loss=0.254, pruned_loss=0.06646, over 970834.37 frames.], batch size: 13, lr: 1.03e-03 +2022-05-03 20:53:51,077 INFO [train.py:715] (7/8) Epoch 1, batch 6350, loss[loss=0.1616, simple_loss=0.2366, pruned_loss=0.04329, over 4813.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2546, pruned_loss=0.06679, over 970884.18 frames.], batch size: 15, lr: 1.03e-03 +2022-05-03 20:54:30,386 INFO [train.py:715] (7/8) Epoch 1, batch 6400, loss[loss=0.1824, simple_loss=0.2532, pruned_loss=0.05578, over 4918.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2543, pruned_loss=0.06617, over 971032.12 frames.], batch size: 18, lr: 1.03e-03 +2022-05-03 20:55:09,943 INFO [train.py:715] (7/8) Epoch 1, batch 6450, loss[loss=0.2245, simple_loss=0.2678, pruned_loss=0.09062, over 4978.00 frames.], tot_loss[loss=0.1924, simple_loss=0.2536, pruned_loss=0.06563, over 971827.35 frames.], batch size: 15, lr: 1.02e-03 +2022-05-03 20:55:49,583 INFO [train.py:715] (7/8) Epoch 1, batch 6500, loss[loss=0.1724, simple_loss=0.2401, pruned_loss=0.05234, over 4785.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2538, pruned_loss=0.06559, over 971034.72 frames.], batch size: 14, lr: 1.02e-03 +2022-05-03 20:56:28,198 INFO [train.py:715] (7/8) Epoch 1, batch 6550, loss[loss=0.1897, simple_loss=0.2575, pruned_loss=0.06094, over 4829.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2547, pruned_loss=0.06576, over 971063.20 frames.], batch size: 13, lr: 1.02e-03 +2022-05-03 20:57:08,078 INFO [train.py:715] (7/8) Epoch 1, batch 6600, loss[loss=0.1942, simple_loss=0.2681, pruned_loss=0.06017, over 4758.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2532, pruned_loss=0.06481, over 971093.23 frames.], batch size: 14, 
lr: 1.02e-03 +2022-05-03 20:57:48,550 INFO [train.py:715] (7/8) Epoch 1, batch 6650, loss[loss=0.2017, simple_loss=0.2601, pruned_loss=0.07163, over 4958.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2526, pruned_loss=0.06498, over 971349.28 frames.], batch size: 35, lr: 1.02e-03 +2022-05-03 20:58:28,004 INFO [train.py:715] (7/8) Epoch 1, batch 6700, loss[loss=0.1966, simple_loss=0.2652, pruned_loss=0.06398, over 4906.00 frames.], tot_loss[loss=0.1919, simple_loss=0.2528, pruned_loss=0.06551, over 971466.17 frames.], batch size: 39, lr: 1.02e-03 +2022-05-03 20:59:07,324 INFO [train.py:715] (7/8) Epoch 1, batch 6750, loss[loss=0.2173, simple_loss=0.275, pruned_loss=0.07977, over 4923.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2533, pruned_loss=0.06614, over 972032.19 frames.], batch size: 17, lr: 1.02e-03 +2022-05-03 20:59:47,258 INFO [train.py:715] (7/8) Epoch 1, batch 6800, loss[loss=0.1844, simple_loss=0.2556, pruned_loss=0.05664, over 4981.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2527, pruned_loss=0.06596, over 972169.11 frames.], batch size: 28, lr: 1.02e-03 +2022-05-03 21:00:26,800 INFO [train.py:715] (7/8) Epoch 1, batch 6850, loss[loss=0.19, simple_loss=0.2527, pruned_loss=0.06365, over 4796.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2523, pruned_loss=0.06517, over 972254.94 frames.], batch size: 24, lr: 1.02e-03 +2022-05-03 21:01:05,425 INFO [train.py:715] (7/8) Epoch 1, batch 6900, loss[loss=0.1682, simple_loss=0.2215, pruned_loss=0.05743, over 4778.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2525, pruned_loss=0.0651, over 971918.29 frames.], batch size: 12, lr: 1.02e-03 +2022-05-03 21:01:44,716 INFO [train.py:715] (7/8) Epoch 1, batch 6950, loss[loss=0.1907, simple_loss=0.2496, pruned_loss=0.06591, over 4845.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2527, pruned_loss=0.06516, over 970807.35 frames.], batch size: 13, lr: 1.02e-03 +2022-05-03 21:02:24,796 INFO [train.py:715] (7/8) Epoch 1, batch 7000, loss[loss=0.2044, simple_loss=0.2564, pruned_loss=0.07619, over 4963.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2521, pruned_loss=0.06464, over 971771.63 frames.], batch size: 35, lr: 1.02e-03 +2022-05-03 21:03:03,642 INFO [train.py:715] (7/8) Epoch 1, batch 7050, loss[loss=0.168, simple_loss=0.2237, pruned_loss=0.0561, over 4795.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2532, pruned_loss=0.06564, over 972799.77 frames.], batch size: 24, lr: 1.02e-03 +2022-05-03 21:03:42,609 INFO [train.py:715] (7/8) Epoch 1, batch 7100, loss[loss=0.1857, simple_loss=0.2519, pruned_loss=0.0598, over 4896.00 frames.], tot_loss[loss=0.1932, simple_loss=0.254, pruned_loss=0.06618, over 972264.22 frames.], batch size: 19, lr: 1.02e-03 +2022-05-03 21:04:22,597 INFO [train.py:715] (7/8) Epoch 1, batch 7150, loss[loss=0.1509, simple_loss=0.2196, pruned_loss=0.04105, over 4956.00 frames.], tot_loss[loss=0.193, simple_loss=0.2544, pruned_loss=0.06575, over 971875.19 frames.], batch size: 24, lr: 1.02e-03 +2022-05-03 21:05:02,516 INFO [train.py:715] (7/8) Epoch 1, batch 7200, loss[loss=0.1925, simple_loss=0.2577, pruned_loss=0.06366, over 4795.00 frames.], tot_loss[loss=0.192, simple_loss=0.2542, pruned_loss=0.06495, over 970619.34 frames.], batch size: 24, lr: 1.02e-03 +2022-05-03 21:05:41,158 INFO [train.py:715] (7/8) Epoch 1, batch 7250, loss[loss=0.1579, simple_loss=0.2337, pruned_loss=0.04106, over 4880.00 frames.], tot_loss[loss=0.192, simple_loss=0.254, pruned_loss=0.06506, over 971559.03 frames.], batch size: 16, lr: 1.02e-03 +2022-05-03 21:06:21,089 INFO 
[train.py:715] (7/8) Epoch 1, batch 7300, loss[loss=0.2295, simple_loss=0.2786, pruned_loss=0.09017, over 4962.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2548, pruned_loss=0.06548, over 972486.71 frames.], batch size: 35, lr: 1.01e-03 +2022-05-03 21:07:00,827 INFO [train.py:715] (7/8) Epoch 1, batch 7350, loss[loss=0.156, simple_loss=0.2298, pruned_loss=0.04114, over 4777.00 frames.], tot_loss[loss=0.1926, simple_loss=0.2544, pruned_loss=0.06542, over 972289.21 frames.], batch size: 18, lr: 1.01e-03 +2022-05-03 21:07:39,617 INFO [train.py:715] (7/8) Epoch 1, batch 7400, loss[loss=0.1593, simple_loss=0.2222, pruned_loss=0.04819, over 4852.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2533, pruned_loss=0.06469, over 971769.65 frames.], batch size: 20, lr: 1.01e-03 +2022-05-03 21:08:18,532 INFO [train.py:715] (7/8) Epoch 1, batch 7450, loss[loss=0.1753, simple_loss=0.236, pruned_loss=0.05737, over 4819.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2541, pruned_loss=0.06511, over 971732.96 frames.], batch size: 25, lr: 1.01e-03 +2022-05-03 21:08:58,346 INFO [train.py:715] (7/8) Epoch 1, batch 7500, loss[loss=0.1737, simple_loss=0.2459, pruned_loss=0.05076, over 4751.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2544, pruned_loss=0.06565, over 972228.94 frames.], batch size: 19, lr: 1.01e-03 +2022-05-03 21:09:38,024 INFO [train.py:715] (7/8) Epoch 1, batch 7550, loss[loss=0.1724, simple_loss=0.2434, pruned_loss=0.05072, over 4868.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2538, pruned_loss=0.06521, over 971625.61 frames.], batch size: 16, lr: 1.01e-03 +2022-05-03 21:10:16,233 INFO [train.py:715] (7/8) Epoch 1, batch 7600, loss[loss=0.1647, simple_loss=0.2418, pruned_loss=0.04381, over 4731.00 frames.], tot_loss[loss=0.1911, simple_loss=0.253, pruned_loss=0.0646, over 971938.31 frames.], batch size: 16, lr: 1.01e-03 +2022-05-03 21:10:55,973 INFO [train.py:715] (7/8) Epoch 1, batch 7650, loss[loss=0.1815, simple_loss=0.2514, pruned_loss=0.05579, over 4793.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2536, pruned_loss=0.06474, over 971694.29 frames.], batch size: 18, lr: 1.01e-03 +2022-05-03 21:11:35,793 INFO [train.py:715] (7/8) Epoch 1, batch 7700, loss[loss=0.1994, simple_loss=0.2526, pruned_loss=0.07306, over 4975.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2532, pruned_loss=0.06485, over 972654.08 frames.], batch size: 33, lr: 1.01e-03 +2022-05-03 21:12:14,135 INFO [train.py:715] (7/8) Epoch 1, batch 7750, loss[loss=0.2172, simple_loss=0.2654, pruned_loss=0.08447, over 4936.00 frames.], tot_loss[loss=0.192, simple_loss=0.2533, pruned_loss=0.06532, over 973218.80 frames.], batch size: 29, lr: 1.01e-03 +2022-05-03 21:12:53,243 INFO [train.py:715] (7/8) Epoch 1, batch 7800, loss[loss=0.1793, simple_loss=0.2456, pruned_loss=0.05649, over 4937.00 frames.], tot_loss[loss=0.1931, simple_loss=0.2539, pruned_loss=0.06608, over 973321.85 frames.], batch size: 29, lr: 1.01e-03 +2022-05-03 21:13:33,314 INFO [train.py:715] (7/8) Epoch 1, batch 7850, loss[loss=0.2087, simple_loss=0.274, pruned_loss=0.07172, over 4802.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2528, pruned_loss=0.06497, over 973050.53 frames.], batch size: 21, lr: 1.01e-03 +2022-05-03 21:14:12,717 INFO [train.py:715] (7/8) Epoch 1, batch 7900, loss[loss=0.1912, simple_loss=0.2534, pruned_loss=0.0645, over 4787.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2531, pruned_loss=0.06476, over 973418.01 frames.], batch size: 18, lr: 1.01e-03 +2022-05-03 21:14:51,151 INFO [train.py:715] (7/8) Epoch 1, batch 
7950, loss[loss=0.1618, simple_loss=0.2261, pruned_loss=0.04871, over 4984.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2534, pruned_loss=0.06499, over 974009.55 frames.], batch size: 33, lr: 1.01e-03 +2022-05-03 21:15:31,261 INFO [train.py:715] (7/8) Epoch 1, batch 8000, loss[loss=0.1771, simple_loss=0.2485, pruned_loss=0.05285, over 4819.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2533, pruned_loss=0.0648, over 973536.86 frames.], batch size: 25, lr: 1.01e-03 +2022-05-03 21:16:11,052 INFO [train.py:715] (7/8) Epoch 1, batch 8050, loss[loss=0.1731, simple_loss=0.2274, pruned_loss=0.05938, over 4882.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2525, pruned_loss=0.06448, over 972575.07 frames.], batch size: 20, lr: 1.01e-03 +2022-05-03 21:16:50,425 INFO [train.py:715] (7/8) Epoch 1, batch 8100, loss[loss=0.1506, simple_loss=0.2172, pruned_loss=0.04206, over 4790.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2525, pruned_loss=0.06503, over 972868.65 frames.], batch size: 12, lr: 1.01e-03 +2022-05-03 21:17:28,627 INFO [train.py:715] (7/8) Epoch 1, batch 8150, loss[loss=0.2164, simple_loss=0.2724, pruned_loss=0.08023, over 4953.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2533, pruned_loss=0.06505, over 972998.41 frames.], batch size: 24, lr: 1.00e-03 +2022-05-03 21:18:08,547 INFO [train.py:715] (7/8) Epoch 1, batch 8200, loss[loss=0.1919, simple_loss=0.2545, pruned_loss=0.06463, over 4805.00 frames.], tot_loss[loss=0.192, simple_loss=0.2534, pruned_loss=0.06534, over 973619.66 frames.], batch size: 21, lr: 1.00e-03 +2022-05-03 21:18:48,018 INFO [train.py:715] (7/8) Epoch 1, batch 8250, loss[loss=0.2708, simple_loss=0.3204, pruned_loss=0.1106, over 4930.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2537, pruned_loss=0.06532, over 973806.23 frames.], batch size: 23, lr: 1.00e-03 +2022-05-03 21:19:26,207 INFO [train.py:715] (7/8) Epoch 1, batch 8300, loss[loss=0.1708, simple_loss=0.2292, pruned_loss=0.05618, over 4954.00 frames.], tot_loss[loss=0.1919, simple_loss=0.2535, pruned_loss=0.06518, over 973831.34 frames.], batch size: 21, lr: 1.00e-03 +2022-05-03 21:20:06,146 INFO [train.py:715] (7/8) Epoch 1, batch 8350, loss[loss=0.2223, simple_loss=0.2726, pruned_loss=0.08603, over 4875.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2531, pruned_loss=0.06477, over 973197.39 frames.], batch size: 32, lr: 1.00e-03 +2022-05-03 21:20:45,725 INFO [train.py:715] (7/8) Epoch 1, batch 8400, loss[loss=0.1805, simple_loss=0.2546, pruned_loss=0.05318, over 4873.00 frames.], tot_loss[loss=0.1926, simple_loss=0.2546, pruned_loss=0.0653, over 972615.32 frames.], batch size: 20, lr: 1.00e-03 +2022-05-03 21:21:25,104 INFO [train.py:715] (7/8) Epoch 1, batch 8450, loss[loss=0.1444, simple_loss=0.21, pruned_loss=0.03942, over 4812.00 frames.], tot_loss[loss=0.1899, simple_loss=0.2523, pruned_loss=0.06374, over 973421.43 frames.], batch size: 13, lr: 1.00e-03 +2022-05-03 21:22:03,494 INFO [train.py:715] (7/8) Epoch 1, batch 8500, loss[loss=0.1969, simple_loss=0.2538, pruned_loss=0.06996, over 4851.00 frames.], tot_loss[loss=0.1905, simple_loss=0.2525, pruned_loss=0.06431, over 973155.77 frames.], batch size: 30, lr: 1.00e-03 +2022-05-03 21:22:43,394 INFO [train.py:715] (7/8) Epoch 1, batch 8550, loss[loss=0.2094, simple_loss=0.2674, pruned_loss=0.07572, over 4889.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2539, pruned_loss=0.06521, over 972879.06 frames.], batch size: 19, lr: 1.00e-03 +2022-05-03 21:23:22,904 INFO [train.py:715] (7/8) Epoch 1, batch 8600, loss[loss=0.2054, 
simple_loss=0.2707, pruned_loss=0.07001, over 4960.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2531, pruned_loss=0.06477, over 971926.83 frames.], batch size: 21, lr: 1.00e-03 +2022-05-03 21:24:00,912 INFO [train.py:715] (7/8) Epoch 1, batch 8650, loss[loss=0.1838, simple_loss=0.2492, pruned_loss=0.05921, over 4954.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2529, pruned_loss=0.06428, over 972952.76 frames.], batch size: 24, lr: 9.99e-04 +2022-05-03 21:24:41,126 INFO [train.py:715] (7/8) Epoch 1, batch 8700, loss[loss=0.2504, simple_loss=0.2923, pruned_loss=0.1042, over 4958.00 frames.], tot_loss[loss=0.1903, simple_loss=0.2524, pruned_loss=0.06409, over 972981.82 frames.], batch size: 35, lr: 9.98e-04 +2022-05-03 21:25:21,118 INFO [train.py:715] (7/8) Epoch 1, batch 8750, loss[loss=0.1867, simple_loss=0.2484, pruned_loss=0.06257, over 4801.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2533, pruned_loss=0.06485, over 973575.35 frames.], batch size: 21, lr: 9.98e-04 +2022-05-03 21:26:00,210 INFO [train.py:715] (7/8) Epoch 1, batch 8800, loss[loss=0.1846, simple_loss=0.2519, pruned_loss=0.05868, over 4917.00 frames.], tot_loss[loss=0.1924, simple_loss=0.254, pruned_loss=0.06538, over 973529.32 frames.], batch size: 17, lr: 9.97e-04 +2022-05-03 21:26:39,531 INFO [train.py:715] (7/8) Epoch 1, batch 8850, loss[loss=0.2019, simple_loss=0.2596, pruned_loss=0.07214, over 4807.00 frames.], tot_loss[loss=0.1932, simple_loss=0.2547, pruned_loss=0.06591, over 972862.65 frames.], batch size: 25, lr: 9.97e-04 +2022-05-03 21:27:19,652 INFO [train.py:715] (7/8) Epoch 1, batch 8900, loss[loss=0.1857, simple_loss=0.2507, pruned_loss=0.06034, over 4971.00 frames.], tot_loss[loss=0.192, simple_loss=0.2539, pruned_loss=0.0651, over 972837.82 frames.], batch size: 15, lr: 9.96e-04 +2022-05-03 21:27:59,353 INFO [train.py:715] (7/8) Epoch 1, batch 8950, loss[loss=0.1974, simple_loss=0.2671, pruned_loss=0.0638, over 4835.00 frames.], tot_loss[loss=0.1911, simple_loss=0.253, pruned_loss=0.06457, over 972568.39 frames.], batch size: 30, lr: 9.96e-04 +2022-05-03 21:28:37,780 INFO [train.py:715] (7/8) Epoch 1, batch 9000, loss[loss=0.1497, simple_loss=0.2201, pruned_loss=0.0396, over 4808.00 frames.], tot_loss[loss=0.1908, simple_loss=0.2527, pruned_loss=0.06441, over 972037.06 frames.], batch size: 13, lr: 9.95e-04 +2022-05-03 21:28:37,781 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 21:28:47,502 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1253, simple_loss=0.2125, pruned_loss=0.01906, over 914524.00 frames. 
+2022-05-03 21:29:25,999 INFO [train.py:715] (7/8) Epoch 1, batch 9050, loss[loss=0.2179, simple_loss=0.2657, pruned_loss=0.08502, over 4906.00 frames.], tot_loss[loss=0.1902, simple_loss=0.252, pruned_loss=0.06417, over 972512.94 frames.], batch size: 17, lr: 9.94e-04 +2022-05-03 21:30:06,208 INFO [train.py:715] (7/8) Epoch 1, batch 9100, loss[loss=0.1711, simple_loss=0.2375, pruned_loss=0.05228, over 4986.00 frames.], tot_loss[loss=0.19, simple_loss=0.2523, pruned_loss=0.0639, over 972440.16 frames.], batch size: 15, lr: 9.94e-04 +2022-05-03 21:30:45,844 INFO [train.py:715] (7/8) Epoch 1, batch 9150, loss[loss=0.2165, simple_loss=0.2726, pruned_loss=0.08023, over 4744.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2513, pruned_loss=0.06349, over 972445.07 frames.], batch size: 19, lr: 9.93e-04 +2022-05-03 21:31:24,124 INFO [train.py:715] (7/8) Epoch 1, batch 9200, loss[loss=0.1695, simple_loss=0.2295, pruned_loss=0.05476, over 4785.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2514, pruned_loss=0.06349, over 972292.82 frames.], batch size: 12, lr: 9.93e-04 +2022-05-03 21:32:03,945 INFO [train.py:715] (7/8) Epoch 1, batch 9250, loss[loss=0.2374, simple_loss=0.296, pruned_loss=0.08941, over 4968.00 frames.], tot_loss[loss=0.1882, simple_loss=0.2506, pruned_loss=0.0629, over 972493.07 frames.], batch size: 15, lr: 9.92e-04 +2022-05-03 21:32:43,824 INFO [train.py:715] (7/8) Epoch 1, batch 9300, loss[loss=0.2049, simple_loss=0.2681, pruned_loss=0.07083, over 4754.00 frames.], tot_loss[loss=0.1895, simple_loss=0.2517, pruned_loss=0.06367, over 972422.18 frames.], batch size: 16, lr: 9.92e-04 +2022-05-03 21:33:22,871 INFO [train.py:715] (7/8) Epoch 1, batch 9350, loss[loss=0.198, simple_loss=0.2626, pruned_loss=0.0667, over 4808.00 frames.], tot_loss[loss=0.189, simple_loss=0.2516, pruned_loss=0.06324, over 971398.07 frames.], batch size: 25, lr: 9.91e-04 +2022-05-03 21:34:02,365 INFO [train.py:715] (7/8) Epoch 1, batch 9400, loss[loss=0.1961, simple_loss=0.2603, pruned_loss=0.0659, over 4831.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2514, pruned_loss=0.06308, over 971749.40 frames.], batch size: 26, lr: 9.91e-04 +2022-05-03 21:34:42,535 INFO [train.py:715] (7/8) Epoch 1, batch 9450, loss[loss=0.2223, simple_loss=0.2769, pruned_loss=0.08389, over 4699.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2526, pruned_loss=0.06384, over 971047.36 frames.], batch size: 15, lr: 9.90e-04 +2022-05-03 21:35:22,130 INFO [train.py:715] (7/8) Epoch 1, batch 9500, loss[loss=0.1927, simple_loss=0.2526, pruned_loss=0.06638, over 4884.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2519, pruned_loss=0.06311, over 970983.00 frames.], batch size: 16, lr: 9.89e-04 +2022-05-03 21:36:00,394 INFO [train.py:715] (7/8) Epoch 1, batch 9550, loss[loss=0.1877, simple_loss=0.2454, pruned_loss=0.06504, over 4780.00 frames.], tot_loss[loss=0.1893, simple_loss=0.2521, pruned_loss=0.06322, over 971236.02 frames.], batch size: 18, lr: 9.89e-04 +2022-05-03 21:36:40,622 INFO [train.py:715] (7/8) Epoch 1, batch 9600, loss[loss=0.1579, simple_loss=0.2267, pruned_loss=0.04459, over 4963.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2533, pruned_loss=0.06452, over 971715.93 frames.], batch size: 24, lr: 9.88e-04 +2022-05-03 21:37:20,357 INFO [train.py:715] (7/8) Epoch 1, batch 9650, loss[loss=0.2332, simple_loss=0.2909, pruned_loss=0.0878, over 4868.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2532, pruned_loss=0.06445, over 972006.91 frames.], batch size: 16, lr: 9.88e-04 +2022-05-03 21:37:58,746 INFO 
[train.py:715] (7/8) Epoch 1, batch 9700, loss[loss=0.2149, simple_loss=0.2776, pruned_loss=0.07605, over 4782.00 frames.], tot_loss[loss=0.1915, simple_loss=0.2535, pruned_loss=0.06477, over 971172.94 frames.], batch size: 18, lr: 9.87e-04 +2022-05-03 21:38:38,640 INFO [train.py:715] (7/8) Epoch 1, batch 9750, loss[loss=0.1628, simple_loss=0.23, pruned_loss=0.04779, over 4924.00 frames.], tot_loss[loss=0.191, simple_loss=0.2529, pruned_loss=0.06454, over 971199.82 frames.], batch size: 29, lr: 9.87e-04 +2022-05-03 21:39:19,061 INFO [train.py:715] (7/8) Epoch 1, batch 9800, loss[loss=0.2241, simple_loss=0.29, pruned_loss=0.0791, over 4759.00 frames.], tot_loss[loss=0.1899, simple_loss=0.252, pruned_loss=0.06392, over 971623.37 frames.], batch size: 19, lr: 9.86e-04 +2022-05-03 21:39:58,299 INFO [train.py:715] (7/8) Epoch 1, batch 9850, loss[loss=0.2015, simple_loss=0.259, pruned_loss=0.07196, over 4798.00 frames.], tot_loss[loss=0.1897, simple_loss=0.2519, pruned_loss=0.06381, over 971418.63 frames.], batch size: 18, lr: 9.86e-04 +2022-05-03 21:40:37,079 INFO [train.py:715] (7/8) Epoch 1, batch 9900, loss[loss=0.1502, simple_loss=0.2209, pruned_loss=0.03975, over 4815.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2518, pruned_loss=0.063, over 971923.44 frames.], batch size: 27, lr: 9.85e-04 +2022-05-03 21:41:17,364 INFO [train.py:715] (7/8) Epoch 1, batch 9950, loss[loss=0.1411, simple_loss=0.2101, pruned_loss=0.03606, over 4807.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2508, pruned_loss=0.06275, over 971470.43 frames.], batch size: 25, lr: 9.85e-04 +2022-05-03 21:41:57,266 INFO [train.py:715] (7/8) Epoch 1, batch 10000, loss[loss=0.2062, simple_loss=0.2692, pruned_loss=0.07155, over 4876.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2513, pruned_loss=0.06299, over 971579.13 frames.], batch size: 39, lr: 9.84e-04 +2022-05-03 21:42:36,321 INFO [train.py:715] (7/8) Epoch 1, batch 10050, loss[loss=0.2031, simple_loss=0.2543, pruned_loss=0.07592, over 4952.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2509, pruned_loss=0.0628, over 972067.73 frames.], batch size: 24, lr: 9.83e-04 +2022-05-03 21:43:15,954 INFO [train.py:715] (7/8) Epoch 1, batch 10100, loss[loss=0.193, simple_loss=0.2542, pruned_loss=0.06587, over 4980.00 frames.], tot_loss[loss=0.1884, simple_loss=0.251, pruned_loss=0.0629, over 972765.06 frames.], batch size: 25, lr: 9.83e-04 +2022-05-03 21:43:55,971 INFO [train.py:715] (7/8) Epoch 1, batch 10150, loss[loss=0.2304, simple_loss=0.2929, pruned_loss=0.08397, over 4811.00 frames.], tot_loss[loss=0.1882, simple_loss=0.2509, pruned_loss=0.0628, over 972005.85 frames.], batch size: 21, lr: 9.82e-04 +2022-05-03 21:44:35,082 INFO [train.py:715] (7/8) Epoch 1, batch 10200, loss[loss=0.1477, simple_loss=0.2236, pruned_loss=0.03589, over 4791.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2499, pruned_loss=0.06213, over 972755.05 frames.], batch size: 17, lr: 9.82e-04 +2022-05-03 21:45:14,035 INFO [train.py:715] (7/8) Epoch 1, batch 10250, loss[loss=0.1761, simple_loss=0.2485, pruned_loss=0.05182, over 4795.00 frames.], tot_loss[loss=0.1871, simple_loss=0.25, pruned_loss=0.06213, over 973257.11 frames.], batch size: 24, lr: 9.81e-04 +2022-05-03 21:45:54,206 INFO [train.py:715] (7/8) Epoch 1, batch 10300, loss[loss=0.1956, simple_loss=0.2596, pruned_loss=0.06583, over 4868.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2504, pruned_loss=0.06243, over 973527.96 frames.], batch size: 13, lr: 9.81e-04 +2022-05-03 21:46:34,447 INFO [train.py:715] (7/8) Epoch 1, batch 
10350, loss[loss=0.158, simple_loss=0.2248, pruned_loss=0.04558, over 4766.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2503, pruned_loss=0.06237, over 973252.48 frames.], batch size: 14, lr: 9.80e-04 +2022-05-03 21:47:13,907 INFO [train.py:715] (7/8) Epoch 1, batch 10400, loss[loss=0.2059, simple_loss=0.2548, pruned_loss=0.07854, over 4915.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2502, pruned_loss=0.06274, over 973011.88 frames.], batch size: 18, lr: 9.80e-04 +2022-05-03 21:47:53,946 INFO [train.py:715] (7/8) Epoch 1, batch 10450, loss[loss=0.1702, simple_loss=0.2329, pruned_loss=0.05374, over 4692.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2513, pruned_loss=0.06341, over 972109.49 frames.], batch size: 15, lr: 9.79e-04 +2022-05-03 21:48:34,478 INFO [train.py:715] (7/8) Epoch 1, batch 10500, loss[loss=0.174, simple_loss=0.2371, pruned_loss=0.05541, over 4875.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2508, pruned_loss=0.06289, over 971758.47 frames.], batch size: 16, lr: 9.79e-04 +2022-05-03 21:49:13,763 INFO [train.py:715] (7/8) Epoch 1, batch 10550, loss[loss=0.2397, simple_loss=0.2899, pruned_loss=0.09482, over 4821.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2503, pruned_loss=0.06269, over 971317.27 frames.], batch size: 25, lr: 9.78e-04 +2022-05-03 21:49:52,638 INFO [train.py:715] (7/8) Epoch 1, batch 10600, loss[loss=0.1755, simple_loss=0.2457, pruned_loss=0.05261, over 4781.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2521, pruned_loss=0.06355, over 972280.69 frames.], batch size: 18, lr: 9.78e-04 +2022-05-03 21:50:33,177 INFO [train.py:715] (7/8) Epoch 1, batch 10650, loss[loss=0.1682, simple_loss=0.2314, pruned_loss=0.05253, over 4794.00 frames.], tot_loss[loss=0.1882, simple_loss=0.251, pruned_loss=0.06268, over 972572.03 frames.], batch size: 24, lr: 9.77e-04 +2022-05-03 21:51:13,727 INFO [train.py:715] (7/8) Epoch 1, batch 10700, loss[loss=0.1898, simple_loss=0.2559, pruned_loss=0.06187, over 4977.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2515, pruned_loss=0.06319, over 973214.88 frames.], batch size: 15, lr: 9.76e-04 +2022-05-03 21:51:52,993 INFO [train.py:715] (7/8) Epoch 1, batch 10750, loss[loss=0.2254, simple_loss=0.2844, pruned_loss=0.0832, over 4934.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2515, pruned_loss=0.06289, over 973521.40 frames.], batch size: 21, lr: 9.76e-04 +2022-05-03 21:52:32,274 INFO [train.py:715] (7/8) Epoch 1, batch 10800, loss[loss=0.1799, simple_loss=0.2387, pruned_loss=0.06056, over 4845.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2518, pruned_loss=0.06322, over 972651.52 frames.], batch size: 30, lr: 9.75e-04 +2022-05-03 21:53:12,732 INFO [train.py:715] (7/8) Epoch 1, batch 10850, loss[loss=0.1574, simple_loss=0.2143, pruned_loss=0.05022, over 4840.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2509, pruned_loss=0.06261, over 972077.55 frames.], batch size: 13, lr: 9.75e-04 +2022-05-03 21:53:52,220 INFO [train.py:715] (7/8) Epoch 1, batch 10900, loss[loss=0.2069, simple_loss=0.2637, pruned_loss=0.07507, over 4734.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2513, pruned_loss=0.06311, over 972179.71 frames.], batch size: 16, lr: 9.74e-04 +2022-05-03 21:54:30,709 INFO [train.py:715] (7/8) Epoch 1, batch 10950, loss[loss=0.1701, simple_loss=0.2331, pruned_loss=0.05353, over 4984.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2512, pruned_loss=0.0632, over 972508.01 frames.], batch size: 35, lr: 9.74e-04 +2022-05-03 21:55:10,755 INFO [train.py:715] (7/8) Epoch 1, batch 11000, 
loss[loss=0.2228, simple_loss=0.2793, pruned_loss=0.08312, over 4852.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2519, pruned_loss=0.06366, over 972804.57 frames.], batch size: 20, lr: 9.73e-04 +2022-05-03 21:55:50,515 INFO [train.py:715] (7/8) Epoch 1, batch 11050, loss[loss=0.1864, simple_loss=0.2663, pruned_loss=0.05324, over 4966.00 frames.], tot_loss[loss=0.1885, simple_loss=0.2516, pruned_loss=0.06266, over 972866.69 frames.], batch size: 24, lr: 9.73e-04 +2022-05-03 21:56:29,270 INFO [train.py:715] (7/8) Epoch 1, batch 11100, loss[loss=0.1688, simple_loss=0.2296, pruned_loss=0.05402, over 4889.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2502, pruned_loss=0.06215, over 973121.05 frames.], batch size: 17, lr: 9.72e-04 +2022-05-03 21:57:08,679 INFO [train.py:715] (7/8) Epoch 1, batch 11150, loss[loss=0.1688, simple_loss=0.2381, pruned_loss=0.04976, over 4942.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2498, pruned_loss=0.06173, over 972008.57 frames.], batch size: 29, lr: 9.72e-04 +2022-05-03 21:57:48,799 INFO [train.py:715] (7/8) Epoch 1, batch 11200, loss[loss=0.1891, simple_loss=0.2582, pruned_loss=0.05997, over 4921.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2494, pruned_loss=0.06185, over 973109.28 frames.], batch size: 18, lr: 9.71e-04 +2022-05-03 21:58:28,395 INFO [train.py:715] (7/8) Epoch 1, batch 11250, loss[loss=0.19, simple_loss=0.2523, pruned_loss=0.06388, over 4868.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2495, pruned_loss=0.06232, over 972752.99 frames.], batch size: 16, lr: 9.71e-04 +2022-05-03 21:59:06,585 INFO [train.py:715] (7/8) Epoch 1, batch 11300, loss[loss=0.1246, simple_loss=0.1999, pruned_loss=0.02469, over 4744.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2489, pruned_loss=0.06206, over 972790.73 frames.], batch size: 12, lr: 9.70e-04 +2022-05-03 21:59:46,988 INFO [train.py:715] (7/8) Epoch 1, batch 11350, loss[loss=0.1776, simple_loss=0.2455, pruned_loss=0.0548, over 4931.00 frames.], tot_loss[loss=0.1869, simple_loss=0.2494, pruned_loss=0.06221, over 972445.41 frames.], batch size: 23, lr: 9.70e-04 +2022-05-03 22:00:26,694 INFO [train.py:715] (7/8) Epoch 1, batch 11400, loss[loss=0.208, simple_loss=0.2691, pruned_loss=0.07342, over 4903.00 frames.], tot_loss[loss=0.189, simple_loss=0.2514, pruned_loss=0.06328, over 972987.49 frames.], batch size: 19, lr: 9.69e-04 +2022-05-03 22:01:04,861 INFO [train.py:715] (7/8) Epoch 1, batch 11450, loss[loss=0.1711, simple_loss=0.2283, pruned_loss=0.05694, over 4858.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2499, pruned_loss=0.06239, over 973220.05 frames.], batch size: 12, lr: 9.69e-04 +2022-05-03 22:01:44,071 INFO [train.py:715] (7/8) Epoch 1, batch 11500, loss[loss=0.2374, simple_loss=0.2898, pruned_loss=0.09249, over 4763.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2499, pruned_loss=0.06253, over 972955.79 frames.], batch size: 18, lr: 9.68e-04 +2022-05-03 22:02:23,962 INFO [train.py:715] (7/8) Epoch 1, batch 11550, loss[loss=0.1793, simple_loss=0.2455, pruned_loss=0.05653, over 4930.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2487, pruned_loss=0.0614, over 973341.05 frames.], batch size: 29, lr: 9.68e-04 +2022-05-03 22:03:03,165 INFO [train.py:715] (7/8) Epoch 1, batch 11600, loss[loss=0.16, simple_loss=0.2331, pruned_loss=0.04343, over 4949.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2495, pruned_loss=0.06233, over 972752.23 frames.], batch size: 21, lr: 9.67e-04 +2022-05-03 22:03:41,497 INFO [train.py:715] (7/8) Epoch 1, batch 11650, loss[loss=0.1546, 
simple_loss=0.2222, pruned_loss=0.04354, over 4942.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2498, pruned_loss=0.06229, over 973415.08 frames.], batch size: 35, lr: 9.67e-04 +2022-05-03 22:04:21,435 INFO [train.py:715] (7/8) Epoch 1, batch 11700, loss[loss=0.1971, simple_loss=0.2568, pruned_loss=0.06869, over 4835.00 frames.], tot_loss[loss=0.1867, simple_loss=0.2497, pruned_loss=0.06182, over 974068.88 frames.], batch size: 27, lr: 9.66e-04 +2022-05-03 22:05:01,251 INFO [train.py:715] (7/8) Epoch 1, batch 11750, loss[loss=0.246, simple_loss=0.3053, pruned_loss=0.09339, over 4686.00 frames.], tot_loss[loss=0.1867, simple_loss=0.2498, pruned_loss=0.06177, over 973118.88 frames.], batch size: 15, lr: 9.66e-04 +2022-05-03 22:05:40,554 INFO [train.py:715] (7/8) Epoch 1, batch 11800, loss[loss=0.2022, simple_loss=0.2568, pruned_loss=0.07381, over 4683.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2514, pruned_loss=0.06314, over 973626.81 frames.], batch size: 15, lr: 9.65e-04 +2022-05-03 22:06:19,252 INFO [train.py:715] (7/8) Epoch 1, batch 11850, loss[loss=0.2025, simple_loss=0.2539, pruned_loss=0.07561, over 4833.00 frames.], tot_loss[loss=0.189, simple_loss=0.2517, pruned_loss=0.06314, over 972671.55 frames.], batch size: 15, lr: 9.65e-04 +2022-05-03 22:06:59,290 INFO [train.py:715] (7/8) Epoch 1, batch 11900, loss[loss=0.1841, simple_loss=0.2368, pruned_loss=0.06571, over 4769.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2515, pruned_loss=0.06342, over 973086.33 frames.], batch size: 17, lr: 9.64e-04 +2022-05-03 22:07:38,637 INFO [train.py:715] (7/8) Epoch 1, batch 11950, loss[loss=0.17, simple_loss=0.2371, pruned_loss=0.05146, over 4851.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2505, pruned_loss=0.06256, over 972595.94 frames.], batch size: 32, lr: 9.63e-04 +2022-05-03 22:08:17,117 INFO [train.py:715] (7/8) Epoch 1, batch 12000, loss[loss=0.1921, simple_loss=0.2616, pruned_loss=0.06128, over 4811.00 frames.], tot_loss[loss=0.186, simple_loss=0.2494, pruned_loss=0.06136, over 971958.58 frames.], batch size: 25, lr: 9.63e-04 +2022-05-03 22:08:17,117 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 22:08:27,631 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1244, simple_loss=0.2116, pruned_loss=0.01858, over 914524.00 frames. 
+2022-05-03 22:09:06,363 INFO [train.py:715] (7/8) Epoch 1, batch 12050, loss[loss=0.2523, simple_loss=0.3173, pruned_loss=0.09371, over 4768.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2504, pruned_loss=0.06247, over 971920.63 frames.], batch size: 14, lr: 9.62e-04 +2022-05-03 22:09:46,993 INFO [train.py:715] (7/8) Epoch 1, batch 12100, loss[loss=0.203, simple_loss=0.2721, pruned_loss=0.06699, over 4879.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2515, pruned_loss=0.06311, over 972219.54 frames.], batch size: 32, lr: 9.62e-04 +2022-05-03 22:10:27,672 INFO [train.py:715] (7/8) Epoch 1, batch 12150, loss[loss=0.1839, simple_loss=0.2575, pruned_loss=0.05519, over 4940.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2515, pruned_loss=0.06283, over 972887.41 frames.], batch size: 23, lr: 9.61e-04 +2022-05-03 22:11:06,638 INFO [train.py:715] (7/8) Epoch 1, batch 12200, loss[loss=0.1856, simple_loss=0.2436, pruned_loss=0.06385, over 4848.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2513, pruned_loss=0.06271, over 973162.80 frames.], batch size: 20, lr: 9.61e-04 +2022-05-03 22:11:46,547 INFO [train.py:715] (7/8) Epoch 1, batch 12250, loss[loss=0.1918, simple_loss=0.2504, pruned_loss=0.06666, over 4787.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2508, pruned_loss=0.06242, over 972540.53 frames.], batch size: 18, lr: 9.60e-04 +2022-05-03 22:12:27,159 INFO [train.py:715] (7/8) Epoch 1, batch 12300, loss[loss=0.2139, simple_loss=0.2782, pruned_loss=0.07482, over 4982.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2512, pruned_loss=0.06203, over 973202.32 frames.], batch size: 26, lr: 9.60e-04 +2022-05-03 22:13:06,775 INFO [train.py:715] (7/8) Epoch 1, batch 12350, loss[loss=0.2599, simple_loss=0.3065, pruned_loss=0.1066, over 4816.00 frames.], tot_loss[loss=0.1868, simple_loss=0.2504, pruned_loss=0.06158, over 972521.19 frames.], batch size: 25, lr: 9.59e-04 +2022-05-03 22:13:45,540 INFO [train.py:715] (7/8) Epoch 1, batch 12400, loss[loss=0.2077, simple_loss=0.2756, pruned_loss=0.06983, over 4849.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2495, pruned_loss=0.06141, over 972181.90 frames.], batch size: 30, lr: 9.59e-04 +2022-05-03 22:14:25,689 INFO [train.py:715] (7/8) Epoch 1, batch 12450, loss[loss=0.1487, simple_loss=0.2121, pruned_loss=0.0426, over 4833.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2491, pruned_loss=0.06152, over 971852.07 frames.], batch size: 13, lr: 9.58e-04 +2022-05-03 22:15:05,671 INFO [train.py:715] (7/8) Epoch 1, batch 12500, loss[loss=0.1674, simple_loss=0.232, pruned_loss=0.05142, over 4737.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2494, pruned_loss=0.06187, over 971706.76 frames.], batch size: 16, lr: 9.58e-04 +2022-05-03 22:15:44,878 INFO [train.py:715] (7/8) Epoch 1, batch 12550, loss[loss=0.2064, simple_loss=0.2757, pruned_loss=0.06851, over 4865.00 frames.], tot_loss[loss=0.1885, simple_loss=0.251, pruned_loss=0.06299, over 971480.70 frames.], batch size: 20, lr: 9.57e-04 +2022-05-03 22:16:24,275 INFO [train.py:715] (7/8) Epoch 1, batch 12600, loss[loss=0.1895, simple_loss=0.2455, pruned_loss=0.06677, over 4936.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2512, pruned_loss=0.06281, over 971768.32 frames.], batch size: 21, lr: 9.57e-04 +2022-05-03 22:17:04,553 INFO [train.py:715] (7/8) Epoch 1, batch 12650, loss[loss=0.197, simple_loss=0.2554, pruned_loss=0.06926, over 4787.00 frames.], tot_loss[loss=0.1885, simple_loss=0.2513, pruned_loss=0.06288, over 971234.71 frames.], batch size: 14, lr: 9.56e-04 +2022-05-03 22:17:43,555 
INFO [train.py:715] (7/8) Epoch 1, batch 12700, loss[loss=0.2194, simple_loss=0.2661, pruned_loss=0.08633, over 4866.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2532, pruned_loss=0.06413, over 970549.73 frames.], batch size: 39, lr: 9.56e-04 +2022-05-03 22:18:22,952 INFO [train.py:715] (7/8) Epoch 1, batch 12750, loss[loss=0.1709, simple_loss=0.2408, pruned_loss=0.05053, over 4915.00 frames.], tot_loss[loss=0.1897, simple_loss=0.2519, pruned_loss=0.06375, over 970813.87 frames.], batch size: 17, lr: 9.55e-04 +2022-05-03 22:19:03,049 INFO [train.py:715] (7/8) Epoch 1, batch 12800, loss[loss=0.1984, simple_loss=0.2484, pruned_loss=0.07418, over 4968.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2514, pruned_loss=0.06339, over 972135.56 frames.], batch size: 24, lr: 9.55e-04 +2022-05-03 22:19:42,873 INFO [train.py:715] (7/8) Epoch 1, batch 12850, loss[loss=0.1898, simple_loss=0.2425, pruned_loss=0.06848, over 4811.00 frames.], tot_loss[loss=0.1874, simple_loss=0.2502, pruned_loss=0.06231, over 973408.88 frames.], batch size: 26, lr: 9.54e-04 +2022-05-03 22:20:21,823 INFO [train.py:715] (7/8) Epoch 1, batch 12900, loss[loss=0.1985, simple_loss=0.266, pruned_loss=0.06552, over 4903.00 frames.], tot_loss[loss=0.1879, simple_loss=0.2508, pruned_loss=0.06249, over 973749.15 frames.], batch size: 39, lr: 9.54e-04 +2022-05-03 22:21:01,115 INFO [train.py:715] (7/8) Epoch 1, batch 12950, loss[loss=0.1681, simple_loss=0.2265, pruned_loss=0.05488, over 4808.00 frames.], tot_loss[loss=0.187, simple_loss=0.2502, pruned_loss=0.06191, over 972680.14 frames.], batch size: 21, lr: 9.53e-04 +2022-05-03 22:21:41,532 INFO [train.py:715] (7/8) Epoch 1, batch 13000, loss[loss=0.2294, simple_loss=0.2844, pruned_loss=0.08716, over 4919.00 frames.], tot_loss[loss=0.1868, simple_loss=0.2502, pruned_loss=0.06174, over 972750.81 frames.], batch size: 17, lr: 9.53e-04 +2022-05-03 22:22:21,100 INFO [train.py:715] (7/8) Epoch 1, batch 13050, loss[loss=0.1697, simple_loss=0.2395, pruned_loss=0.04988, over 4842.00 frames.], tot_loss[loss=0.187, simple_loss=0.2503, pruned_loss=0.06187, over 972369.77 frames.], batch size: 13, lr: 9.52e-04 +2022-05-03 22:23:01,175 INFO [train.py:715] (7/8) Epoch 1, batch 13100, loss[loss=0.1572, simple_loss=0.2153, pruned_loss=0.04956, over 4849.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2503, pruned_loss=0.06207, over 972028.34 frames.], batch size: 15, lr: 9.52e-04 +2022-05-03 22:23:41,364 INFO [train.py:715] (7/8) Epoch 1, batch 13150, loss[loss=0.1977, simple_loss=0.2684, pruned_loss=0.06351, over 4929.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2516, pruned_loss=0.06285, over 972257.80 frames.], batch size: 29, lr: 9.51e-04 +2022-05-03 22:24:23,882 INFO [train.py:715] (7/8) Epoch 1, batch 13200, loss[loss=0.2121, simple_loss=0.272, pruned_loss=0.07611, over 4787.00 frames.], tot_loss[loss=0.1887, simple_loss=0.251, pruned_loss=0.06317, over 971615.02 frames.], batch size: 17, lr: 9.51e-04 +2022-05-03 22:25:03,006 INFO [train.py:715] (7/8) Epoch 1, batch 13250, loss[loss=0.1568, simple_loss=0.2313, pruned_loss=0.04113, over 4789.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2515, pruned_loss=0.06298, over 972141.15 frames.], batch size: 18, lr: 9.51e-04 +2022-05-03 22:25:41,755 INFO [train.py:715] (7/8) Epoch 1, batch 13300, loss[loss=0.2071, simple_loss=0.2779, pruned_loss=0.06814, over 4864.00 frames.], tot_loss[loss=0.187, simple_loss=0.2501, pruned_loss=0.06198, over 972882.28 frames.], batch size: 20, lr: 9.50e-04 +2022-05-03 22:26:21,983 INFO [train.py:715] 
(7/8) Epoch 1, batch 13350, loss[loss=0.1575, simple_loss=0.229, pruned_loss=0.04299, over 4811.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2504, pruned_loss=0.0623, over 972966.58 frames.], batch size: 26, lr: 9.50e-04 +2022-05-03 22:27:01,388 INFO [train.py:715] (7/8) Epoch 1, batch 13400, loss[loss=0.1632, simple_loss=0.2282, pruned_loss=0.0491, over 4988.00 frames.], tot_loss[loss=0.1869, simple_loss=0.2496, pruned_loss=0.06208, over 973194.39 frames.], batch size: 28, lr: 9.49e-04 +2022-05-03 22:27:41,360 INFO [train.py:715] (7/8) Epoch 1, batch 13450, loss[loss=0.211, simple_loss=0.2619, pruned_loss=0.08004, over 4786.00 frames.], tot_loss[loss=0.1876, simple_loss=0.25, pruned_loss=0.0626, over 971389.33 frames.], batch size: 14, lr: 9.49e-04 +2022-05-03 22:28:21,068 INFO [train.py:715] (7/8) Epoch 1, batch 13500, loss[loss=0.1913, simple_loss=0.2524, pruned_loss=0.0651, over 4751.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2507, pruned_loss=0.06295, over 971417.68 frames.], batch size: 19, lr: 9.48e-04 +2022-05-03 22:29:01,040 INFO [train.py:715] (7/8) Epoch 1, batch 13550, loss[loss=0.1795, simple_loss=0.2449, pruned_loss=0.05707, over 4799.00 frames.], tot_loss[loss=0.1897, simple_loss=0.2517, pruned_loss=0.06383, over 971503.29 frames.], batch size: 24, lr: 9.48e-04 +2022-05-03 22:29:39,300 INFO [train.py:715] (7/8) Epoch 1, batch 13600, loss[loss=0.2206, simple_loss=0.2684, pruned_loss=0.08643, over 4702.00 frames.], tot_loss[loss=0.1903, simple_loss=0.2524, pruned_loss=0.06412, over 971879.72 frames.], batch size: 15, lr: 9.47e-04 +2022-05-03 22:30:18,510 INFO [train.py:715] (7/8) Epoch 1, batch 13650, loss[loss=0.1809, simple_loss=0.2428, pruned_loss=0.05955, over 4796.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2511, pruned_loss=0.06358, over 971954.70 frames.], batch size: 17, lr: 9.47e-04 +2022-05-03 22:30:58,738 INFO [train.py:715] (7/8) Epoch 1, batch 13700, loss[loss=0.2376, simple_loss=0.2866, pruned_loss=0.09428, over 4988.00 frames.], tot_loss[loss=0.189, simple_loss=0.2507, pruned_loss=0.06367, over 973224.59 frames.], batch size: 15, lr: 9.46e-04 +2022-05-03 22:31:38,137 INFO [train.py:715] (7/8) Epoch 1, batch 13750, loss[loss=0.1742, simple_loss=0.2276, pruned_loss=0.06043, over 4794.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2506, pruned_loss=0.06333, over 972500.61 frames.], batch size: 12, lr: 9.46e-04 +2022-05-03 22:32:17,281 INFO [train.py:715] (7/8) Epoch 1, batch 13800, loss[loss=0.1749, simple_loss=0.2573, pruned_loss=0.04625, over 4818.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2512, pruned_loss=0.06399, over 972360.04 frames.], batch size: 26, lr: 9.45e-04 +2022-05-03 22:32:56,972 INFO [train.py:715] (7/8) Epoch 1, batch 13850, loss[loss=0.1736, simple_loss=0.2391, pruned_loss=0.05406, over 4987.00 frames.], tot_loss[loss=0.1896, simple_loss=0.2512, pruned_loss=0.06396, over 972548.40 frames.], batch size: 33, lr: 9.45e-04 +2022-05-03 22:33:36,814 INFO [train.py:715] (7/8) Epoch 1, batch 13900, loss[loss=0.1652, simple_loss=0.2348, pruned_loss=0.04786, over 4859.00 frames.], tot_loss[loss=0.189, simple_loss=0.2504, pruned_loss=0.0638, over 972708.66 frames.], batch size: 20, lr: 9.44e-04 +2022-05-03 22:34:15,311 INFO [train.py:715] (7/8) Epoch 1, batch 13950, loss[loss=0.1766, simple_loss=0.2339, pruned_loss=0.05968, over 4786.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2502, pruned_loss=0.06353, over 974340.05 frames.], batch size: 14, lr: 9.44e-04 +2022-05-03 22:34:54,570 INFO [train.py:715] (7/8) Epoch 1, batch 14000, 
loss[loss=0.1734, simple_loss=0.2295, pruned_loss=0.05867, over 4943.00 frames.], tot_loss[loss=0.1893, simple_loss=0.251, pruned_loss=0.06381, over 974843.96 frames.], batch size: 14, lr: 9.43e-04 +2022-05-03 22:35:34,718 INFO [train.py:715] (7/8) Epoch 1, batch 14050, loss[loss=0.2551, simple_loss=0.3157, pruned_loss=0.09729, over 4962.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2505, pruned_loss=0.06335, over 974391.60 frames.], batch size: 39, lr: 9.43e-04 +2022-05-03 22:36:13,519 INFO [train.py:715] (7/8) Epoch 1, batch 14100, loss[loss=0.1993, simple_loss=0.2761, pruned_loss=0.06122, over 4938.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2505, pruned_loss=0.06318, over 973441.94 frames.], batch size: 29, lr: 9.42e-04 +2022-05-03 22:36:52,752 INFO [train.py:715] (7/8) Epoch 1, batch 14150, loss[loss=0.1634, simple_loss=0.2407, pruned_loss=0.04307, over 4893.00 frames.], tot_loss[loss=0.1898, simple_loss=0.2516, pruned_loss=0.06399, over 973374.07 frames.], batch size: 16, lr: 9.42e-04 +2022-05-03 22:37:31,985 INFO [train.py:715] (7/8) Epoch 1, batch 14200, loss[loss=0.1473, simple_loss=0.2128, pruned_loss=0.04092, over 4975.00 frames.], tot_loss[loss=0.1889, simple_loss=0.251, pruned_loss=0.06335, over 973817.34 frames.], batch size: 28, lr: 9.41e-04 +2022-05-03 22:38:12,096 INFO [train.py:715] (7/8) Epoch 1, batch 14250, loss[loss=0.1716, simple_loss=0.2317, pruned_loss=0.05573, over 4824.00 frames.], tot_loss[loss=0.1869, simple_loss=0.2496, pruned_loss=0.06207, over 973410.09 frames.], batch size: 26, lr: 9.41e-04 +2022-05-03 22:38:50,575 INFO [train.py:715] (7/8) Epoch 1, batch 14300, loss[loss=0.1958, simple_loss=0.2617, pruned_loss=0.06499, over 4798.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2487, pruned_loss=0.06108, over 972869.89 frames.], batch size: 21, lr: 9.40e-04 +2022-05-03 22:39:29,562 INFO [train.py:715] (7/8) Epoch 1, batch 14350, loss[loss=0.1647, simple_loss=0.2389, pruned_loss=0.0453, over 4906.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2501, pruned_loss=0.06268, over 971802.42 frames.], batch size: 18, lr: 9.40e-04 +2022-05-03 22:40:09,912 INFO [train.py:715] (7/8) Epoch 1, batch 14400, loss[loss=0.2165, simple_loss=0.2696, pruned_loss=0.08174, over 4856.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2498, pruned_loss=0.06231, over 971235.79 frames.], batch size: 32, lr: 9.39e-04 +2022-05-03 22:40:48,730 INFO [train.py:715] (7/8) Epoch 1, batch 14450, loss[loss=0.2108, simple_loss=0.2623, pruned_loss=0.07964, over 4929.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2503, pruned_loss=0.06261, over 971833.48 frames.], batch size: 23, lr: 9.39e-04 +2022-05-03 22:41:28,255 INFO [train.py:715] (7/8) Epoch 1, batch 14500, loss[loss=0.2195, simple_loss=0.2602, pruned_loss=0.08943, over 4907.00 frames.], tot_loss[loss=0.188, simple_loss=0.2503, pruned_loss=0.06283, over 972548.87 frames.], batch size: 17, lr: 9.39e-04 +2022-05-03 22:42:08,353 INFO [train.py:715] (7/8) Epoch 1, batch 14550, loss[loss=0.1591, simple_loss=0.2272, pruned_loss=0.04546, over 4771.00 frames.], tot_loss[loss=0.1889, simple_loss=0.2509, pruned_loss=0.06343, over 973080.22 frames.], batch size: 18, lr: 9.38e-04 +2022-05-03 22:42:47,869 INFO [train.py:715] (7/8) Epoch 1, batch 14600, loss[loss=0.1786, simple_loss=0.2343, pruned_loss=0.0615, over 4847.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2496, pruned_loss=0.06256, over 973742.98 frames.], batch size: 15, lr: 9.38e-04 +2022-05-03 22:43:26,825 INFO [train.py:715] (7/8) Epoch 1, batch 14650, loss[loss=0.1766, 
simple_loss=0.2341, pruned_loss=0.05953, over 4729.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2501, pruned_loss=0.06275, over 972829.70 frames.], batch size: 16, lr: 9.37e-04 +2022-05-03 22:44:05,665 INFO [train.py:715] (7/8) Epoch 1, batch 14700, loss[loss=0.1954, simple_loss=0.2629, pruned_loss=0.06391, over 4736.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2495, pruned_loss=0.06251, over 972802.39 frames.], batch size: 16, lr: 9.37e-04 +2022-05-03 22:44:45,791 INFO [train.py:715] (7/8) Epoch 1, batch 14750, loss[loss=0.1752, simple_loss=0.2537, pruned_loss=0.04833, over 4811.00 frames.], tot_loss[loss=0.1859, simple_loss=0.2484, pruned_loss=0.06169, over 972559.76 frames.], batch size: 25, lr: 9.36e-04 +2022-05-03 22:45:24,940 INFO [train.py:715] (7/8) Epoch 1, batch 14800, loss[loss=0.1688, simple_loss=0.2368, pruned_loss=0.05037, over 4900.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2476, pruned_loss=0.0611, over 972404.75 frames.], batch size: 22, lr: 9.36e-04 +2022-05-03 22:46:04,498 INFO [train.py:715] (7/8) Epoch 1, batch 14850, loss[loss=0.1786, simple_loss=0.2502, pruned_loss=0.05347, over 4851.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2488, pruned_loss=0.06189, over 971868.55 frames.], batch size: 34, lr: 9.35e-04 +2022-05-03 22:46:43,816 INFO [train.py:715] (7/8) Epoch 1, batch 14900, loss[loss=0.2053, simple_loss=0.2307, pruned_loss=0.08997, over 4791.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2495, pruned_loss=0.06248, over 971812.93 frames.], batch size: 12, lr: 9.35e-04 +2022-05-03 22:47:22,420 INFO [train.py:715] (7/8) Epoch 1, batch 14950, loss[loss=0.1952, simple_loss=0.2467, pruned_loss=0.07183, over 4923.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2496, pruned_loss=0.06285, over 972056.37 frames.], batch size: 23, lr: 9.34e-04 +2022-05-03 22:48:02,038 INFO [train.py:715] (7/8) Epoch 1, batch 15000, loss[loss=0.2049, simple_loss=0.2697, pruned_loss=0.07001, over 4883.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2503, pruned_loss=0.06309, over 972665.20 frames.], batch size: 22, lr: 9.34e-04 +2022-05-03 22:48:02,039 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 22:48:17,510 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1242, simple_loss=0.2115, pruned_loss=0.01842, over 914524.00 frames. 
+2022-05-03 22:48:57,644 INFO [train.py:715] (7/8) Epoch 1, batch 15050, loss[loss=0.2237, simple_loss=0.2627, pruned_loss=0.09233, over 4750.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2498, pruned_loss=0.06292, over 972358.40 frames.], batch size: 16, lr: 9.33e-04 +2022-05-03 22:49:37,563 INFO [train.py:715] (7/8) Epoch 1, batch 15100, loss[loss=0.2571, simple_loss=0.2953, pruned_loss=0.1094, over 4919.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2498, pruned_loss=0.06272, over 971772.08 frames.], batch size: 18, lr: 9.33e-04 +2022-05-03 22:50:18,096 INFO [train.py:715] (7/8) Epoch 1, batch 15150, loss[loss=0.1849, simple_loss=0.2431, pruned_loss=0.06338, over 4816.00 frames.], tot_loss[loss=0.188, simple_loss=0.2501, pruned_loss=0.06293, over 972009.65 frames.], batch size: 26, lr: 9.32e-04 +2022-05-03 22:50:57,479 INFO [train.py:715] (7/8) Epoch 1, batch 15200, loss[loss=0.1748, simple_loss=0.2443, pruned_loss=0.05265, over 4794.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2492, pruned_loss=0.06194, over 972138.60 frames.], batch size: 14, lr: 9.32e-04 +2022-05-03 22:51:37,957 INFO [train.py:715] (7/8) Epoch 1, batch 15250, loss[loss=0.2106, simple_loss=0.2666, pruned_loss=0.07726, over 4870.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2508, pruned_loss=0.06286, over 972648.04 frames.], batch size: 20, lr: 9.32e-04 +2022-05-03 22:52:17,873 INFO [train.py:715] (7/8) Epoch 1, batch 15300, loss[loss=0.1456, simple_loss=0.2183, pruned_loss=0.03642, over 4863.00 frames.], tot_loss[loss=0.1874, simple_loss=0.25, pruned_loss=0.06242, over 972262.67 frames.], batch size: 20, lr: 9.31e-04 +2022-05-03 22:52:57,765 INFO [train.py:715] (7/8) Epoch 1, batch 15350, loss[loss=0.2382, simple_loss=0.3001, pruned_loss=0.08816, over 4907.00 frames.], tot_loss[loss=0.1895, simple_loss=0.2522, pruned_loss=0.06338, over 973256.25 frames.], batch size: 17, lr: 9.31e-04 +2022-05-03 22:53:37,902 INFO [train.py:715] (7/8) Epoch 1, batch 15400, loss[loss=0.1459, simple_loss=0.2048, pruned_loss=0.04354, over 4713.00 frames.], tot_loss[loss=0.189, simple_loss=0.2517, pruned_loss=0.06319, over 973909.31 frames.], batch size: 15, lr: 9.30e-04 +2022-05-03 22:54:18,167 INFO [train.py:715] (7/8) Epoch 1, batch 15450, loss[loss=0.2293, simple_loss=0.2875, pruned_loss=0.08555, over 4972.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2509, pruned_loss=0.06321, over 973591.60 frames.], batch size: 28, lr: 9.30e-04 +2022-05-03 22:54:58,644 INFO [train.py:715] (7/8) Epoch 1, batch 15500, loss[loss=0.1396, simple_loss=0.2142, pruned_loss=0.03253, over 4773.00 frames.], tot_loss[loss=0.1874, simple_loss=0.25, pruned_loss=0.06241, over 972372.17 frames.], batch size: 14, lr: 9.29e-04 +2022-05-03 22:55:37,739 INFO [train.py:715] (7/8) Epoch 1, batch 15550, loss[loss=0.1595, simple_loss=0.2432, pruned_loss=0.03788, over 4786.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2504, pruned_loss=0.06192, over 972187.61 frames.], batch size: 18, lr: 9.29e-04 +2022-05-03 22:56:18,061 INFO [train.py:715] (7/8) Epoch 1, batch 15600, loss[loss=0.191, simple_loss=0.245, pruned_loss=0.06847, over 4979.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2515, pruned_loss=0.06302, over 971874.17 frames.], batch size: 14, lr: 9.28e-04 +2022-05-03 22:56:58,355 INFO [train.py:715] (7/8) Epoch 1, batch 15650, loss[loss=0.1657, simple_loss=0.246, pruned_loss=0.04265, over 4744.00 frames.], tot_loss[loss=0.188, simple_loss=0.2511, pruned_loss=0.06245, over 972567.44 frames.], batch size: 19, lr: 9.28e-04 +2022-05-03 22:57:38,276 INFO 
[train.py:715] (7/8) Epoch 1, batch 15700, loss[loss=0.1714, simple_loss=0.243, pruned_loss=0.04985, over 4739.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2511, pruned_loss=0.06258, over 973046.87 frames.], batch size: 16, lr: 9.27e-04 +2022-05-03 22:58:17,912 INFO [train.py:715] (7/8) Epoch 1, batch 15750, loss[loss=0.1649, simple_loss=0.2295, pruned_loss=0.05011, over 4957.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2506, pruned_loss=0.06218, over 973344.75 frames.], batch size: 21, lr: 9.27e-04 +2022-05-03 22:58:58,197 INFO [train.py:715] (7/8) Epoch 1, batch 15800, loss[loss=0.1605, simple_loss=0.2383, pruned_loss=0.04134, over 4937.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2508, pruned_loss=0.06234, over 973064.01 frames.], batch size: 23, lr: 9.27e-04 +2022-05-03 22:59:38,879 INFO [train.py:715] (7/8) Epoch 1, batch 15850, loss[loss=0.1743, simple_loss=0.2421, pruned_loss=0.05327, over 4743.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2508, pruned_loss=0.06212, over 972435.98 frames.], batch size: 16, lr: 9.26e-04 +2022-05-03 23:00:18,431 INFO [train.py:715] (7/8) Epoch 1, batch 15900, loss[loss=0.1748, simple_loss=0.2497, pruned_loss=0.04992, over 4700.00 frames.], tot_loss[loss=0.1867, simple_loss=0.2499, pruned_loss=0.06169, over 972369.71 frames.], batch size: 15, lr: 9.26e-04 +2022-05-03 23:00:58,072 INFO [train.py:715] (7/8) Epoch 1, batch 15950, loss[loss=0.1528, simple_loss=0.2116, pruned_loss=0.04702, over 4942.00 frames.], tot_loss[loss=0.1867, simple_loss=0.2499, pruned_loss=0.06178, over 972381.69 frames.], batch size: 18, lr: 9.25e-04 +2022-05-03 23:01:37,504 INFO [train.py:715] (7/8) Epoch 1, batch 16000, loss[loss=0.2037, simple_loss=0.2762, pruned_loss=0.06561, over 4802.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2493, pruned_loss=0.06194, over 972187.71 frames.], batch size: 21, lr: 9.25e-04 +2022-05-03 23:02:16,257 INFO [train.py:715] (7/8) Epoch 1, batch 16050, loss[loss=0.1736, simple_loss=0.243, pruned_loss=0.05217, over 4861.00 frames.], tot_loss[loss=0.1859, simple_loss=0.2491, pruned_loss=0.06132, over 972379.96 frames.], batch size: 30, lr: 9.24e-04 +2022-05-03 23:02:55,588 INFO [train.py:715] (7/8) Epoch 1, batch 16100, loss[loss=0.161, simple_loss=0.2347, pruned_loss=0.04358, over 4641.00 frames.], tot_loss[loss=0.186, simple_loss=0.2493, pruned_loss=0.06139, over 971872.81 frames.], batch size: 13, lr: 9.24e-04 +2022-05-03 23:03:35,237 INFO [train.py:715] (7/8) Epoch 1, batch 16150, loss[loss=0.1955, simple_loss=0.2587, pruned_loss=0.0661, over 4802.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2488, pruned_loss=0.06111, over 971712.86 frames.], batch size: 24, lr: 9.23e-04 +2022-05-03 23:04:15,423 INFO [train.py:715] (7/8) Epoch 1, batch 16200, loss[loss=0.1824, simple_loss=0.2473, pruned_loss=0.05875, over 4929.00 frames.], tot_loss[loss=0.1853, simple_loss=0.2487, pruned_loss=0.06095, over 972139.93 frames.], batch size: 17, lr: 9.23e-04 +2022-05-03 23:04:53,729 INFO [train.py:715] (7/8) Epoch 1, batch 16250, loss[loss=0.1774, simple_loss=0.2414, pruned_loss=0.05665, over 4775.00 frames.], tot_loss[loss=0.1856, simple_loss=0.2492, pruned_loss=0.06102, over 972431.06 frames.], batch size: 17, lr: 9.22e-04 +2022-05-03 23:05:33,196 INFO [train.py:715] (7/8) Epoch 1, batch 16300, loss[loss=0.2004, simple_loss=0.2581, pruned_loss=0.07133, over 4833.00 frames.], tot_loss[loss=0.1855, simple_loss=0.249, pruned_loss=0.06106, over 972241.53 frames.], batch size: 15, lr: 9.22e-04 +2022-05-03 23:06:12,744 INFO [train.py:715] (7/8) 
Epoch 1, batch 16350, loss[loss=0.1849, simple_loss=0.2547, pruned_loss=0.05757, over 4825.00 frames.], tot_loss[loss=0.186, simple_loss=0.2494, pruned_loss=0.06126, over 972365.22 frames.], batch size: 13, lr: 9.22e-04 +2022-05-03 23:06:51,401 INFO [train.py:715] (7/8) Epoch 1, batch 16400, loss[loss=0.1701, simple_loss=0.2351, pruned_loss=0.05248, over 4976.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2483, pruned_loss=0.06024, over 972348.63 frames.], batch size: 39, lr: 9.21e-04 +2022-05-03 23:07:30,894 INFO [train.py:715] (7/8) Epoch 1, batch 16450, loss[loss=0.1841, simple_loss=0.2533, pruned_loss=0.05751, over 4847.00 frames.], tot_loss[loss=0.1856, simple_loss=0.2492, pruned_loss=0.061, over 973096.75 frames.], batch size: 32, lr: 9.21e-04 +2022-05-03 23:08:10,544 INFO [train.py:715] (7/8) Epoch 1, batch 16500, loss[loss=0.1569, simple_loss=0.2312, pruned_loss=0.04123, over 4789.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2488, pruned_loss=0.06071, over 972785.06 frames.], batch size: 18, lr: 9.20e-04 +2022-05-03 23:08:50,456 INFO [train.py:715] (7/8) Epoch 1, batch 16550, loss[loss=0.1694, simple_loss=0.2433, pruned_loss=0.0477, over 4748.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2494, pruned_loss=0.06095, over 972943.36 frames.], batch size: 16, lr: 9.20e-04 +2022-05-03 23:09:28,843 INFO [train.py:715] (7/8) Epoch 1, batch 16600, loss[loss=0.1795, simple_loss=0.245, pruned_loss=0.05702, over 4928.00 frames.], tot_loss[loss=0.1859, simple_loss=0.2491, pruned_loss=0.06136, over 972811.24 frames.], batch size: 21, lr: 9.19e-04 +2022-05-03 23:10:09,005 INFO [train.py:715] (7/8) Epoch 1, batch 16650, loss[loss=0.2259, simple_loss=0.2817, pruned_loss=0.08506, over 4972.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2492, pruned_loss=0.06146, over 972994.07 frames.], batch size: 14, lr: 9.19e-04 +2022-05-03 23:10:48,686 INFO [train.py:715] (7/8) Epoch 1, batch 16700, loss[loss=0.188, simple_loss=0.2546, pruned_loss=0.06072, over 4972.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2496, pruned_loss=0.06139, over 972180.88 frames.], batch size: 25, lr: 9.18e-04 +2022-05-03 23:11:28,440 INFO [train.py:715] (7/8) Epoch 1, batch 16750, loss[loss=0.1748, simple_loss=0.2441, pruned_loss=0.0527, over 4931.00 frames.], tot_loss[loss=0.1868, simple_loss=0.2497, pruned_loss=0.0619, over 972855.21 frames.], batch size: 21, lr: 9.18e-04 +2022-05-03 23:12:08,282 INFO [train.py:715] (7/8) Epoch 1, batch 16800, loss[loss=0.1872, simple_loss=0.2429, pruned_loss=0.06577, over 4834.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2491, pruned_loss=0.06154, over 972953.32 frames.], batch size: 30, lr: 9.18e-04 +2022-05-03 23:12:47,926 INFO [train.py:715] (7/8) Epoch 1, batch 16850, loss[loss=0.1849, simple_loss=0.2574, pruned_loss=0.0562, over 4754.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2476, pruned_loss=0.06106, over 972070.97 frames.], batch size: 18, lr: 9.17e-04 +2022-05-03 23:13:27,911 INFO [train.py:715] (7/8) Epoch 1, batch 16900, loss[loss=0.2049, simple_loss=0.259, pruned_loss=0.07539, over 4747.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2485, pruned_loss=0.06114, over 971876.44 frames.], batch size: 16, lr: 9.17e-04 +2022-05-03 23:14:06,931 INFO [train.py:715] (7/8) Epoch 1, batch 16950, loss[loss=0.2039, simple_loss=0.259, pruned_loss=0.07441, over 4876.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2494, pruned_loss=0.06183, over 973244.31 frames.], batch size: 38, lr: 9.16e-04 +2022-05-03 23:14:46,349 INFO [train.py:715] (7/8) Epoch 1, batch 17000, 
loss[loss=0.1583, simple_loss=0.2371, pruned_loss=0.03974, over 4781.00 frames.], tot_loss[loss=0.1853, simple_loss=0.2487, pruned_loss=0.061, over 973292.20 frames.], batch size: 18, lr: 9.16e-04 +2022-05-03 23:15:26,359 INFO [train.py:715] (7/8) Epoch 1, batch 17050, loss[loss=0.1991, simple_loss=0.265, pruned_loss=0.06657, over 4977.00 frames.], tot_loss[loss=0.1857, simple_loss=0.249, pruned_loss=0.06119, over 973208.97 frames.], batch size: 24, lr: 9.15e-04 +2022-05-03 23:16:05,142 INFO [train.py:715] (7/8) Epoch 1, batch 17100, loss[loss=0.1665, simple_loss=0.2342, pruned_loss=0.04937, over 4811.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2502, pruned_loss=0.06205, over 972867.98 frames.], batch size: 25, lr: 9.15e-04 +2022-05-03 23:16:44,851 INFO [train.py:715] (7/8) Epoch 1, batch 17150, loss[loss=0.2344, simple_loss=0.2761, pruned_loss=0.09632, over 4876.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2513, pruned_loss=0.06318, over 972416.62 frames.], batch size: 30, lr: 9.15e-04 +2022-05-03 23:17:25,482 INFO [train.py:715] (7/8) Epoch 1, batch 17200, loss[loss=0.1731, simple_loss=0.235, pruned_loss=0.05558, over 4987.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2502, pruned_loss=0.06245, over 973580.96 frames.], batch size: 28, lr: 9.14e-04 +2022-05-03 23:18:05,280 INFO [train.py:715] (7/8) Epoch 1, batch 17250, loss[loss=0.1574, simple_loss=0.2303, pruned_loss=0.04222, over 4897.00 frames.], tot_loss[loss=0.1868, simple_loss=0.2493, pruned_loss=0.0621, over 974622.87 frames.], batch size: 17, lr: 9.14e-04 +2022-05-03 23:18:43,790 INFO [train.py:715] (7/8) Epoch 1, batch 17300, loss[loss=0.1566, simple_loss=0.2107, pruned_loss=0.05126, over 4830.00 frames.], tot_loss[loss=0.1873, simple_loss=0.2496, pruned_loss=0.0625, over 974145.16 frames.], batch size: 12, lr: 9.13e-04 +2022-05-03 23:19:23,814 INFO [train.py:715] (7/8) Epoch 1, batch 17350, loss[loss=0.1627, simple_loss=0.2221, pruned_loss=0.05161, over 4828.00 frames.], tot_loss[loss=0.1874, simple_loss=0.2497, pruned_loss=0.06252, over 974017.70 frames.], batch size: 13, lr: 9.13e-04 +2022-05-03 23:20:03,641 INFO [train.py:715] (7/8) Epoch 1, batch 17400, loss[loss=0.1371, simple_loss=0.2029, pruned_loss=0.0356, over 4814.00 frames.], tot_loss[loss=0.1858, simple_loss=0.2482, pruned_loss=0.06167, over 973496.91 frames.], batch size: 21, lr: 9.12e-04 +2022-05-03 23:20:42,901 INFO [train.py:715] (7/8) Epoch 1, batch 17450, loss[loss=0.2157, simple_loss=0.2818, pruned_loss=0.07475, over 4804.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2486, pruned_loss=0.06196, over 972897.66 frames.], batch size: 17, lr: 9.12e-04 +2022-05-03 23:21:23,299 INFO [train.py:715] (7/8) Epoch 1, batch 17500, loss[loss=0.1979, simple_loss=0.2525, pruned_loss=0.07169, over 4874.00 frames.], tot_loss[loss=0.186, simple_loss=0.2483, pruned_loss=0.06188, over 972447.10 frames.], batch size: 20, lr: 9.11e-04 +2022-05-03 23:22:03,723 INFO [train.py:715] (7/8) Epoch 1, batch 17550, loss[loss=0.1991, simple_loss=0.2514, pruned_loss=0.07343, over 4831.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2478, pruned_loss=0.06072, over 972100.12 frames.], batch size: 25, lr: 9.11e-04 +2022-05-03 23:22:44,349 INFO [train.py:715] (7/8) Epoch 1, batch 17600, loss[loss=0.2076, simple_loss=0.2731, pruned_loss=0.071, over 4821.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2483, pruned_loss=0.06092, over 971032.63 frames.], batch size: 25, lr: 9.11e-04 +2022-05-03 23:23:24,045 INFO [train.py:715] (7/8) Epoch 1, batch 17650, loss[loss=0.1918, 
simple_loss=0.247, pruned_loss=0.06825, over 4854.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2472, pruned_loss=0.06013, over 971466.58 frames.], batch size: 20, lr: 9.10e-04 +2022-05-03 23:24:04,741 INFO [train.py:715] (7/8) Epoch 1, batch 17700, loss[loss=0.1681, simple_loss=0.2288, pruned_loss=0.05372, over 4860.00 frames.], tot_loss[loss=0.1835, simple_loss=0.247, pruned_loss=0.06001, over 971855.31 frames.], batch size: 13, lr: 9.10e-04 +2022-05-03 23:24:44,987 INFO [train.py:715] (7/8) Epoch 1, batch 17750, loss[loss=0.1472, simple_loss=0.2127, pruned_loss=0.04083, over 4827.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2472, pruned_loss=0.0602, over 971600.57 frames.], batch size: 13, lr: 9.09e-04 +2022-05-03 23:25:24,520 INFO [train.py:715] (7/8) Epoch 1, batch 17800, loss[loss=0.1629, simple_loss=0.2162, pruned_loss=0.05482, over 4814.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2471, pruned_loss=0.0599, over 972310.35 frames.], batch size: 14, lr: 9.09e-04 +2022-05-03 23:26:04,927 INFO [train.py:715] (7/8) Epoch 1, batch 17850, loss[loss=0.2023, simple_loss=0.2702, pruned_loss=0.06717, over 4883.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2472, pruned_loss=0.06032, over 973207.04 frames.], batch size: 16, lr: 9.08e-04 +2022-05-03 23:26:44,326 INFO [train.py:715] (7/8) Epoch 1, batch 17900, loss[loss=0.2273, simple_loss=0.2626, pruned_loss=0.09595, over 4779.00 frames.], tot_loss[loss=0.1836, simple_loss=0.2473, pruned_loss=0.05999, over 972987.46 frames.], batch size: 14, lr: 9.08e-04 +2022-05-03 23:27:23,564 INFO [train.py:715] (7/8) Epoch 1, batch 17950, loss[loss=0.2242, simple_loss=0.2889, pruned_loss=0.07977, over 4758.00 frames.], tot_loss[loss=0.1856, simple_loss=0.2484, pruned_loss=0.06145, over 972565.39 frames.], batch size: 16, lr: 9.08e-04 +2022-05-03 23:28:02,860 INFO [train.py:715] (7/8) Epoch 1, batch 18000, loss[loss=0.168, simple_loss=0.2272, pruned_loss=0.05441, over 4810.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2475, pruned_loss=0.06064, over 972998.79 frames.], batch size: 21, lr: 9.07e-04 +2022-05-03 23:28:02,861 INFO [train.py:733] (7/8) Computing validation loss +2022-05-03 23:28:17,471 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.123, simple_loss=0.21, pruned_loss=0.01804, over 914524.00 frames. 
+2022-05-03 23:28:56,685 INFO [train.py:715] (7/8) Epoch 1, batch 18050, loss[loss=0.1677, simple_loss=0.2421, pruned_loss=0.04661, over 4849.00 frames.], tot_loss[loss=0.1848, simple_loss=0.2478, pruned_loss=0.06088, over 973500.26 frames.], batch size: 13, lr: 9.07e-04 +2022-05-03 23:29:37,120 INFO [train.py:715] (7/8) Epoch 1, batch 18100, loss[loss=0.2308, simple_loss=0.2831, pruned_loss=0.08927, over 4853.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2485, pruned_loss=0.06126, over 973809.73 frames.], batch size: 34, lr: 9.06e-04 +2022-05-03 23:30:16,934 INFO [train.py:715] (7/8) Epoch 1, batch 18150, loss[loss=0.18, simple_loss=0.2489, pruned_loss=0.05557, over 4751.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2494, pruned_loss=0.0614, over 974039.46 frames.], batch size: 19, lr: 9.06e-04 +2022-05-03 23:30:55,303 INFO [train.py:715] (7/8) Epoch 1, batch 18200, loss[loss=0.1612, simple_loss=0.2266, pruned_loss=0.04784, over 4753.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2488, pruned_loss=0.06096, over 973174.72 frames.], batch size: 19, lr: 9.05e-04 +2022-05-03 23:31:34,986 INFO [train.py:715] (7/8) Epoch 1, batch 18250, loss[loss=0.2095, simple_loss=0.2634, pruned_loss=0.0778, over 4940.00 frames.], tot_loss[loss=0.1853, simple_loss=0.2488, pruned_loss=0.06085, over 973832.54 frames.], batch size: 29, lr: 9.05e-04 +2022-05-03 23:32:14,617 INFO [train.py:715] (7/8) Epoch 1, batch 18300, loss[loss=0.2095, simple_loss=0.2692, pruned_loss=0.07492, over 4810.00 frames.], tot_loss[loss=0.185, simple_loss=0.2488, pruned_loss=0.06055, over 973428.65 frames.], batch size: 21, lr: 9.05e-04 +2022-05-03 23:32:53,401 INFO [train.py:715] (7/8) Epoch 1, batch 18350, loss[loss=0.2027, simple_loss=0.2609, pruned_loss=0.0723, over 4956.00 frames.], tot_loss[loss=0.1852, simple_loss=0.2488, pruned_loss=0.06081, over 973759.27 frames.], batch size: 14, lr: 9.04e-04 +2022-05-03 23:33:33,136 INFO [train.py:715] (7/8) Epoch 1, batch 18400, loss[loss=0.1821, simple_loss=0.2501, pruned_loss=0.05698, over 4872.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2484, pruned_loss=0.06131, over 972543.61 frames.], batch size: 32, lr: 9.04e-04 +2022-05-03 23:34:13,407 INFO [train.py:715] (7/8) Epoch 1, batch 18450, loss[loss=0.1928, simple_loss=0.2592, pruned_loss=0.0632, over 4919.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2482, pruned_loss=0.06085, over 972310.28 frames.], batch size: 19, lr: 9.03e-04 +2022-05-03 23:34:52,238 INFO [train.py:715] (7/8) Epoch 1, batch 18500, loss[loss=0.1589, simple_loss=0.2186, pruned_loss=0.04964, over 4986.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2486, pruned_loss=0.06141, over 972631.07 frames.], batch size: 35, lr: 9.03e-04 +2022-05-03 23:35:31,275 INFO [train.py:715] (7/8) Epoch 1, batch 18550, loss[loss=0.1657, simple_loss=0.2352, pruned_loss=0.04813, over 4957.00 frames.], tot_loss[loss=0.1858, simple_loss=0.249, pruned_loss=0.06132, over 972969.97 frames.], batch size: 21, lr: 9.03e-04 +2022-05-03 23:36:11,455 INFO [train.py:715] (7/8) Epoch 1, batch 18600, loss[loss=0.1779, simple_loss=0.2399, pruned_loss=0.05795, over 4793.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2479, pruned_loss=0.06064, over 972648.99 frames.], batch size: 24, lr: 9.02e-04 +2022-05-03 23:36:50,768 INFO [train.py:715] (7/8) Epoch 1, batch 18650, loss[loss=0.1988, simple_loss=0.2601, pruned_loss=0.06878, over 4821.00 frames.], tot_loss[loss=0.1845, simple_loss=0.2476, pruned_loss=0.06066, over 973328.78 frames.], batch size: 15, lr: 9.02e-04 +2022-05-03 23:37:29,518 
INFO [train.py:715] (7/8) Epoch 1, batch 18700, loss[loss=0.1724, simple_loss=0.231, pruned_loss=0.05692, over 4687.00 frames.], tot_loss[loss=0.1852, simple_loss=0.2482, pruned_loss=0.06111, over 973141.58 frames.], batch size: 15, lr: 9.01e-04 +2022-05-03 23:38:08,764 INFO [train.py:715] (7/8) Epoch 1, batch 18750, loss[loss=0.1711, simple_loss=0.2278, pruned_loss=0.05718, over 4969.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2486, pruned_loss=0.06111, over 972472.43 frames.], batch size: 35, lr: 9.01e-04 +2022-05-03 23:38:48,690 INFO [train.py:715] (7/8) Epoch 1, batch 18800, loss[loss=0.1701, simple_loss=0.2474, pruned_loss=0.04642, over 4877.00 frames.], tot_loss[loss=0.1856, simple_loss=0.2486, pruned_loss=0.06133, over 972257.63 frames.], batch size: 20, lr: 9.00e-04 +2022-05-03 23:39:27,387 INFO [train.py:715] (7/8) Epoch 1, batch 18850, loss[loss=0.2514, simple_loss=0.3037, pruned_loss=0.09957, over 4904.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2489, pruned_loss=0.0611, over 972404.08 frames.], batch size: 19, lr: 9.00e-04 +2022-05-03 23:40:06,876 INFO [train.py:715] (7/8) Epoch 1, batch 18900, loss[loss=0.1584, simple_loss=0.2334, pruned_loss=0.04173, over 4836.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2491, pruned_loss=0.06096, over 973165.57 frames.], batch size: 26, lr: 9.00e-04 +2022-05-03 23:40:46,611 INFO [train.py:715] (7/8) Epoch 1, batch 18950, loss[loss=0.1814, simple_loss=0.2339, pruned_loss=0.06452, over 4770.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2485, pruned_loss=0.06066, over 973357.36 frames.], batch size: 12, lr: 8.99e-04 +2022-05-03 23:41:25,997 INFO [train.py:715] (7/8) Epoch 1, batch 19000, loss[loss=0.1653, simple_loss=0.234, pruned_loss=0.04828, over 4984.00 frames.], tot_loss[loss=0.1854, simple_loss=0.2488, pruned_loss=0.06098, over 973683.39 frames.], batch size: 14, lr: 8.99e-04 +2022-05-03 23:42:05,679 INFO [train.py:715] (7/8) Epoch 1, batch 19050, loss[loss=0.1999, simple_loss=0.2512, pruned_loss=0.07429, over 4810.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2485, pruned_loss=0.06088, over 973440.56 frames.], batch size: 24, lr: 8.98e-04 +2022-05-03 23:42:44,850 INFO [train.py:715] (7/8) Epoch 1, batch 19100, loss[loss=0.1819, simple_loss=0.2487, pruned_loss=0.05752, over 4814.00 frames.], tot_loss[loss=0.1845, simple_loss=0.2484, pruned_loss=0.0603, over 972968.97 frames.], batch size: 15, lr: 8.98e-04 +2022-05-03 23:43:24,777 INFO [train.py:715] (7/8) Epoch 1, batch 19150, loss[loss=0.1979, simple_loss=0.2488, pruned_loss=0.07354, over 4783.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2501, pruned_loss=0.06117, over 972666.49 frames.], batch size: 12, lr: 8.98e-04 +2022-05-03 23:44:03,415 INFO [train.py:715] (7/8) Epoch 1, batch 19200, loss[loss=0.1894, simple_loss=0.2564, pruned_loss=0.06117, over 4860.00 frames.], tot_loss[loss=0.1855, simple_loss=0.2495, pruned_loss=0.06073, over 973352.21 frames.], batch size: 20, lr: 8.97e-04 +2022-05-03 23:44:42,700 INFO [train.py:715] (7/8) Epoch 1, batch 19250, loss[loss=0.1644, simple_loss=0.2199, pruned_loss=0.05446, over 4865.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2489, pruned_loss=0.06019, over 973014.62 frames.], batch size: 32, lr: 8.97e-04 +2022-05-03 23:45:23,321 INFO [train.py:715] (7/8) Epoch 1, batch 19300, loss[loss=0.1717, simple_loss=0.2393, pruned_loss=0.05201, over 4808.00 frames.], tot_loss[loss=0.184, simple_loss=0.2483, pruned_loss=0.05988, over 972782.02 frames.], batch size: 21, lr: 8.96e-04 +2022-05-03 23:46:02,788 INFO [train.py:715] 
(7/8) Epoch 1, batch 19350, loss[loss=0.2102, simple_loss=0.2662, pruned_loss=0.07713, over 4743.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2484, pruned_loss=0.06019, over 972592.29 frames.], batch size: 16, lr: 8.96e-04 +2022-05-03 23:46:41,172 INFO [train.py:715] (7/8) Epoch 1, batch 19400, loss[loss=0.1829, simple_loss=0.2546, pruned_loss=0.05559, over 4938.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2486, pruned_loss=0.06012, over 973159.67 frames.], batch size: 29, lr: 8.95e-04 +2022-05-03 23:47:20,597 INFO [train.py:715] (7/8) Epoch 1, batch 19450, loss[loss=0.1508, simple_loss=0.2122, pruned_loss=0.04475, over 4978.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2473, pruned_loss=0.05943, over 974020.87 frames.], batch size: 25, lr: 8.95e-04 +2022-05-03 23:48:00,485 INFO [train.py:715] (7/8) Epoch 1, batch 19500, loss[loss=0.155, simple_loss=0.2249, pruned_loss=0.04256, over 4800.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2472, pruned_loss=0.05933, over 973142.24 frames.], batch size: 21, lr: 8.95e-04 +2022-05-03 23:48:39,206 INFO [train.py:715] (7/8) Epoch 1, batch 19550, loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03658, over 4950.00 frames.], tot_loss[loss=0.183, simple_loss=0.2474, pruned_loss=0.0593, over 972319.02 frames.], batch size: 23, lr: 8.94e-04 +2022-05-03 23:49:18,324 INFO [train.py:715] (7/8) Epoch 1, batch 19600, loss[loss=0.1927, simple_loss=0.2511, pruned_loss=0.06711, over 4799.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2475, pruned_loss=0.05956, over 970598.23 frames.], batch size: 25, lr: 8.94e-04 +2022-05-03 23:49:58,547 INFO [train.py:715] (7/8) Epoch 1, batch 19650, loss[loss=0.182, simple_loss=0.2446, pruned_loss=0.05969, over 4950.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2465, pruned_loss=0.05898, over 970540.84 frames.], batch size: 35, lr: 8.93e-04 +2022-05-03 23:50:37,449 INFO [train.py:715] (7/8) Epoch 1, batch 19700, loss[loss=0.218, simple_loss=0.2736, pruned_loss=0.08121, over 4875.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2456, pruned_loss=0.05852, over 970410.84 frames.], batch size: 32, lr: 8.93e-04 +2022-05-03 23:51:16,597 INFO [train.py:715] (7/8) Epoch 1, batch 19750, loss[loss=0.1815, simple_loss=0.2435, pruned_loss=0.0597, over 4638.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2461, pruned_loss=0.05923, over 970661.21 frames.], batch size: 13, lr: 8.93e-04 +2022-05-03 23:51:56,241 INFO [train.py:715] (7/8) Epoch 1, batch 19800, loss[loss=0.1728, simple_loss=0.2318, pruned_loss=0.05694, over 4854.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2467, pruned_loss=0.05919, over 971155.39 frames.], batch size: 13, lr: 8.92e-04 +2022-05-03 23:52:36,509 INFO [train.py:715] (7/8) Epoch 1, batch 19850, loss[loss=0.2085, simple_loss=0.263, pruned_loss=0.07701, over 4877.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2467, pruned_loss=0.06001, over 971786.88 frames.], batch size: 38, lr: 8.92e-04 +2022-05-03 23:53:15,889 INFO [train.py:715] (7/8) Epoch 1, batch 19900, loss[loss=0.1705, simple_loss=0.2428, pruned_loss=0.04911, over 4776.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2471, pruned_loss=0.06031, over 972383.19 frames.], batch size: 19, lr: 8.91e-04 +2022-05-03 23:53:54,990 INFO [train.py:715] (7/8) Epoch 1, batch 19950, loss[loss=0.1935, simple_loss=0.2619, pruned_loss=0.06259, over 4877.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2479, pruned_loss=0.06065, over 972260.29 frames.], batch size: 39, lr: 8.91e-04 +2022-05-03 23:54:35,251 INFO [train.py:715] (7/8) Epoch 1, batch 
20000, loss[loss=0.1954, simple_loss=0.2709, pruned_loss=0.05995, over 4857.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2483, pruned_loss=0.06048, over 972179.23 frames.], batch size: 20, lr: 8.91e-04 +2022-05-03 23:55:14,863 INFO [train.py:715] (7/8) Epoch 1, batch 20050, loss[loss=0.2123, simple_loss=0.2696, pruned_loss=0.07748, over 4789.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2483, pruned_loss=0.06052, over 973153.46 frames.], batch size: 18, lr: 8.90e-04 +2022-05-03 23:55:54,267 INFO [train.py:715] (7/8) Epoch 1, batch 20100, loss[loss=0.188, simple_loss=0.2505, pruned_loss=0.0627, over 4986.00 frames.], tot_loss[loss=0.1853, simple_loss=0.2486, pruned_loss=0.06099, over 973461.61 frames.], batch size: 25, lr: 8.90e-04 +2022-05-03 23:56:34,287 INFO [train.py:715] (7/8) Epoch 1, batch 20150, loss[loss=0.172, simple_loss=0.2314, pruned_loss=0.05634, over 4941.00 frames.], tot_loss[loss=0.1852, simple_loss=0.2488, pruned_loss=0.06078, over 972911.14 frames.], batch size: 29, lr: 8.89e-04 +2022-05-03 23:57:15,159 INFO [train.py:715] (7/8) Epoch 1, batch 20200, loss[loss=0.223, simple_loss=0.2764, pruned_loss=0.08475, over 4866.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2486, pruned_loss=0.06041, over 973059.21 frames.], batch size: 20, lr: 8.89e-04 +2022-05-03 23:57:53,974 INFO [train.py:715] (7/8) Epoch 1, batch 20250, loss[loss=0.2364, simple_loss=0.2936, pruned_loss=0.08959, over 4969.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2492, pruned_loss=0.06052, over 973297.00 frames.], batch size: 15, lr: 8.89e-04 +2022-05-03 23:58:33,273 INFO [train.py:715] (7/8) Epoch 1, batch 20300, loss[loss=0.1707, simple_loss=0.2346, pruned_loss=0.05342, over 4921.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2483, pruned_loss=0.06017, over 972619.83 frames.], batch size: 23, lr: 8.88e-04 +2022-05-03 23:59:13,205 INFO [train.py:715] (7/8) Epoch 1, batch 20350, loss[loss=0.1971, simple_loss=0.256, pruned_loss=0.06909, over 4887.00 frames.], tot_loss[loss=0.1853, simple_loss=0.2486, pruned_loss=0.06095, over 972185.14 frames.], batch size: 19, lr: 8.88e-04 +2022-05-03 23:59:51,745 INFO [train.py:715] (7/8) Epoch 1, batch 20400, loss[loss=0.1742, simple_loss=0.2341, pruned_loss=0.05717, over 4904.00 frames.], tot_loss[loss=0.1837, simple_loss=0.247, pruned_loss=0.06021, over 972187.40 frames.], batch size: 18, lr: 8.87e-04 +2022-05-04 00:00:31,297 INFO [train.py:715] (7/8) Epoch 1, batch 20450, loss[loss=0.1682, simple_loss=0.234, pruned_loss=0.05118, over 4818.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2477, pruned_loss=0.06051, over 972364.00 frames.], batch size: 13, lr: 8.87e-04 +2022-05-04 00:01:10,347 INFO [train.py:715] (7/8) Epoch 1, batch 20500, loss[loss=0.2181, simple_loss=0.2749, pruned_loss=0.08064, over 4982.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2482, pruned_loss=0.06057, over 972584.72 frames.], batch size: 35, lr: 8.87e-04 +2022-05-04 00:01:50,043 INFO [train.py:715] (7/8) Epoch 1, batch 20550, loss[loss=0.1858, simple_loss=0.2308, pruned_loss=0.07035, over 4927.00 frames.], tot_loss[loss=0.184, simple_loss=0.2475, pruned_loss=0.06021, over 972539.71 frames.], batch size: 23, lr: 8.86e-04 +2022-05-04 00:02:28,912 INFO [train.py:715] (7/8) Epoch 1, batch 20600, loss[loss=0.182, simple_loss=0.2497, pruned_loss=0.05712, over 4802.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2476, pruned_loss=0.06001, over 973256.20 frames.], batch size: 21, lr: 8.86e-04 +2022-05-04 00:03:08,455 INFO [train.py:715] (7/8) Epoch 1, batch 20650, loss[loss=0.1915, 
simple_loss=0.2645, pruned_loss=0.05928, over 4873.00 frames.], tot_loss[loss=0.183, simple_loss=0.247, pruned_loss=0.05946, over 973278.82 frames.], batch size: 16, lr: 8.85e-04 +2022-05-04 00:03:48,943 INFO [train.py:715] (7/8) Epoch 1, batch 20700, loss[loss=0.2072, simple_loss=0.2655, pruned_loss=0.07448, over 4892.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2466, pruned_loss=0.05902, over 973079.88 frames.], batch size: 22, lr: 8.85e-04 +2022-05-04 00:04:28,576 INFO [train.py:715] (7/8) Epoch 1, batch 20750, loss[loss=0.1659, simple_loss=0.2211, pruned_loss=0.05537, over 4787.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2468, pruned_loss=0.05897, over 973525.30 frames.], batch size: 17, lr: 8.85e-04 +2022-05-04 00:05:07,883 INFO [train.py:715] (7/8) Epoch 1, batch 20800, loss[loss=0.1393, simple_loss=0.2177, pruned_loss=0.03041, over 4971.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2457, pruned_loss=0.05822, over 972668.20 frames.], batch size: 28, lr: 8.84e-04 +2022-05-04 00:05:47,731 INFO [train.py:715] (7/8) Epoch 1, batch 20850, loss[loss=0.1673, simple_loss=0.244, pruned_loss=0.04526, over 4778.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2464, pruned_loss=0.05896, over 972719.40 frames.], batch size: 18, lr: 8.84e-04 +2022-05-04 00:06:27,487 INFO [train.py:715] (7/8) Epoch 1, batch 20900, loss[loss=0.1881, simple_loss=0.2524, pruned_loss=0.06186, over 4911.00 frames.], tot_loss[loss=0.1815, simple_loss=0.246, pruned_loss=0.05849, over 972663.88 frames.], batch size: 18, lr: 8.83e-04 +2022-05-04 00:07:06,278 INFO [train.py:715] (7/8) Epoch 1, batch 20950, loss[loss=0.2112, simple_loss=0.2787, pruned_loss=0.07182, over 4982.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2461, pruned_loss=0.05818, over 972765.87 frames.], batch size: 24, lr: 8.83e-04 +2022-05-04 00:07:45,662 INFO [train.py:715] (7/8) Epoch 1, batch 21000, loss[loss=0.1764, simple_loss=0.2492, pruned_loss=0.05181, over 4825.00 frames.], tot_loss[loss=0.182, simple_loss=0.2465, pruned_loss=0.05881, over 971926.17 frames.], batch size: 15, lr: 8.83e-04 +2022-05-04 00:07:45,663 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 00:08:00,764 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1226, simple_loss=0.2094, pruned_loss=0.01784, over 914524.00 frames. 
+2022-05-04 00:08:40,111 INFO [train.py:715] (7/8) Epoch 1, batch 21050, loss[loss=0.1457, simple_loss=0.2253, pruned_loss=0.03303, over 4975.00 frames.], tot_loss[loss=0.181, simple_loss=0.2453, pruned_loss=0.05832, over 971748.50 frames.], batch size: 14, lr: 8.82e-04 +2022-05-04 00:09:19,953 INFO [train.py:715] (7/8) Epoch 1, batch 21100, loss[loss=0.1918, simple_loss=0.26, pruned_loss=0.06181, over 4754.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2471, pruned_loss=0.05895, over 971456.28 frames.], batch size: 16, lr: 8.82e-04 +2022-05-04 00:09:58,324 INFO [train.py:715] (7/8) Epoch 1, batch 21150, loss[loss=0.1872, simple_loss=0.2502, pruned_loss=0.06211, over 4792.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2482, pruned_loss=0.06015, over 971818.45 frames.], batch size: 17, lr: 8.81e-04 +2022-05-04 00:10:40,735 INFO [train.py:715] (7/8) Epoch 1, batch 21200, loss[loss=0.1352, simple_loss=0.2032, pruned_loss=0.0336, over 4828.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2476, pruned_loss=0.06013, over 972597.24 frames.], batch size: 13, lr: 8.81e-04 +2022-05-04 00:11:20,089 INFO [train.py:715] (7/8) Epoch 1, batch 21250, loss[loss=0.1464, simple_loss=0.2217, pruned_loss=0.03556, over 4858.00 frames.], tot_loss[loss=0.184, simple_loss=0.2474, pruned_loss=0.06033, over 972132.47 frames.], batch size: 20, lr: 8.81e-04 +2022-05-04 00:11:59,262 INFO [train.py:715] (7/8) Epoch 1, batch 21300, loss[loss=0.1711, simple_loss=0.2358, pruned_loss=0.05315, over 4818.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2471, pruned_loss=0.06023, over 971306.10 frames.], batch size: 12, lr: 8.80e-04 +2022-05-04 00:12:38,149 INFO [train.py:715] (7/8) Epoch 1, batch 21350, loss[loss=0.169, simple_loss=0.2475, pruned_loss=0.04522, over 4987.00 frames.], tot_loss[loss=0.183, simple_loss=0.247, pruned_loss=0.05949, over 971336.37 frames.], batch size: 28, lr: 8.80e-04 +2022-05-04 00:13:17,801 INFO [train.py:715] (7/8) Epoch 1, batch 21400, loss[loss=0.1555, simple_loss=0.2223, pruned_loss=0.0444, over 4857.00 frames.], tot_loss[loss=0.183, simple_loss=0.2469, pruned_loss=0.05953, over 971359.96 frames.], batch size: 20, lr: 8.80e-04 +2022-05-04 00:13:57,970 INFO [train.py:715] (7/8) Epoch 1, batch 21450, loss[loss=0.1729, simple_loss=0.2414, pruned_loss=0.0522, over 4943.00 frames.], tot_loss[loss=0.1836, simple_loss=0.2473, pruned_loss=0.05993, over 972356.05 frames.], batch size: 21, lr: 8.79e-04 +2022-05-04 00:14:36,219 INFO [train.py:715] (7/8) Epoch 1, batch 21500, loss[loss=0.2089, simple_loss=0.276, pruned_loss=0.07091, over 4834.00 frames.], tot_loss[loss=0.1839, simple_loss=0.248, pruned_loss=0.05987, over 971933.80 frames.], batch size: 26, lr: 8.79e-04 +2022-05-04 00:15:15,309 INFO [train.py:715] (7/8) Epoch 1, batch 21550, loss[loss=0.1822, simple_loss=0.2489, pruned_loss=0.05768, over 4949.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2482, pruned_loss=0.06019, over 972165.98 frames.], batch size: 39, lr: 8.78e-04 +2022-05-04 00:15:54,610 INFO [train.py:715] (7/8) Epoch 1, batch 21600, loss[loss=0.1951, simple_loss=0.2636, pruned_loss=0.06326, over 4768.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2477, pruned_loss=0.06009, over 971481.12 frames.], batch size: 18, lr: 8.78e-04 +2022-05-04 00:16:33,919 INFO [train.py:715] (7/8) Epoch 1, batch 21650, loss[loss=0.1753, simple_loss=0.2334, pruned_loss=0.05865, over 4809.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2471, pruned_loss=0.0596, over 971941.79 frames.], batch size: 13, lr: 8.78e-04 +2022-05-04 00:17:12,483 INFO 
[train.py:715] (7/8) Epoch 1, batch 21700, loss[loss=0.2387, simple_loss=0.282, pruned_loss=0.09763, over 4839.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2469, pruned_loss=0.05969, over 972225.65 frames.], batch size: 15, lr: 8.77e-04 +2022-05-04 00:17:52,132 INFO [train.py:715] (7/8) Epoch 1, batch 21750, loss[loss=0.1541, simple_loss=0.22, pruned_loss=0.04411, over 4869.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2462, pruned_loss=0.05919, over 972230.67 frames.], batch size: 16, lr: 8.77e-04 +2022-05-04 00:18:31,691 INFO [train.py:715] (7/8) Epoch 1, batch 21800, loss[loss=0.1562, simple_loss=0.2216, pruned_loss=0.04536, over 4855.00 frames.], tot_loss[loss=0.183, simple_loss=0.2468, pruned_loss=0.05958, over 972298.11 frames.], batch size: 12, lr: 8.76e-04 +2022-05-04 00:19:10,441 INFO [train.py:715] (7/8) Epoch 1, batch 21850, loss[loss=0.2004, simple_loss=0.2607, pruned_loss=0.07003, over 4784.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2465, pruned_loss=0.05966, over 972459.54 frames.], batch size: 18, lr: 8.76e-04 +2022-05-04 00:19:50,600 INFO [train.py:715] (7/8) Epoch 1, batch 21900, loss[loss=0.2056, simple_loss=0.2703, pruned_loss=0.0704, over 4969.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2473, pruned_loss=0.06019, over 972300.12 frames.], batch size: 28, lr: 8.76e-04 +2022-05-04 00:20:30,155 INFO [train.py:715] (7/8) Epoch 1, batch 21950, loss[loss=0.1888, simple_loss=0.2437, pruned_loss=0.06699, over 4987.00 frames.], tot_loss[loss=0.1835, simple_loss=0.247, pruned_loss=0.05998, over 971942.38 frames.], batch size: 39, lr: 8.75e-04 +2022-05-04 00:21:09,937 INFO [train.py:715] (7/8) Epoch 1, batch 22000, loss[loss=0.158, simple_loss=0.2301, pruned_loss=0.04296, over 4855.00 frames.], tot_loss[loss=0.184, simple_loss=0.2476, pruned_loss=0.06018, over 972106.44 frames.], batch size: 20, lr: 8.75e-04 +2022-05-04 00:21:48,906 INFO [train.py:715] (7/8) Epoch 1, batch 22050, loss[loss=0.1711, simple_loss=0.2374, pruned_loss=0.05237, over 4830.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2471, pruned_loss=0.05974, over 971729.75 frames.], batch size: 13, lr: 8.75e-04 +2022-05-04 00:22:28,895 INFO [train.py:715] (7/8) Epoch 1, batch 22100, loss[loss=0.2065, simple_loss=0.2507, pruned_loss=0.08109, over 4961.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2471, pruned_loss=0.05991, over 972262.92 frames.], batch size: 24, lr: 8.74e-04 +2022-05-04 00:23:08,227 INFO [train.py:715] (7/8) Epoch 1, batch 22150, loss[loss=0.1576, simple_loss=0.224, pruned_loss=0.04559, over 4813.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2472, pruned_loss=0.06031, over 972490.38 frames.], batch size: 26, lr: 8.74e-04 +2022-05-04 00:23:46,651 INFO [train.py:715] (7/8) Epoch 1, batch 22200, loss[loss=0.2774, simple_loss=0.3294, pruned_loss=0.1127, over 4741.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2476, pruned_loss=0.06059, over 972787.08 frames.], batch size: 16, lr: 8.73e-04 +2022-05-04 00:24:25,887 INFO [train.py:715] (7/8) Epoch 1, batch 22250, loss[loss=0.1553, simple_loss=0.2207, pruned_loss=0.04492, over 4777.00 frames.], tot_loss[loss=0.1846, simple_loss=0.248, pruned_loss=0.06059, over 972987.75 frames.], batch size: 14, lr: 8.73e-04 +2022-05-04 00:25:05,567 INFO [train.py:715] (7/8) Epoch 1, batch 22300, loss[loss=0.2033, simple_loss=0.2752, pruned_loss=0.06572, over 4806.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2471, pruned_loss=0.05974, over 973323.52 frames.], batch size: 21, lr: 8.73e-04 +2022-05-04 00:25:45,329 INFO [train.py:715] (7/8) Epoch 1, 
batch 22350, loss[loss=0.1711, simple_loss=0.2366, pruned_loss=0.05279, over 4932.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2472, pruned_loss=0.0598, over 973372.35 frames.], batch size: 29, lr: 8.72e-04 +2022-05-04 00:26:24,292 INFO [train.py:715] (7/8) Epoch 1, batch 22400, loss[loss=0.2014, simple_loss=0.2586, pruned_loss=0.07213, over 4834.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2487, pruned_loss=0.06073, over 973760.38 frames.], batch size: 30, lr: 8.72e-04 +2022-05-04 00:27:04,015 INFO [train.py:715] (7/8) Epoch 1, batch 22450, loss[loss=0.1553, simple_loss=0.2346, pruned_loss=0.03802, over 4900.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2466, pruned_loss=0.05887, over 973627.12 frames.], batch size: 19, lr: 8.72e-04 +2022-05-04 00:27:43,652 INFO [train.py:715] (7/8) Epoch 1, batch 22500, loss[loss=0.1872, simple_loss=0.2425, pruned_loss=0.06597, over 4983.00 frames.], tot_loss[loss=0.1826, simple_loss=0.2468, pruned_loss=0.05922, over 974154.06 frames.], batch size: 28, lr: 8.71e-04 +2022-05-04 00:28:22,148 INFO [train.py:715] (7/8) Epoch 1, batch 22550, loss[loss=0.1633, simple_loss=0.2358, pruned_loss=0.04539, over 4815.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2462, pruned_loss=0.05903, over 973644.72 frames.], batch size: 25, lr: 8.71e-04 +2022-05-04 00:29:02,215 INFO [train.py:715] (7/8) Epoch 1, batch 22600, loss[loss=0.1628, simple_loss=0.2193, pruned_loss=0.05317, over 4934.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2464, pruned_loss=0.05948, over 973798.69 frames.], batch size: 18, lr: 8.70e-04 +2022-05-04 00:29:42,689 INFO [train.py:715] (7/8) Epoch 1, batch 22650, loss[loss=0.2076, simple_loss=0.2732, pruned_loss=0.07099, over 4977.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2471, pruned_loss=0.06019, over 973470.94 frames.], batch size: 15, lr: 8.70e-04 +2022-05-04 00:30:22,587 INFO [train.py:715] (7/8) Epoch 1, batch 22700, loss[loss=0.2149, simple_loss=0.2813, pruned_loss=0.07427, over 4782.00 frames.], tot_loss[loss=0.1834, simple_loss=0.247, pruned_loss=0.05993, over 973316.05 frames.], batch size: 17, lr: 8.70e-04 +2022-05-04 00:31:00,981 INFO [train.py:715] (7/8) Epoch 1, batch 22750, loss[loss=0.2105, simple_loss=0.2695, pruned_loss=0.0758, over 4863.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2474, pruned_loss=0.06013, over 973326.94 frames.], batch size: 16, lr: 8.69e-04 +2022-05-04 00:31:41,167 INFO [train.py:715] (7/8) Epoch 1, batch 22800, loss[loss=0.1765, simple_loss=0.2411, pruned_loss=0.05598, over 4973.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2472, pruned_loss=0.05961, over 973568.78 frames.], batch size: 24, lr: 8.69e-04 +2022-05-04 00:32:20,890 INFO [train.py:715] (7/8) Epoch 1, batch 22850, loss[loss=0.2303, simple_loss=0.2813, pruned_loss=0.08965, over 4846.00 frames.], tot_loss[loss=0.1839, simple_loss=0.248, pruned_loss=0.05992, over 972661.57 frames.], batch size: 34, lr: 8.68e-04 +2022-05-04 00:32:59,726 INFO [train.py:715] (7/8) Epoch 1, batch 22900, loss[loss=0.1813, simple_loss=0.235, pruned_loss=0.06381, over 4851.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2472, pruned_loss=0.05952, over 971970.76 frames.], batch size: 30, lr: 8.68e-04 +2022-05-04 00:33:39,277 INFO [train.py:715] (7/8) Epoch 1, batch 22950, loss[loss=0.2329, simple_loss=0.2818, pruned_loss=0.09204, over 4772.00 frames.], tot_loss[loss=0.1836, simple_loss=0.2476, pruned_loss=0.0598, over 972038.10 frames.], batch size: 19, lr: 8.68e-04 +2022-05-04 00:34:19,080 INFO [train.py:715] (7/8) Epoch 1, batch 23000, 
loss[loss=0.1747, simple_loss=0.2483, pruned_loss=0.05056, over 4974.00 frames.], tot_loss[loss=0.1832, simple_loss=0.247, pruned_loss=0.05975, over 972522.13 frames.], batch size: 25, lr: 8.67e-04 +2022-05-04 00:34:57,986 INFO [train.py:715] (7/8) Epoch 1, batch 23050, loss[loss=0.2136, simple_loss=0.2803, pruned_loss=0.07348, over 4696.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2478, pruned_loss=0.06034, over 972323.15 frames.], batch size: 15, lr: 8.67e-04 +2022-05-04 00:35:37,122 INFO [train.py:715] (7/8) Epoch 1, batch 23100, loss[loss=0.1632, simple_loss=0.2417, pruned_loss=0.04229, over 4635.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2471, pruned_loss=0.05983, over 971420.64 frames.], batch size: 13, lr: 8.67e-04 +2022-05-04 00:36:16,859 INFO [train.py:715] (7/8) Epoch 1, batch 23150, loss[loss=0.1661, simple_loss=0.2349, pruned_loss=0.0486, over 4939.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2467, pruned_loss=0.05961, over 971788.26 frames.], batch size: 23, lr: 8.66e-04 +2022-05-04 00:36:56,382 INFO [train.py:715] (7/8) Epoch 1, batch 23200, loss[loss=0.2233, simple_loss=0.2733, pruned_loss=0.08662, over 4982.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2467, pruned_loss=0.05994, over 972373.28 frames.], batch size: 14, lr: 8.66e-04 +2022-05-04 00:37:34,620 INFO [train.py:715] (7/8) Epoch 1, batch 23250, loss[loss=0.1764, simple_loss=0.2445, pruned_loss=0.05415, over 4907.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2471, pruned_loss=0.05957, over 972398.53 frames.], batch size: 39, lr: 8.66e-04 +2022-05-04 00:38:14,195 INFO [train.py:715] (7/8) Epoch 1, batch 23300, loss[loss=0.1827, simple_loss=0.2429, pruned_loss=0.06127, over 4742.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2473, pruned_loss=0.05985, over 972851.86 frames.], batch size: 16, lr: 8.65e-04 +2022-05-04 00:38:53,774 INFO [train.py:715] (7/8) Epoch 1, batch 23350, loss[loss=0.2194, simple_loss=0.2857, pruned_loss=0.07657, over 4855.00 frames.], tot_loss[loss=0.183, simple_loss=0.247, pruned_loss=0.05953, over 972628.11 frames.], batch size: 20, lr: 8.65e-04 +2022-05-04 00:39:32,071 INFO [train.py:715] (7/8) Epoch 1, batch 23400, loss[loss=0.1639, simple_loss=0.2314, pruned_loss=0.04822, over 4908.00 frames.], tot_loss[loss=0.1833, simple_loss=0.247, pruned_loss=0.05973, over 972886.24 frames.], batch size: 17, lr: 8.64e-04 +2022-05-04 00:40:11,309 INFO [train.py:715] (7/8) Epoch 1, batch 23450, loss[loss=0.1665, simple_loss=0.2289, pruned_loss=0.05199, over 4884.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2479, pruned_loss=0.06038, over 973126.08 frames.], batch size: 32, lr: 8.64e-04 +2022-05-04 00:40:50,696 INFO [train.py:715] (7/8) Epoch 1, batch 23500, loss[loss=0.1841, simple_loss=0.2441, pruned_loss=0.06211, over 4750.00 frames.], tot_loss[loss=0.1851, simple_loss=0.2485, pruned_loss=0.06086, over 973077.67 frames.], batch size: 16, lr: 8.64e-04 +2022-05-04 00:41:29,532 INFO [train.py:715] (7/8) Epoch 1, batch 23550, loss[loss=0.2258, simple_loss=0.2833, pruned_loss=0.08421, over 4960.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2487, pruned_loss=0.06137, over 972972.28 frames.], batch size: 21, lr: 8.63e-04 +2022-05-04 00:42:07,729 INFO [train.py:715] (7/8) Epoch 1, batch 23600, loss[loss=0.2125, simple_loss=0.2925, pruned_loss=0.06625, over 4883.00 frames.], tot_loss[loss=0.1845, simple_loss=0.2479, pruned_loss=0.06057, over 973978.07 frames.], batch size: 22, lr: 8.63e-04 +2022-05-04 00:42:47,234 INFO [train.py:715] (7/8) Epoch 1, batch 23650, loss[loss=0.1574, 
simple_loss=0.2205, pruned_loss=0.04719, over 4957.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2469, pruned_loss=0.06009, over 973163.18 frames.], batch size: 14, lr: 8.63e-04 +2022-05-04 00:43:26,753 INFO [train.py:715] (7/8) Epoch 1, batch 23700, loss[loss=0.2387, simple_loss=0.2947, pruned_loss=0.09133, over 4824.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2466, pruned_loss=0.05987, over 973266.55 frames.], batch size: 30, lr: 8.62e-04 +2022-05-04 00:44:05,094 INFO [train.py:715] (7/8) Epoch 1, batch 23750, loss[loss=0.181, simple_loss=0.2596, pruned_loss=0.05125, over 4777.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2462, pruned_loss=0.059, over 972866.67 frames.], batch size: 17, lr: 8.62e-04 +2022-05-04 00:44:44,146 INFO [train.py:715] (7/8) Epoch 1, batch 23800, loss[loss=0.1933, simple_loss=0.2558, pruned_loss=0.06539, over 4829.00 frames.], tot_loss[loss=0.183, simple_loss=0.2467, pruned_loss=0.05965, over 972069.00 frames.], batch size: 15, lr: 8.61e-04 +2022-05-04 00:45:24,232 INFO [train.py:715] (7/8) Epoch 1, batch 23850, loss[loss=0.1266, simple_loss=0.197, pruned_loss=0.02808, over 4827.00 frames.], tot_loss[loss=0.1845, simple_loss=0.248, pruned_loss=0.06057, over 971736.91 frames.], batch size: 12, lr: 8.61e-04 +2022-05-04 00:46:03,793 INFO [train.py:715] (7/8) Epoch 1, batch 23900, loss[loss=0.2005, simple_loss=0.2595, pruned_loss=0.07074, over 4784.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2472, pruned_loss=0.06031, over 972041.83 frames.], batch size: 14, lr: 8.61e-04 +2022-05-04 00:46:42,594 INFO [train.py:715] (7/8) Epoch 1, batch 23950, loss[loss=0.1486, simple_loss=0.2184, pruned_loss=0.03937, over 4765.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2463, pruned_loss=0.05978, over 971949.87 frames.], batch size: 14, lr: 8.60e-04 +2022-05-04 00:47:22,328 INFO [train.py:715] (7/8) Epoch 1, batch 24000, loss[loss=0.1747, simple_loss=0.2307, pruned_loss=0.05936, over 4754.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2459, pruned_loss=0.05929, over 972730.04 frames.], batch size: 19, lr: 8.60e-04 +2022-05-04 00:47:22,329 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 00:47:34,530 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1217, simple_loss=0.2087, pruned_loss=0.01736, over 914524.00 frames. 
+2022-05-04 00:48:14,357 INFO [train.py:715] (7/8) Epoch 1, batch 24050, loss[loss=0.1553, simple_loss=0.2272, pruned_loss=0.04169, over 4752.00 frames.], tot_loss[loss=0.1814, simple_loss=0.245, pruned_loss=0.0589, over 973091.20 frames.], batch size: 14, lr: 8.60e-04 +2022-05-04 00:48:53,684 INFO [train.py:715] (7/8) Epoch 1, batch 24100, loss[loss=0.1563, simple_loss=0.2232, pruned_loss=0.04465, over 4768.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2453, pruned_loss=0.05869, over 972923.94 frames.], batch size: 18, lr: 8.59e-04 +2022-05-04 00:49:32,281 INFO [train.py:715] (7/8) Epoch 1, batch 24150, loss[loss=0.215, simple_loss=0.2811, pruned_loss=0.07449, over 4964.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2455, pruned_loss=0.05886, over 974088.76 frames.], batch size: 39, lr: 8.59e-04 +2022-05-04 00:50:11,576 INFO [train.py:715] (7/8) Epoch 1, batch 24200, loss[loss=0.1804, simple_loss=0.2579, pruned_loss=0.05148, over 4903.00 frames.], tot_loss[loss=0.1809, simple_loss=0.245, pruned_loss=0.05841, over 973996.36 frames.], batch size: 18, lr: 8.59e-04 +2022-05-04 00:50:52,255 INFO [train.py:715] (7/8) Epoch 1, batch 24250, loss[loss=0.1686, simple_loss=0.2392, pruned_loss=0.04901, over 4813.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2464, pruned_loss=0.05922, over 973827.42 frames.], batch size: 25, lr: 8.58e-04 +2022-05-04 00:51:31,682 INFO [train.py:715] (7/8) Epoch 1, batch 24300, loss[loss=0.1714, simple_loss=0.246, pruned_loss=0.04846, over 4959.00 frames.], tot_loss[loss=0.183, simple_loss=0.247, pruned_loss=0.05953, over 973302.86 frames.], batch size: 24, lr: 8.58e-04 +2022-05-04 00:52:11,128 INFO [train.py:715] (7/8) Epoch 1, batch 24350, loss[loss=0.1955, simple_loss=0.2543, pruned_loss=0.06835, over 4979.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2466, pruned_loss=0.05939, over 973372.75 frames.], batch size: 35, lr: 8.57e-04 +2022-05-04 00:52:51,502 INFO [train.py:715] (7/8) Epoch 1, batch 24400, loss[loss=0.228, simple_loss=0.2834, pruned_loss=0.0863, over 4709.00 frames.], tot_loss[loss=0.183, simple_loss=0.247, pruned_loss=0.05955, over 972985.46 frames.], batch size: 15, lr: 8.57e-04 +2022-05-04 00:53:30,582 INFO [train.py:715] (7/8) Epoch 1, batch 24450, loss[loss=0.1796, simple_loss=0.2376, pruned_loss=0.06082, over 4837.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2469, pruned_loss=0.05946, over 973226.73 frames.], batch size: 30, lr: 8.57e-04 +2022-05-04 00:54:09,304 INFO [train.py:715] (7/8) Epoch 1, batch 24500, loss[loss=0.1504, simple_loss=0.2076, pruned_loss=0.04662, over 4857.00 frames.], tot_loss[loss=0.183, simple_loss=0.2471, pruned_loss=0.05941, over 972141.43 frames.], batch size: 20, lr: 8.56e-04 +2022-05-04 00:54:48,966 INFO [train.py:715] (7/8) Epoch 1, batch 24550, loss[loss=0.1865, simple_loss=0.2555, pruned_loss=0.05872, over 4853.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2463, pruned_loss=0.05915, over 972386.50 frames.], batch size: 32, lr: 8.56e-04 +2022-05-04 00:55:29,268 INFO [train.py:715] (7/8) Epoch 1, batch 24600, loss[loss=0.2072, simple_loss=0.2555, pruned_loss=0.07951, over 4982.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2468, pruned_loss=0.05936, over 973212.14 frames.], batch size: 28, lr: 8.56e-04 +2022-05-04 00:56:08,137 INFO [train.py:715] (7/8) Epoch 1, batch 24650, loss[loss=0.1642, simple_loss=0.236, pruned_loss=0.04622, over 4872.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2468, pruned_loss=0.0597, over 972886.74 frames.], batch size: 38, lr: 8.55e-04 +2022-05-04 00:56:47,168 INFO 
[train.py:715] (7/8) Epoch 1, batch 24700, loss[loss=0.1819, simple_loss=0.2337, pruned_loss=0.06506, over 4991.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2465, pruned_loss=0.05924, over 972731.40 frames.], batch size: 14, lr: 8.55e-04 +2022-05-04 00:57:27,344 INFO [train.py:715] (7/8) Epoch 1, batch 24750, loss[loss=0.1728, simple_loss=0.2368, pruned_loss=0.05441, over 4965.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2479, pruned_loss=0.05996, over 972806.54 frames.], batch size: 14, lr: 8.55e-04 +2022-05-04 00:58:06,478 INFO [train.py:715] (7/8) Epoch 1, batch 24800, loss[loss=0.1506, simple_loss=0.2243, pruned_loss=0.03849, over 4830.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2476, pruned_loss=0.06001, over 972913.08 frames.], batch size: 15, lr: 8.54e-04 +2022-05-04 00:58:45,107 INFO [train.py:715] (7/8) Epoch 1, batch 24850, loss[loss=0.2025, simple_loss=0.2691, pruned_loss=0.06794, over 4871.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2458, pruned_loss=0.0588, over 971975.45 frames.], batch size: 22, lr: 8.54e-04 +2022-05-04 00:59:25,592 INFO [train.py:715] (7/8) Epoch 1, batch 24900, loss[loss=0.2262, simple_loss=0.2825, pruned_loss=0.08495, over 4928.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2454, pruned_loss=0.05867, over 971694.91 frames.], batch size: 39, lr: 8.54e-04 +2022-05-04 01:00:05,522 INFO [train.py:715] (7/8) Epoch 1, batch 24950, loss[loss=0.1849, simple_loss=0.2608, pruned_loss=0.0545, over 4925.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2456, pruned_loss=0.05892, over 972324.04 frames.], batch size: 23, lr: 8.53e-04 +2022-05-04 01:00:44,294 INFO [train.py:715] (7/8) Epoch 1, batch 25000, loss[loss=0.1803, simple_loss=0.2434, pruned_loss=0.05857, over 4825.00 frames.], tot_loss[loss=0.1826, simple_loss=0.2462, pruned_loss=0.05955, over 972776.27 frames.], batch size: 26, lr: 8.53e-04 +2022-05-04 01:01:22,937 INFO [train.py:715] (7/8) Epoch 1, batch 25050, loss[loss=0.1967, simple_loss=0.2363, pruned_loss=0.07858, over 4781.00 frames.], tot_loss[loss=0.182, simple_loss=0.2453, pruned_loss=0.05935, over 972654.94 frames.], batch size: 12, lr: 8.53e-04 +2022-05-04 01:02:02,858 INFO [train.py:715] (7/8) Epoch 1, batch 25100, loss[loss=0.1941, simple_loss=0.2631, pruned_loss=0.06257, over 4782.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2455, pruned_loss=0.05904, over 971661.87 frames.], batch size: 18, lr: 8.52e-04 +2022-05-04 01:02:42,031 INFO [train.py:715] (7/8) Epoch 1, batch 25150, loss[loss=0.2068, simple_loss=0.2577, pruned_loss=0.07795, over 4854.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2454, pruned_loss=0.05894, over 971712.01 frames.], batch size: 20, lr: 8.52e-04 +2022-05-04 01:03:20,877 INFO [train.py:715] (7/8) Epoch 1, batch 25200, loss[loss=0.1842, simple_loss=0.2619, pruned_loss=0.05328, over 4694.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2448, pruned_loss=0.05882, over 972608.55 frames.], batch size: 15, lr: 8.51e-04 +2022-05-04 01:04:00,096 INFO [train.py:715] (7/8) Epoch 1, batch 25250, loss[loss=0.1648, simple_loss=0.2265, pruned_loss=0.05158, over 4973.00 frames.], tot_loss[loss=0.1806, simple_loss=0.244, pruned_loss=0.05862, over 972539.48 frames.], batch size: 35, lr: 8.51e-04 +2022-05-04 01:04:40,226 INFO [train.py:715] (7/8) Epoch 1, batch 25300, loss[loss=0.1961, simple_loss=0.2582, pruned_loss=0.06703, over 4934.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2453, pruned_loss=0.05909, over 972257.38 frames.], batch size: 23, lr: 8.51e-04 +2022-05-04 01:05:18,878 INFO [train.py:715] (7/8) 
Epoch 1, batch 25350, loss[loss=0.1873, simple_loss=0.2557, pruned_loss=0.05947, over 4967.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2457, pruned_loss=0.05902, over 972286.11 frames.], batch size: 28, lr: 8.50e-04 +2022-05-04 01:05:58,218 INFO [train.py:715] (7/8) Epoch 1, batch 25400, loss[loss=0.206, simple_loss=0.2634, pruned_loss=0.07436, over 4875.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2443, pruned_loss=0.0587, over 970975.64 frames.], batch size: 16, lr: 8.50e-04 +2022-05-04 01:06:38,482 INFO [train.py:715] (7/8) Epoch 1, batch 25450, loss[loss=0.1983, simple_loss=0.2648, pruned_loss=0.06588, over 4893.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2457, pruned_loss=0.05911, over 972399.61 frames.], batch size: 22, lr: 8.50e-04 +2022-05-04 01:07:18,418 INFO [train.py:715] (7/8) Epoch 1, batch 25500, loss[loss=0.1494, simple_loss=0.221, pruned_loss=0.03887, over 4826.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2455, pruned_loss=0.0589, over 971882.14 frames.], batch size: 26, lr: 8.49e-04 +2022-05-04 01:07:56,849 INFO [train.py:715] (7/8) Epoch 1, batch 25550, loss[loss=0.1902, simple_loss=0.2519, pruned_loss=0.06431, over 4824.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2468, pruned_loss=0.05955, over 971462.20 frames.], batch size: 13, lr: 8.49e-04 +2022-05-04 01:08:36,977 INFO [train.py:715] (7/8) Epoch 1, batch 25600, loss[loss=0.1865, simple_loss=0.2437, pruned_loss=0.06469, over 4665.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2456, pruned_loss=0.05832, over 971401.62 frames.], batch size: 13, lr: 8.49e-04 +2022-05-04 01:09:17,503 INFO [train.py:715] (7/8) Epoch 1, batch 25650, loss[loss=0.223, simple_loss=0.2701, pruned_loss=0.08796, over 4980.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2455, pruned_loss=0.05837, over 972402.41 frames.], batch size: 15, lr: 8.48e-04 +2022-05-04 01:09:56,992 INFO [train.py:715] (7/8) Epoch 1, batch 25700, loss[loss=0.2029, simple_loss=0.2603, pruned_loss=0.07273, over 4988.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2452, pruned_loss=0.05829, over 972543.05 frames.], batch size: 15, lr: 8.48e-04 +2022-05-04 01:10:36,899 INFO [train.py:715] (7/8) Epoch 1, batch 25750, loss[loss=0.1794, simple_loss=0.2355, pruned_loss=0.06167, over 4781.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2449, pruned_loss=0.05822, over 972846.68 frames.], batch size: 17, lr: 8.48e-04 +2022-05-04 01:11:17,398 INFO [train.py:715] (7/8) Epoch 1, batch 25800, loss[loss=0.1997, simple_loss=0.2667, pruned_loss=0.06634, over 4748.00 frames.], tot_loss[loss=0.1808, simple_loss=0.245, pruned_loss=0.05827, over 973265.63 frames.], batch size: 16, lr: 8.47e-04 +2022-05-04 01:11:56,820 INFO [train.py:715] (7/8) Epoch 1, batch 25850, loss[loss=0.2067, simple_loss=0.2583, pruned_loss=0.07757, over 4849.00 frames.], tot_loss[loss=0.1806, simple_loss=0.245, pruned_loss=0.05813, over 973166.97 frames.], batch size: 32, lr: 8.47e-04 +2022-05-04 01:12:35,653 INFO [train.py:715] (7/8) Epoch 1, batch 25900, loss[loss=0.1518, simple_loss=0.2306, pruned_loss=0.03655, over 4794.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2447, pruned_loss=0.05745, over 973608.57 frames.], batch size: 21, lr: 8.47e-04 +2022-05-04 01:13:15,328 INFO [train.py:715] (7/8) Epoch 1, batch 25950, loss[loss=0.2194, simple_loss=0.2938, pruned_loss=0.07256, over 4758.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2455, pruned_loss=0.05785, over 973389.28 frames.], batch size: 16, lr: 8.46e-04 +2022-05-04 01:13:55,208 INFO [train.py:715] (7/8) Epoch 1, batch 26000, 
loss[loss=0.1914, simple_loss=0.2449, pruned_loss=0.06898, over 4926.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2447, pruned_loss=0.05758, over 973381.75 frames.], batch size: 17, lr: 8.46e-04 +2022-05-04 01:14:34,096 INFO [train.py:715] (7/8) Epoch 1, batch 26050, loss[loss=0.1708, simple_loss=0.2265, pruned_loss=0.0576, over 4895.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2445, pruned_loss=0.05728, over 972928.23 frames.], batch size: 19, lr: 8.46e-04 +2022-05-04 01:15:13,493 INFO [train.py:715] (7/8) Epoch 1, batch 26100, loss[loss=0.1905, simple_loss=0.253, pruned_loss=0.064, over 4784.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2457, pruned_loss=0.0582, over 973133.47 frames.], batch size: 18, lr: 8.45e-04 +2022-05-04 01:15:53,625 INFO [train.py:715] (7/8) Epoch 1, batch 26150, loss[loss=0.1842, simple_loss=0.242, pruned_loss=0.06326, over 4750.00 frames.], tot_loss[loss=0.1803, simple_loss=0.245, pruned_loss=0.05782, over 972718.72 frames.], batch size: 16, lr: 8.45e-04 +2022-05-04 01:16:32,573 INFO [train.py:715] (7/8) Epoch 1, batch 26200, loss[loss=0.1793, simple_loss=0.2346, pruned_loss=0.06204, over 4980.00 frames.], tot_loss[loss=0.1808, simple_loss=0.2453, pruned_loss=0.0582, over 973420.86 frames.], batch size: 24, lr: 8.44e-04 +2022-05-04 01:17:11,437 INFO [train.py:715] (7/8) Epoch 1, batch 26250, loss[loss=0.1979, simple_loss=0.2619, pruned_loss=0.06689, over 4985.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2454, pruned_loss=0.05821, over 973276.11 frames.], batch size: 24, lr: 8.44e-04 +2022-05-04 01:17:51,346 INFO [train.py:715] (7/8) Epoch 1, batch 26300, loss[loss=0.1532, simple_loss=0.2201, pruned_loss=0.04318, over 4841.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2449, pruned_loss=0.05797, over 972220.99 frames.], batch size: 30, lr: 8.44e-04 +2022-05-04 01:18:31,203 INFO [train.py:715] (7/8) Epoch 1, batch 26350, loss[loss=0.1449, simple_loss=0.2096, pruned_loss=0.04015, over 4752.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2454, pruned_loss=0.05822, over 971407.68 frames.], batch size: 18, lr: 8.43e-04 +2022-05-04 01:19:09,973 INFO [train.py:715] (7/8) Epoch 1, batch 26400, loss[loss=0.1746, simple_loss=0.2412, pruned_loss=0.05398, over 4819.00 frames.], tot_loss[loss=0.182, simple_loss=0.2459, pruned_loss=0.05901, over 971007.08 frames.], batch size: 13, lr: 8.43e-04 +2022-05-04 01:19:49,173 INFO [train.py:715] (7/8) Epoch 1, batch 26450, loss[loss=0.1901, simple_loss=0.2423, pruned_loss=0.06896, over 4957.00 frames.], tot_loss[loss=0.1826, simple_loss=0.2465, pruned_loss=0.05936, over 971948.37 frames.], batch size: 35, lr: 8.43e-04 +2022-05-04 01:20:28,909 INFO [train.py:715] (7/8) Epoch 1, batch 26500, loss[loss=0.1582, simple_loss=0.2333, pruned_loss=0.04154, over 4756.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2457, pruned_loss=0.05877, over 972062.93 frames.], batch size: 16, lr: 8.42e-04 +2022-05-04 01:21:08,260 INFO [train.py:715] (7/8) Epoch 1, batch 26550, loss[loss=0.17, simple_loss=0.2476, pruned_loss=0.04623, over 4962.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2452, pruned_loss=0.05796, over 972918.29 frames.], batch size: 21, lr: 8.42e-04 +2022-05-04 01:21:47,619 INFO [train.py:715] (7/8) Epoch 1, batch 26600, loss[loss=0.167, simple_loss=0.2349, pruned_loss=0.04961, over 4808.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2458, pruned_loss=0.05858, over 972951.78 frames.], batch size: 25, lr: 8.42e-04 +2022-05-04 01:22:27,659 INFO [train.py:715] (7/8) Epoch 1, batch 26650, loss[loss=0.1848, 
simple_loss=0.2521, pruned_loss=0.05876, over 4944.00 frames.], tot_loss[loss=0.1808, simple_loss=0.2447, pruned_loss=0.05847, over 973533.82 frames.], batch size: 23, lr: 8.41e-04 +2022-05-04 01:23:07,614 INFO [train.py:715] (7/8) Epoch 1, batch 26700, loss[loss=0.268, simple_loss=0.3178, pruned_loss=0.1091, over 4963.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2446, pruned_loss=0.05826, over 973315.61 frames.], batch size: 14, lr: 8.41e-04 +2022-05-04 01:23:46,586 INFO [train.py:715] (7/8) Epoch 1, batch 26750, loss[loss=0.1709, simple_loss=0.2379, pruned_loss=0.05201, over 4963.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2441, pruned_loss=0.05776, over 973001.61 frames.], batch size: 21, lr: 8.41e-04 +2022-05-04 01:24:26,596 INFO [train.py:715] (7/8) Epoch 1, batch 26800, loss[loss=0.1679, simple_loss=0.2342, pruned_loss=0.05079, over 4900.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2439, pruned_loss=0.05768, over 973092.05 frames.], batch size: 19, lr: 8.40e-04 +2022-05-04 01:25:06,141 INFO [train.py:715] (7/8) Epoch 1, batch 26850, loss[loss=0.1939, simple_loss=0.2651, pruned_loss=0.06137, over 4966.00 frames.], tot_loss[loss=0.18, simple_loss=0.2442, pruned_loss=0.05793, over 973077.15 frames.], batch size: 24, lr: 8.40e-04 +2022-05-04 01:25:45,417 INFO [train.py:715] (7/8) Epoch 1, batch 26900, loss[loss=0.1798, simple_loss=0.2591, pruned_loss=0.05025, over 4872.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2445, pruned_loss=0.05803, over 972810.84 frames.], batch size: 20, lr: 8.40e-04 +2022-05-04 01:26:24,112 INFO [train.py:715] (7/8) Epoch 1, batch 26950, loss[loss=0.1939, simple_loss=0.2475, pruned_loss=0.07018, over 4810.00 frames.], tot_loss[loss=0.1811, simple_loss=0.245, pruned_loss=0.05864, over 972037.95 frames.], batch size: 13, lr: 8.39e-04 +2022-05-04 01:27:04,126 INFO [train.py:715] (7/8) Epoch 1, batch 27000, loss[loss=0.1698, simple_loss=0.2373, pruned_loss=0.05118, over 4963.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2444, pruned_loss=0.05812, over 972235.71 frames.], batch size: 15, lr: 8.39e-04 +2022-05-04 01:27:04,127 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 01:27:12,718 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1212, simple_loss=0.2081, pruned_loss=0.01718, over 914524.00 frames. 
+2022-05-04 01:27:53,062 INFO [train.py:715] (7/8) Epoch 1, batch 27050, loss[loss=0.1459, simple_loss=0.2115, pruned_loss=0.04011, over 4973.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2442, pruned_loss=0.05766, over 971993.27 frames.], batch size: 15, lr: 8.39e-04 +2022-05-04 01:28:33,376 INFO [train.py:715] (7/8) Epoch 1, batch 27100, loss[loss=0.1517, simple_loss=0.2241, pruned_loss=0.03962, over 4817.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2437, pruned_loss=0.05792, over 971417.26 frames.], batch size: 27, lr: 8.38e-04 +2022-05-04 01:29:11,784 INFO [train.py:715] (7/8) Epoch 1, batch 27150, loss[loss=0.1712, simple_loss=0.2363, pruned_loss=0.0531, over 4812.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2428, pruned_loss=0.05735, over 971688.42 frames.], batch size: 13, lr: 8.38e-04 +2022-05-04 01:29:51,725 INFO [train.py:715] (7/8) Epoch 1, batch 27200, loss[loss=0.2243, simple_loss=0.274, pruned_loss=0.08729, over 4870.00 frames.], tot_loss[loss=0.1787, simple_loss=0.243, pruned_loss=0.05723, over 971995.81 frames.], batch size: 22, lr: 8.38e-04 +2022-05-04 01:30:32,015 INFO [train.py:715] (7/8) Epoch 1, batch 27250, loss[loss=0.2096, simple_loss=0.2811, pruned_loss=0.06903, over 4947.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2447, pruned_loss=0.05822, over 971885.20 frames.], batch size: 24, lr: 8.37e-04 +2022-05-04 01:31:11,132 INFO [train.py:715] (7/8) Epoch 1, batch 27300, loss[loss=0.1599, simple_loss=0.2309, pruned_loss=0.0444, over 4905.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2453, pruned_loss=0.05866, over 972840.96 frames.], batch size: 29, lr: 8.37e-04 +2022-05-04 01:31:49,676 INFO [train.py:715] (7/8) Epoch 1, batch 27350, loss[loss=0.1384, simple_loss=0.2145, pruned_loss=0.03116, over 4988.00 frames.], tot_loss[loss=0.181, simple_loss=0.2453, pruned_loss=0.05832, over 971586.96 frames.], batch size: 28, lr: 8.37e-04 +2022-05-04 01:32:29,600 INFO [train.py:715] (7/8) Epoch 1, batch 27400, loss[loss=0.1252, simple_loss=0.197, pruned_loss=0.02672, over 4812.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2459, pruned_loss=0.05863, over 972545.80 frames.], batch size: 13, lr: 8.36e-04 +2022-05-04 01:33:09,600 INFO [train.py:715] (7/8) Epoch 1, batch 27450, loss[loss=0.1941, simple_loss=0.2565, pruned_loss=0.06583, over 4921.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2458, pruned_loss=0.05857, over 973331.33 frames.], batch size: 17, lr: 8.36e-04 +2022-05-04 01:33:48,110 INFO [train.py:715] (7/8) Epoch 1, batch 27500, loss[loss=0.1745, simple_loss=0.2347, pruned_loss=0.05712, over 4762.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2455, pruned_loss=0.05848, over 972882.77 frames.], batch size: 16, lr: 8.36e-04 +2022-05-04 01:34:27,762 INFO [train.py:715] (7/8) Epoch 1, batch 27550, loss[loss=0.1523, simple_loss=0.2193, pruned_loss=0.0427, over 4786.00 frames.], tot_loss[loss=0.1807, simple_loss=0.245, pruned_loss=0.05816, over 972557.00 frames.], batch size: 18, lr: 8.35e-04 +2022-05-04 01:35:07,990 INFO [train.py:715] (7/8) Epoch 1, batch 27600, loss[loss=0.1744, simple_loss=0.251, pruned_loss=0.04893, over 4978.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2448, pruned_loss=0.05826, over 973453.33 frames.], batch size: 28, lr: 8.35e-04 +2022-05-04 01:35:47,294 INFO [train.py:715] (7/8) Epoch 1, batch 27650, loss[loss=0.186, simple_loss=0.2595, pruned_loss=0.05627, over 4909.00 frames.], tot_loss[loss=0.1809, simple_loss=0.245, pruned_loss=0.05841, over 973014.21 frames.], batch size: 17, lr: 8.35e-04 +2022-05-04 01:36:26,735 INFO 
[train.py:715] (7/8) Epoch 1, batch 27700, loss[loss=0.1699, simple_loss=0.2451, pruned_loss=0.0474, over 4851.00 frames.], tot_loss[loss=0.181, simple_loss=0.2447, pruned_loss=0.05869, over 972705.19 frames.], batch size: 20, lr: 8.34e-04 +2022-05-04 01:37:07,287 INFO [train.py:715] (7/8) Epoch 1, batch 27750, loss[loss=0.1729, simple_loss=0.2423, pruned_loss=0.05175, over 4986.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2449, pruned_loss=0.05915, over 973124.46 frames.], batch size: 14, lr: 8.34e-04 +2022-05-04 01:37:47,072 INFO [train.py:715] (7/8) Epoch 1, batch 27800, loss[loss=0.1374, simple_loss=0.2065, pruned_loss=0.03413, over 4916.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2446, pruned_loss=0.05889, over 973235.61 frames.], batch size: 17, lr: 8.34e-04 +2022-05-04 01:38:26,359 INFO [train.py:715] (7/8) Epoch 1, batch 27850, loss[loss=0.1658, simple_loss=0.2252, pruned_loss=0.05325, over 4863.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2444, pruned_loss=0.05898, over 972696.89 frames.], batch size: 20, lr: 8.33e-04 +2022-05-04 01:39:06,465 INFO [train.py:715] (7/8) Epoch 1, batch 27900, loss[loss=0.1789, simple_loss=0.2403, pruned_loss=0.05873, over 4965.00 frames.], tot_loss[loss=0.181, simple_loss=0.2449, pruned_loss=0.05852, over 972948.26 frames.], batch size: 35, lr: 8.33e-04 +2022-05-04 01:39:45,948 INFO [train.py:715] (7/8) Epoch 1, batch 27950, loss[loss=0.1666, simple_loss=0.2344, pruned_loss=0.04942, over 4833.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2437, pruned_loss=0.05755, over 972597.98 frames.], batch size: 30, lr: 8.33e-04 +2022-05-04 01:40:25,328 INFO [train.py:715] (7/8) Epoch 1, batch 28000, loss[loss=0.1721, simple_loss=0.2421, pruned_loss=0.05106, over 4922.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2436, pruned_loss=0.05771, over 971744.84 frames.], batch size: 29, lr: 8.32e-04 +2022-05-04 01:41:04,107 INFO [train.py:715] (7/8) Epoch 1, batch 28050, loss[loss=0.1639, simple_loss=0.224, pruned_loss=0.05187, over 4984.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2433, pruned_loss=0.05759, over 972145.14 frames.], batch size: 14, lr: 8.32e-04 +2022-05-04 01:41:44,524 INFO [train.py:715] (7/8) Epoch 1, batch 28100, loss[loss=0.1931, simple_loss=0.2576, pruned_loss=0.06428, over 4906.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2427, pruned_loss=0.05749, over 972069.43 frames.], batch size: 17, lr: 8.32e-04 +2022-05-04 01:42:23,902 INFO [train.py:715] (7/8) Epoch 1, batch 28150, loss[loss=0.1592, simple_loss=0.2225, pruned_loss=0.04797, over 4738.00 frames.], tot_loss[loss=0.1791, simple_loss=0.243, pruned_loss=0.05762, over 971883.38 frames.], batch size: 12, lr: 8.31e-04 +2022-05-04 01:43:03,290 INFO [train.py:715] (7/8) Epoch 1, batch 28200, loss[loss=0.151, simple_loss=0.223, pruned_loss=0.03947, over 4982.00 frames.], tot_loss[loss=0.1792, simple_loss=0.243, pruned_loss=0.05776, over 972342.99 frames.], batch size: 28, lr: 8.31e-04 +2022-05-04 01:43:43,975 INFO [train.py:715] (7/8) Epoch 1, batch 28250, loss[loss=0.1693, simple_loss=0.2481, pruned_loss=0.04529, over 4799.00 frames.], tot_loss[loss=0.1808, simple_loss=0.2446, pruned_loss=0.05849, over 972626.23 frames.], batch size: 25, lr: 8.31e-04 +2022-05-04 01:44:24,419 INFO [train.py:715] (7/8) Epoch 1, batch 28300, loss[loss=0.1552, simple_loss=0.2184, pruned_loss=0.04604, over 4822.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2443, pruned_loss=0.05814, over 971834.60 frames.], batch size: 15, lr: 8.30e-04 +2022-05-04 01:45:03,752 INFO [train.py:715] (7/8) Epoch 
1, batch 28350, loss[loss=0.1824, simple_loss=0.2533, pruned_loss=0.05579, over 4822.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2443, pruned_loss=0.05766, over 971733.28 frames.], batch size: 27, lr: 8.30e-04 +2022-05-04 01:45:42,704 INFO [train.py:715] (7/8) Epoch 1, batch 28400, loss[loss=0.1918, simple_loss=0.2392, pruned_loss=0.07222, over 4834.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2447, pruned_loss=0.05784, over 972300.00 frames.], batch size: 15, lr: 8.30e-04 +2022-05-04 01:46:23,128 INFO [train.py:715] (7/8) Epoch 1, batch 28450, loss[loss=0.2386, simple_loss=0.3004, pruned_loss=0.08838, over 4850.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2447, pruned_loss=0.05756, over 972361.60 frames.], batch size: 20, lr: 8.29e-04 +2022-05-04 01:47:02,717 INFO [train.py:715] (7/8) Epoch 1, batch 28500, loss[loss=0.1409, simple_loss=0.21, pruned_loss=0.03586, over 4772.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2453, pruned_loss=0.05799, over 973135.46 frames.], batch size: 18, lr: 8.29e-04 +2022-05-04 01:47:41,719 INFO [train.py:715] (7/8) Epoch 1, batch 28550, loss[loss=0.1788, simple_loss=0.2385, pruned_loss=0.05956, over 4874.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2449, pruned_loss=0.05768, over 972298.84 frames.], batch size: 32, lr: 8.29e-04 +2022-05-04 01:48:22,006 INFO [train.py:715] (7/8) Epoch 1, batch 28600, loss[loss=0.1396, simple_loss=0.2157, pruned_loss=0.03175, over 4940.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2452, pruned_loss=0.05772, over 972907.92 frames.], batch size: 29, lr: 8.28e-04 +2022-05-04 01:49:01,951 INFO [train.py:715] (7/8) Epoch 1, batch 28650, loss[loss=0.1779, simple_loss=0.2409, pruned_loss=0.05743, over 4932.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2452, pruned_loss=0.0577, over 972984.58 frames.], batch size: 23, lr: 8.28e-04 +2022-05-04 01:49:41,100 INFO [train.py:715] (7/8) Epoch 1, batch 28700, loss[loss=0.1561, simple_loss=0.234, pruned_loss=0.03912, over 4936.00 frames.], tot_loss[loss=0.1805, simple_loss=0.2452, pruned_loss=0.05786, over 973021.03 frames.], batch size: 29, lr: 8.28e-04 +2022-05-04 01:50:20,244 INFO [train.py:715] (7/8) Epoch 1, batch 28750, loss[loss=0.1635, simple_loss=0.2271, pruned_loss=0.04992, over 4862.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2445, pruned_loss=0.05749, over 973395.27 frames.], batch size: 20, lr: 8.27e-04 +2022-05-04 01:51:00,839 INFO [train.py:715] (7/8) Epoch 1, batch 28800, loss[loss=0.1505, simple_loss=0.2198, pruned_loss=0.04056, over 4944.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2438, pruned_loss=0.05715, over 973830.18 frames.], batch size: 21, lr: 8.27e-04 +2022-05-04 01:51:40,147 INFO [train.py:715] (7/8) Epoch 1, batch 28850, loss[loss=0.1594, simple_loss=0.232, pruned_loss=0.04339, over 4777.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2446, pruned_loss=0.05781, over 973640.84 frames.], batch size: 18, lr: 8.27e-04 +2022-05-04 01:52:19,910 INFO [train.py:715] (7/8) Epoch 1, batch 28900, loss[loss=0.1483, simple_loss=0.2147, pruned_loss=0.04099, over 4907.00 frames.], tot_loss[loss=0.1794, simple_loss=0.244, pruned_loss=0.05745, over 973457.90 frames.], batch size: 17, lr: 8.27e-04 +2022-05-04 01:53:00,603 INFO [train.py:715] (7/8) Epoch 1, batch 28950, loss[loss=0.1785, simple_loss=0.2519, pruned_loss=0.05256, over 4789.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2437, pruned_loss=0.0575, over 973353.29 frames.], batch size: 21, lr: 8.26e-04 +2022-05-04 01:53:40,740 INFO [train.py:715] (7/8) Epoch 1, batch 29000, 
loss[loss=0.2083, simple_loss=0.2759, pruned_loss=0.07037, over 4937.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2442, pruned_loss=0.05798, over 972415.10 frames.], batch size: 29, lr: 8.26e-04 +2022-05-04 01:54:19,716 INFO [train.py:715] (7/8) Epoch 1, batch 29050, loss[loss=0.1629, simple_loss=0.2352, pruned_loss=0.04525, over 4814.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2438, pruned_loss=0.05768, over 972323.70 frames.], batch size: 27, lr: 8.26e-04 +2022-05-04 01:54:59,588 INFO [train.py:715] (7/8) Epoch 1, batch 29100, loss[loss=0.19, simple_loss=0.244, pruned_loss=0.06802, over 4866.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2434, pruned_loss=0.05741, over 972079.47 frames.], batch size: 30, lr: 8.25e-04 +2022-05-04 01:55:40,269 INFO [train.py:715] (7/8) Epoch 1, batch 29150, loss[loss=0.2184, simple_loss=0.2751, pruned_loss=0.08085, over 4891.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2442, pruned_loss=0.05815, over 972810.68 frames.], batch size: 39, lr: 8.25e-04 +2022-05-04 01:56:22,368 INFO [train.py:715] (7/8) Epoch 1, batch 29200, loss[loss=0.1431, simple_loss=0.2073, pruned_loss=0.03945, over 4987.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2437, pruned_loss=0.05758, over 972798.12 frames.], batch size: 14, lr: 8.25e-04 +2022-05-04 01:57:01,398 INFO [train.py:715] (7/8) Epoch 1, batch 29250, loss[loss=0.1755, simple_loss=0.2406, pruned_loss=0.05522, over 4847.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2447, pruned_loss=0.0579, over 973701.14 frames.], batch size: 15, lr: 8.24e-04 +2022-05-04 01:57:41,947 INFO [train.py:715] (7/8) Epoch 1, batch 29300, loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02842, over 4933.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2463, pruned_loss=0.05846, over 973447.55 frames.], batch size: 18, lr: 8.24e-04 +2022-05-04 01:58:22,151 INFO [train.py:715] (7/8) Epoch 1, batch 29350, loss[loss=0.1826, simple_loss=0.241, pruned_loss=0.06213, over 4826.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2463, pruned_loss=0.05856, over 973421.91 frames.], batch size: 15, lr: 8.24e-04 +2022-05-04 01:59:00,693 INFO [train.py:715] (7/8) Epoch 1, batch 29400, loss[loss=0.1834, simple_loss=0.2424, pruned_loss=0.06218, over 4778.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2462, pruned_loss=0.05858, over 972544.16 frames.], batch size: 19, lr: 8.23e-04 +2022-05-04 01:59:40,304 INFO [train.py:715] (7/8) Epoch 1, batch 29450, loss[loss=0.2043, simple_loss=0.2694, pruned_loss=0.06961, over 4754.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2454, pruned_loss=0.05818, over 973328.39 frames.], batch size: 19, lr: 8.23e-04 +2022-05-04 02:00:20,006 INFO [train.py:715] (7/8) Epoch 1, batch 29500, loss[loss=0.1474, simple_loss=0.2107, pruned_loss=0.04201, over 4895.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2444, pruned_loss=0.05772, over 973160.02 frames.], batch size: 17, lr: 8.23e-04 +2022-05-04 02:00:59,407 INFO [train.py:715] (7/8) Epoch 1, batch 29550, loss[loss=0.2137, simple_loss=0.2599, pruned_loss=0.08368, over 4872.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2448, pruned_loss=0.05825, over 972648.10 frames.], batch size: 32, lr: 8.22e-04 +2022-05-04 02:01:37,995 INFO [train.py:715] (7/8) Epoch 1, batch 29600, loss[loss=0.1836, simple_loss=0.2543, pruned_loss=0.05649, over 4891.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2443, pruned_loss=0.05771, over 971745.01 frames.], batch size: 19, lr: 8.22e-04 +2022-05-04 02:02:18,242 INFO [train.py:715] (7/8) Epoch 1, batch 29650, loss[loss=0.183, 
simple_loss=0.2437, pruned_loss=0.06113, over 4777.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2442, pruned_loss=0.05761, over 970913.26 frames.], batch size: 18, lr: 8.22e-04 +2022-05-04 02:02:58,335 INFO [train.py:715] (7/8) Epoch 1, batch 29700, loss[loss=0.1942, simple_loss=0.2565, pruned_loss=0.06596, over 4960.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2441, pruned_loss=0.05778, over 972183.84 frames.], batch size: 24, lr: 8.21e-04 +2022-05-04 02:03:36,332 INFO [train.py:715] (7/8) Epoch 1, batch 29750, loss[loss=0.1976, simple_loss=0.2669, pruned_loss=0.06412, over 4812.00 frames.], tot_loss[loss=0.1806, simple_loss=0.245, pruned_loss=0.05814, over 972189.47 frames.], batch size: 21, lr: 8.21e-04 +2022-05-04 02:04:15,637 INFO [train.py:715] (7/8) Epoch 1, batch 29800, loss[loss=0.1833, simple_loss=0.2525, pruned_loss=0.05706, over 4924.00 frames.], tot_loss[loss=0.1822, simple_loss=0.2469, pruned_loss=0.0588, over 972245.93 frames.], batch size: 19, lr: 8.21e-04 +2022-05-04 02:04:55,053 INFO [train.py:715] (7/8) Epoch 1, batch 29850, loss[loss=0.1845, simple_loss=0.2522, pruned_loss=0.05842, over 4955.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2461, pruned_loss=0.05821, over 972334.23 frames.], batch size: 24, lr: 8.20e-04 +2022-05-04 02:05:34,426 INFO [train.py:715] (7/8) Epoch 1, batch 29900, loss[loss=0.1618, simple_loss=0.2308, pruned_loss=0.04641, over 4825.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2456, pruned_loss=0.05757, over 972882.13 frames.], batch size: 25, lr: 8.20e-04 +2022-05-04 02:06:12,930 INFO [train.py:715] (7/8) Epoch 1, batch 29950, loss[loss=0.1821, simple_loss=0.2475, pruned_loss=0.05832, over 4912.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2456, pruned_loss=0.05765, over 973501.22 frames.], batch size: 29, lr: 8.20e-04 +2022-05-04 02:06:52,739 INFO [train.py:715] (7/8) Epoch 1, batch 30000, loss[loss=0.189, simple_loss=0.2504, pruned_loss=0.06378, over 4791.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2447, pruned_loss=0.05697, over 973643.63 frames.], batch size: 14, lr: 8.20e-04 +2022-05-04 02:06:52,740 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 02:07:09,693 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1207, simple_loss=0.2076, pruned_loss=0.01687, over 914524.00 frames. 
+2022-05-04 02:07:50,183 INFO [train.py:715] (7/8) Epoch 1, batch 30050, loss[loss=0.1503, simple_loss=0.2212, pruned_loss=0.03969, over 4924.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2442, pruned_loss=0.05642, over 973716.69 frames.], batch size: 29, lr: 8.19e-04 +2022-05-04 02:08:29,662 INFO [train.py:715] (7/8) Epoch 1, batch 30100, loss[loss=0.1858, simple_loss=0.2455, pruned_loss=0.06311, over 4977.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2446, pruned_loss=0.05705, over 973470.63 frames.], batch size: 28, lr: 8.19e-04 +2022-05-04 02:09:09,063 INFO [train.py:715] (7/8) Epoch 1, batch 30150, loss[loss=0.1664, simple_loss=0.2344, pruned_loss=0.04926, over 4704.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2444, pruned_loss=0.05757, over 973279.07 frames.], batch size: 15, lr: 8.19e-04 +2022-05-04 02:09:48,373 INFO [train.py:715] (7/8) Epoch 1, batch 30200, loss[loss=0.2054, simple_loss=0.2794, pruned_loss=0.06566, over 4938.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2458, pruned_loss=0.05834, over 973274.05 frames.], batch size: 23, lr: 8.18e-04 +2022-05-04 02:10:28,821 INFO [train.py:715] (7/8) Epoch 1, batch 30250, loss[loss=0.2063, simple_loss=0.2656, pruned_loss=0.07353, over 4885.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2457, pruned_loss=0.0582, over 973742.96 frames.], batch size: 22, lr: 8.18e-04 +2022-05-04 02:11:08,801 INFO [train.py:715] (7/8) Epoch 1, batch 30300, loss[loss=0.2001, simple_loss=0.2625, pruned_loss=0.06882, over 4772.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2455, pruned_loss=0.05794, over 973531.31 frames.], batch size: 18, lr: 8.18e-04 +2022-05-04 02:11:47,708 INFO [train.py:715] (7/8) Epoch 1, batch 30350, loss[loss=0.2109, simple_loss=0.2725, pruned_loss=0.0746, over 4819.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2449, pruned_loss=0.05785, over 972984.07 frames.], batch size: 15, lr: 8.17e-04 +2022-05-04 02:12:27,777 INFO [train.py:715] (7/8) Epoch 1, batch 30400, loss[loss=0.1926, simple_loss=0.2481, pruned_loss=0.06854, over 4936.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2445, pruned_loss=0.05713, over 973002.88 frames.], batch size: 23, lr: 8.17e-04 +2022-05-04 02:13:07,268 INFO [train.py:715] (7/8) Epoch 1, batch 30450, loss[loss=0.1906, simple_loss=0.2564, pruned_loss=0.06239, over 4875.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2454, pruned_loss=0.05762, over 973043.67 frames.], batch size: 16, lr: 8.17e-04 +2022-05-04 02:13:46,442 INFO [train.py:715] (7/8) Epoch 1, batch 30500, loss[loss=0.1769, simple_loss=0.2356, pruned_loss=0.05916, over 4837.00 frames.], tot_loss[loss=0.181, simple_loss=0.2458, pruned_loss=0.05812, over 973004.84 frames.], batch size: 15, lr: 8.16e-04 +2022-05-04 02:14:25,543 INFO [train.py:715] (7/8) Epoch 1, batch 30550, loss[loss=0.1535, simple_loss=0.2288, pruned_loss=0.03909, over 4829.00 frames.], tot_loss[loss=0.18, simple_loss=0.2448, pruned_loss=0.05763, over 972714.37 frames.], batch size: 15, lr: 8.16e-04 +2022-05-04 02:15:05,342 INFO [train.py:715] (7/8) Epoch 1, batch 30600, loss[loss=0.1558, simple_loss=0.222, pruned_loss=0.04476, over 4906.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2449, pruned_loss=0.05774, over 973798.32 frames.], batch size: 17, lr: 8.16e-04 +2022-05-04 02:15:44,807 INFO [train.py:715] (7/8) Epoch 1, batch 30650, loss[loss=0.2005, simple_loss=0.2732, pruned_loss=0.06393, over 4824.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2447, pruned_loss=0.05782, over 972915.67 frames.], batch size: 26, lr: 8.15e-04 +2022-05-04 02:16:23,389 
INFO [train.py:715] (7/8) Epoch 1, batch 30700, loss[loss=0.1549, simple_loss=0.2175, pruned_loss=0.04612, over 4740.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2441, pruned_loss=0.05722, over 972095.05 frames.], batch size: 16, lr: 8.15e-04 +2022-05-04 02:17:03,639 INFO [train.py:715] (7/8) Epoch 1, batch 30750, loss[loss=0.1621, simple_loss=0.2335, pruned_loss=0.04539, over 4933.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2435, pruned_loss=0.05662, over 972192.07 frames.], batch size: 23, lr: 8.15e-04 +2022-05-04 02:17:43,209 INFO [train.py:715] (7/8) Epoch 1, batch 30800, loss[loss=0.1587, simple_loss=0.237, pruned_loss=0.04024, over 4889.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2431, pruned_loss=0.0567, over 972351.01 frames.], batch size: 22, lr: 8.15e-04 +2022-05-04 02:18:22,128 INFO [train.py:715] (7/8) Epoch 1, batch 30850, loss[loss=0.1519, simple_loss=0.2265, pruned_loss=0.03866, over 4904.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2436, pruned_loss=0.05727, over 972465.48 frames.], batch size: 17, lr: 8.14e-04 +2022-05-04 02:19:01,717 INFO [train.py:715] (7/8) Epoch 1, batch 30900, loss[loss=0.1727, simple_loss=0.2399, pruned_loss=0.0528, over 4851.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2431, pruned_loss=0.05681, over 972268.20 frames.], batch size: 20, lr: 8.14e-04 +2022-05-04 02:19:41,346 INFO [train.py:715] (7/8) Epoch 1, batch 30950, loss[loss=0.2245, simple_loss=0.2791, pruned_loss=0.08495, over 4887.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2428, pruned_loss=0.05673, over 971780.75 frames.], batch size: 39, lr: 8.14e-04 +2022-05-04 02:20:20,854 INFO [train.py:715] (7/8) Epoch 1, batch 31000, loss[loss=0.1852, simple_loss=0.2445, pruned_loss=0.06296, over 4687.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2442, pruned_loss=0.05785, over 971515.53 frames.], batch size: 15, lr: 8.13e-04 +2022-05-04 02:21:00,358 INFO [train.py:715] (7/8) Epoch 1, batch 31050, loss[loss=0.1868, simple_loss=0.2465, pruned_loss=0.06351, over 4757.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2431, pruned_loss=0.05704, over 970185.13 frames.], batch size: 16, lr: 8.13e-04 +2022-05-04 02:21:40,836 INFO [train.py:715] (7/8) Epoch 1, batch 31100, loss[loss=0.17, simple_loss=0.2297, pruned_loss=0.05517, over 4705.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2434, pruned_loss=0.05721, over 971132.27 frames.], batch size: 15, lr: 8.13e-04 +2022-05-04 02:22:20,584 INFO [train.py:715] (7/8) Epoch 1, batch 31150, loss[loss=0.1769, simple_loss=0.2357, pruned_loss=0.05911, over 4772.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2433, pruned_loss=0.05695, over 971253.21 frames.], batch size: 18, lr: 8.12e-04 +2022-05-04 02:22:59,628 INFO [train.py:715] (7/8) Epoch 1, batch 31200, loss[loss=0.171, simple_loss=0.2358, pruned_loss=0.05311, over 4837.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2438, pruned_loss=0.05738, over 971693.04 frames.], batch size: 30, lr: 8.12e-04 +2022-05-04 02:23:39,864 INFO [train.py:715] (7/8) Epoch 1, batch 31250, loss[loss=0.1995, simple_loss=0.252, pruned_loss=0.07349, over 4881.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2442, pruned_loss=0.05747, over 973096.50 frames.], batch size: 22, lr: 8.12e-04 +2022-05-04 02:24:19,628 INFO [train.py:715] (7/8) Epoch 1, batch 31300, loss[loss=0.1758, simple_loss=0.2394, pruned_loss=0.0561, over 4801.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2447, pruned_loss=0.05776, over 972562.71 frames.], batch size: 13, lr: 8.11e-04 +2022-05-04 02:24:59,063 INFO [train.py:715] (7/8) 
Epoch 1, batch 31350, loss[loss=0.1652, simple_loss=0.2284, pruned_loss=0.05096, over 4801.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2442, pruned_loss=0.05726, over 971815.60 frames.], batch size: 24, lr: 8.11e-04 +2022-05-04 02:25:38,862 INFO [train.py:715] (7/8) Epoch 1, batch 31400, loss[loss=0.1953, simple_loss=0.2469, pruned_loss=0.07188, over 4960.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2444, pruned_loss=0.05717, over 972148.16 frames.], batch size: 15, lr: 8.11e-04 +2022-05-04 02:26:18,869 INFO [train.py:715] (7/8) Epoch 1, batch 31450, loss[loss=0.1911, simple_loss=0.2557, pruned_loss=0.06321, over 4980.00 frames.], tot_loss[loss=0.179, simple_loss=0.2443, pruned_loss=0.05682, over 973283.64 frames.], batch size: 14, lr: 8.11e-04 +2022-05-04 02:26:58,731 INFO [train.py:715] (7/8) Epoch 1, batch 31500, loss[loss=0.1505, simple_loss=0.2238, pruned_loss=0.03856, over 4776.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2437, pruned_loss=0.0565, over 972544.56 frames.], batch size: 14, lr: 8.10e-04 +2022-05-04 02:27:37,228 INFO [train.py:715] (7/8) Epoch 1, batch 31550, loss[loss=0.1538, simple_loss=0.2273, pruned_loss=0.04017, over 4907.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2433, pruned_loss=0.05656, over 972607.05 frames.], batch size: 17, lr: 8.10e-04 +2022-05-04 02:28:17,418 INFO [train.py:715] (7/8) Epoch 1, batch 31600, loss[loss=0.1673, simple_loss=0.2303, pruned_loss=0.05217, over 4847.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2431, pruned_loss=0.05666, over 973188.76 frames.], batch size: 30, lr: 8.10e-04 +2022-05-04 02:28:57,091 INFO [train.py:715] (7/8) Epoch 1, batch 31650, loss[loss=0.1886, simple_loss=0.2442, pruned_loss=0.06651, over 4742.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2431, pruned_loss=0.05674, over 973892.60 frames.], batch size: 19, lr: 8.09e-04 +2022-05-04 02:29:37,004 INFO [train.py:715] (7/8) Epoch 1, batch 31700, loss[loss=0.2256, simple_loss=0.2913, pruned_loss=0.0799, over 4808.00 frames.], tot_loss[loss=0.1794, simple_loss=0.2443, pruned_loss=0.05724, over 973231.43 frames.], batch size: 25, lr: 8.09e-04 +2022-05-04 02:30:16,366 INFO [train.py:715] (7/8) Epoch 1, batch 31750, loss[loss=0.1901, simple_loss=0.2506, pruned_loss=0.06482, over 4863.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2448, pruned_loss=0.05767, over 973000.71 frames.], batch size: 30, lr: 8.09e-04 +2022-05-04 02:30:56,483 INFO [train.py:715] (7/8) Epoch 1, batch 31800, loss[loss=0.1678, simple_loss=0.2295, pruned_loss=0.05309, over 4916.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2436, pruned_loss=0.05687, over 972567.15 frames.], batch size: 18, lr: 8.08e-04 +2022-05-04 02:31:36,276 INFO [train.py:715] (7/8) Epoch 1, batch 31850, loss[loss=0.1707, simple_loss=0.2442, pruned_loss=0.04857, over 4822.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2441, pruned_loss=0.0575, over 972847.54 frames.], batch size: 26, lr: 8.08e-04 +2022-05-04 02:32:15,745 INFO [train.py:715] (7/8) Epoch 1, batch 31900, loss[loss=0.1676, simple_loss=0.2344, pruned_loss=0.0504, over 4782.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2448, pruned_loss=0.05821, over 973466.59 frames.], batch size: 17, lr: 8.08e-04 +2022-05-04 02:32:55,109 INFO [train.py:715] (7/8) Epoch 1, batch 31950, loss[loss=0.16, simple_loss=0.2232, pruned_loss=0.0484, over 4924.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2453, pruned_loss=0.0584, over 973296.84 frames.], batch size: 18, lr: 8.08e-04 +2022-05-04 02:33:34,640 INFO [train.py:715] (7/8) Epoch 1, batch 32000, 
loss[loss=0.1562, simple_loss=0.2199, pruned_loss=0.04619, over 4841.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2452, pruned_loss=0.0581, over 973594.82 frames.], batch size: 30, lr: 8.07e-04 +2022-05-04 02:34:14,064 INFO [train.py:715] (7/8) Epoch 1, batch 32050, loss[loss=0.1784, simple_loss=0.2473, pruned_loss=0.05474, over 4764.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2447, pruned_loss=0.05771, over 973644.41 frames.], batch size: 19, lr: 8.07e-04 +2022-05-04 02:34:53,320 INFO [train.py:715] (7/8) Epoch 1, batch 32100, loss[loss=0.1385, simple_loss=0.2014, pruned_loss=0.03779, over 4847.00 frames.], tot_loss[loss=0.1804, simple_loss=0.2451, pruned_loss=0.05785, over 973024.53 frames.], batch size: 13, lr: 8.07e-04 +2022-05-04 02:35:32,942 INFO [train.py:715] (7/8) Epoch 1, batch 32150, loss[loss=0.1482, simple_loss=0.2188, pruned_loss=0.03884, over 4828.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2441, pruned_loss=0.05728, over 973387.72 frames.], batch size: 15, lr: 8.06e-04 +2022-05-04 02:36:12,939 INFO [train.py:715] (7/8) Epoch 1, batch 32200, loss[loss=0.1515, simple_loss=0.2139, pruned_loss=0.04454, over 4773.00 frames.], tot_loss[loss=0.1781, simple_loss=0.243, pruned_loss=0.05664, over 972957.10 frames.], batch size: 19, lr: 8.06e-04 +2022-05-04 02:36:51,841 INFO [train.py:715] (7/8) Epoch 1, batch 32250, loss[loss=0.2159, simple_loss=0.2688, pruned_loss=0.08153, over 4872.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2427, pruned_loss=0.05681, over 972538.70 frames.], batch size: 16, lr: 8.06e-04 +2022-05-04 02:37:31,250 INFO [train.py:715] (7/8) Epoch 1, batch 32300, loss[loss=0.1762, simple_loss=0.2347, pruned_loss=0.05887, over 4844.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2442, pruned_loss=0.05757, over 972849.56 frames.], batch size: 15, lr: 8.05e-04 +2022-05-04 02:38:10,689 INFO [train.py:715] (7/8) Epoch 1, batch 32350, loss[loss=0.1796, simple_loss=0.2411, pruned_loss=0.05905, over 4979.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2439, pruned_loss=0.05714, over 973060.61 frames.], batch size: 15, lr: 8.05e-04 +2022-05-04 02:38:50,283 INFO [train.py:715] (7/8) Epoch 1, batch 32400, loss[loss=0.1509, simple_loss=0.2201, pruned_loss=0.0408, over 4654.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2435, pruned_loss=0.05705, over 972924.69 frames.], batch size: 13, lr: 8.05e-04 +2022-05-04 02:39:29,217 INFO [train.py:715] (7/8) Epoch 1, batch 32450, loss[loss=0.1566, simple_loss=0.2278, pruned_loss=0.04269, over 4856.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2438, pruned_loss=0.05743, over 972814.98 frames.], batch size: 30, lr: 8.05e-04 +2022-05-04 02:40:08,862 INFO [train.py:715] (7/8) Epoch 1, batch 32500, loss[loss=0.1762, simple_loss=0.2417, pruned_loss=0.05534, over 4699.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2421, pruned_loss=0.05616, over 972921.35 frames.], batch size: 15, lr: 8.04e-04 +2022-05-04 02:40:48,377 INFO [train.py:715] (7/8) Epoch 1, batch 32550, loss[loss=0.2141, simple_loss=0.2679, pruned_loss=0.08013, over 4899.00 frames.], tot_loss[loss=0.177, simple_loss=0.242, pruned_loss=0.05594, over 972944.37 frames.], batch size: 17, lr: 8.04e-04 +2022-05-04 02:41:27,300 INFO [train.py:715] (7/8) Epoch 1, batch 32600, loss[loss=0.1569, simple_loss=0.2114, pruned_loss=0.05118, over 4784.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2428, pruned_loss=0.05695, over 972277.03 frames.], batch size: 18, lr: 8.04e-04 +2022-05-04 02:42:06,692 INFO [train.py:715] (7/8) Epoch 1, batch 32650, loss[loss=0.2198, 
simple_loss=0.2718, pruned_loss=0.08385, over 4799.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2433, pruned_loss=0.05702, over 972342.60 frames.], batch size: 24, lr: 8.03e-04 +2022-05-04 02:42:46,234 INFO [train.py:715] (7/8) Epoch 1, batch 32700, loss[loss=0.1643, simple_loss=0.2377, pruned_loss=0.0455, over 4794.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2436, pruned_loss=0.057, over 972181.92 frames.], batch size: 24, lr: 8.03e-04 +2022-05-04 02:43:25,965 INFO [train.py:715] (7/8) Epoch 1, batch 32750, loss[loss=0.1747, simple_loss=0.2422, pruned_loss=0.05358, over 4914.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2433, pruned_loss=0.05703, over 972525.88 frames.], batch size: 19, lr: 8.03e-04 +2022-05-04 02:44:05,920 INFO [train.py:715] (7/8) Epoch 1, batch 32800, loss[loss=0.1722, simple_loss=0.2371, pruned_loss=0.05369, over 4967.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2433, pruned_loss=0.05695, over 972106.84 frames.], batch size: 24, lr: 8.02e-04 +2022-05-04 02:44:45,558 INFO [train.py:715] (7/8) Epoch 1, batch 32850, loss[loss=0.1632, simple_loss=0.222, pruned_loss=0.05217, over 4791.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2432, pruned_loss=0.05719, over 972671.86 frames.], batch size: 24, lr: 8.02e-04 +2022-05-04 02:45:24,930 INFO [train.py:715] (7/8) Epoch 1, batch 32900, loss[loss=0.1322, simple_loss=0.1987, pruned_loss=0.03285, over 4643.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2428, pruned_loss=0.05673, over 973058.81 frames.], batch size: 13, lr: 8.02e-04 +2022-05-04 02:46:04,181 INFO [train.py:715] (7/8) Epoch 1, batch 32950, loss[loss=0.1627, simple_loss=0.2263, pruned_loss=0.04952, over 4827.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2434, pruned_loss=0.05691, over 972872.77 frames.], batch size: 30, lr: 8.02e-04 +2022-05-04 02:46:43,645 INFO [train.py:715] (7/8) Epoch 1, batch 33000, loss[loss=0.1842, simple_loss=0.2478, pruned_loss=0.06026, over 4880.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2436, pruned_loss=0.05711, over 973405.38 frames.], batch size: 19, lr: 8.01e-04 +2022-05-04 02:46:43,646 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 02:46:52,425 INFO [train.py:742] (7/8) Epoch 1, validation: loss=0.1208, simple_loss=0.2074, pruned_loss=0.01714, over 914524.00 frames. 
+2022-05-04 02:47:32,106 INFO [train.py:715] (7/8) Epoch 1, batch 33050, loss[loss=0.1812, simple_loss=0.2578, pruned_loss=0.05233, over 4783.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2424, pruned_loss=0.05645, over 973544.20 frames.], batch size: 18, lr: 8.01e-04 +2022-05-04 02:48:12,133 INFO [train.py:715] (7/8) Epoch 1, batch 33100, loss[loss=0.2046, simple_loss=0.2587, pruned_loss=0.07522, over 4760.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2424, pruned_loss=0.05672, over 974252.86 frames.], batch size: 19, lr: 8.01e-04 +2022-05-04 02:48:52,002 INFO [train.py:715] (7/8) Epoch 1, batch 33150, loss[loss=0.184, simple_loss=0.2568, pruned_loss=0.05561, over 4839.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2432, pruned_loss=0.05694, over 973361.95 frames.], batch size: 15, lr: 8.00e-04 +2022-05-04 02:49:31,138 INFO [train.py:715] (7/8) Epoch 1, batch 33200, loss[loss=0.2047, simple_loss=0.2668, pruned_loss=0.07133, over 4911.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2433, pruned_loss=0.05704, over 973188.74 frames.], batch size: 19, lr: 8.00e-04 +2022-05-04 02:50:11,559 INFO [train.py:715] (7/8) Epoch 1, batch 33250, loss[loss=0.1727, simple_loss=0.2284, pruned_loss=0.05855, over 4759.00 frames.], tot_loss[loss=0.1791, simple_loss=0.244, pruned_loss=0.05706, over 973133.17 frames.], batch size: 19, lr: 8.00e-04 +2022-05-04 02:50:51,592 INFO [train.py:715] (7/8) Epoch 1, batch 33300, loss[loss=0.2162, simple_loss=0.2839, pruned_loss=0.07425, over 4806.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2438, pruned_loss=0.05691, over 974149.59 frames.], batch size: 21, lr: 8.00e-04 +2022-05-04 02:51:31,059 INFO [train.py:715] (7/8) Epoch 1, batch 33350, loss[loss=0.1517, simple_loss=0.2183, pruned_loss=0.04254, over 4841.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2432, pruned_loss=0.0566, over 973549.66 frames.], batch size: 13, lr: 7.99e-04 +2022-05-04 02:52:11,436 INFO [train.py:715] (7/8) Epoch 1, batch 33400, loss[loss=0.1698, simple_loss=0.2393, pruned_loss=0.05012, over 4940.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2433, pruned_loss=0.05696, over 973002.82 frames.], batch size: 23, lr: 7.99e-04 +2022-05-04 02:52:51,300 INFO [train.py:715] (7/8) Epoch 1, batch 33450, loss[loss=0.1913, simple_loss=0.2638, pruned_loss=0.05939, over 4909.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2436, pruned_loss=0.05704, over 972991.00 frames.], batch size: 17, lr: 7.99e-04 +2022-05-04 02:53:30,411 INFO [train.py:715] (7/8) Epoch 1, batch 33500, loss[loss=0.1698, simple_loss=0.2299, pruned_loss=0.05482, over 4974.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2435, pruned_loss=0.05675, over 972723.26 frames.], batch size: 14, lr: 7.98e-04 +2022-05-04 02:54:10,340 INFO [train.py:715] (7/8) Epoch 1, batch 33550, loss[loss=0.1762, simple_loss=0.2407, pruned_loss=0.05589, over 4966.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2428, pruned_loss=0.05677, over 972317.07 frames.], batch size: 35, lr: 7.98e-04 +2022-05-04 02:54:50,185 INFO [train.py:715] (7/8) Epoch 1, batch 33600, loss[loss=0.1622, simple_loss=0.2243, pruned_loss=0.05011, over 4820.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2436, pruned_loss=0.05696, over 972455.62 frames.], batch size: 26, lr: 7.98e-04 +2022-05-04 02:55:29,604 INFO [train.py:715] (7/8) Epoch 1, batch 33650, loss[loss=0.1763, simple_loss=0.2426, pruned_loss=0.05505, over 4956.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2439, pruned_loss=0.05698, over 972564.25 frames.], batch size: 24, lr: 7.97e-04 +2022-05-04 
02:56:08,653 INFO [train.py:715] (7/8) Epoch 1, batch 33700, loss[loss=0.1798, simple_loss=0.2512, pruned_loss=0.05415, over 4958.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2433, pruned_loss=0.05649, over 972915.58 frames.], batch size: 29, lr: 7.97e-04 +2022-05-04 02:56:47,810 INFO [train.py:715] (7/8) Epoch 1, batch 33750, loss[loss=0.1622, simple_loss=0.237, pruned_loss=0.04372, over 4959.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2421, pruned_loss=0.05568, over 972516.56 frames.], batch size: 24, lr: 7.97e-04 +2022-05-04 02:57:27,456 INFO [train.py:715] (7/8) Epoch 1, batch 33800, loss[loss=0.1686, simple_loss=0.2487, pruned_loss=0.04422, over 4980.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2424, pruned_loss=0.0559, over 972843.31 frames.], batch size: 15, lr: 7.97e-04 +2022-05-04 02:58:06,286 INFO [train.py:715] (7/8) Epoch 1, batch 33850, loss[loss=0.1453, simple_loss=0.2215, pruned_loss=0.03454, over 4903.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2441, pruned_loss=0.05705, over 972532.16 frames.], batch size: 18, lr: 7.96e-04 +2022-05-04 02:58:45,805 INFO [train.py:715] (7/8) Epoch 1, batch 33900, loss[loss=0.1545, simple_loss=0.2138, pruned_loss=0.04757, over 4736.00 frames.], tot_loss[loss=0.178, simple_loss=0.2435, pruned_loss=0.05628, over 972398.29 frames.], batch size: 16, lr: 7.96e-04 +2022-05-04 02:59:25,370 INFO [train.py:715] (7/8) Epoch 1, batch 33950, loss[loss=0.1871, simple_loss=0.2444, pruned_loss=0.0649, over 4845.00 frames.], tot_loss[loss=0.1782, simple_loss=0.2434, pruned_loss=0.05651, over 972179.18 frames.], batch size: 15, lr: 7.96e-04 +2022-05-04 03:00:05,094 INFO [train.py:715] (7/8) Epoch 1, batch 34000, loss[loss=0.1604, simple_loss=0.2313, pruned_loss=0.04472, over 4735.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2433, pruned_loss=0.05669, over 972436.44 frames.], batch size: 16, lr: 7.95e-04 +2022-05-04 03:00:44,415 INFO [train.py:715] (7/8) Epoch 1, batch 34050, loss[loss=0.1632, simple_loss=0.2316, pruned_loss=0.04735, over 4769.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2432, pruned_loss=0.05634, over 973169.56 frames.], batch size: 14, lr: 7.95e-04 +2022-05-04 03:01:23,802 INFO [train.py:715] (7/8) Epoch 1, batch 34100, loss[loss=0.2186, simple_loss=0.2783, pruned_loss=0.0795, over 4965.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2436, pruned_loss=0.05703, over 973293.13 frames.], batch size: 35, lr: 7.95e-04 +2022-05-04 03:02:03,183 INFO [train.py:715] (7/8) Epoch 1, batch 34150, loss[loss=0.1568, simple_loss=0.2275, pruned_loss=0.04309, over 4789.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2447, pruned_loss=0.05791, over 973662.69 frames.], batch size: 14, lr: 7.95e-04 +2022-05-04 03:02:42,209 INFO [train.py:715] (7/8) Epoch 1, batch 34200, loss[loss=0.1488, simple_loss=0.207, pruned_loss=0.04534, over 4968.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2435, pruned_loss=0.05745, over 972184.06 frames.], batch size: 14, lr: 7.94e-04 +2022-05-04 03:03:21,762 INFO [train.py:715] (7/8) Epoch 1, batch 34250, loss[loss=0.2065, simple_loss=0.2677, pruned_loss=0.07268, over 4841.00 frames.], tot_loss[loss=0.1784, simple_loss=0.243, pruned_loss=0.05692, over 972606.41 frames.], batch size: 32, lr: 7.94e-04 +2022-05-04 03:04:01,440 INFO [train.py:715] (7/8) Epoch 1, batch 34300, loss[loss=0.1511, simple_loss=0.225, pruned_loss=0.0386, over 4961.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2432, pruned_loss=0.05698, over 972615.41 frames.], batch size: 15, lr: 7.94e-04 +2022-05-04 03:04:40,850 INFO 
[train.py:715] (7/8) Epoch 1, batch 34350, loss[loss=0.2109, simple_loss=0.2707, pruned_loss=0.07555, over 4762.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2432, pruned_loss=0.05674, over 972784.41 frames.], batch size: 14, lr: 7.93e-04 +2022-05-04 03:05:19,754 INFO [train.py:715] (7/8) Epoch 1, batch 34400, loss[loss=0.1517, simple_loss=0.2327, pruned_loss=0.03528, over 4798.00 frames.], tot_loss[loss=0.178, simple_loss=0.2427, pruned_loss=0.05666, over 973294.44 frames.], batch size: 21, lr: 7.93e-04 +2022-05-04 03:05:59,257 INFO [train.py:715] (7/8) Epoch 1, batch 34450, loss[loss=0.1776, simple_loss=0.2371, pruned_loss=0.05899, over 4903.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2428, pruned_loss=0.05619, over 973282.83 frames.], batch size: 17, lr: 7.93e-04 +2022-05-04 03:06:38,482 INFO [train.py:715] (7/8) Epoch 1, batch 34500, loss[loss=0.1771, simple_loss=0.2497, pruned_loss=0.05226, over 4949.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2432, pruned_loss=0.05671, over 973589.53 frames.], batch size: 21, lr: 7.93e-04 +2022-05-04 03:07:17,763 INFO [train.py:715] (7/8) Epoch 1, batch 34550, loss[loss=0.2253, simple_loss=0.271, pruned_loss=0.08982, over 4697.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2433, pruned_loss=0.05644, over 973011.31 frames.], batch size: 15, lr: 7.92e-04 +2022-05-04 03:07:57,343 INFO [train.py:715] (7/8) Epoch 1, batch 34600, loss[loss=0.2087, simple_loss=0.2666, pruned_loss=0.07543, over 4872.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2436, pruned_loss=0.05663, over 973274.30 frames.], batch size: 16, lr: 7.92e-04 +2022-05-04 03:08:37,230 INFO [train.py:715] (7/8) Epoch 1, batch 34650, loss[loss=0.1646, simple_loss=0.214, pruned_loss=0.05755, over 4842.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2428, pruned_loss=0.05643, over 973357.23 frames.], batch size: 13, lr: 7.92e-04 +2022-05-04 03:09:17,432 INFO [train.py:715] (7/8) Epoch 1, batch 34700, loss[loss=0.2057, simple_loss=0.2765, pruned_loss=0.06744, over 4953.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2431, pruned_loss=0.057, over 972931.22 frames.], batch size: 24, lr: 7.91e-04 +2022-05-04 03:09:55,742 INFO [train.py:715] (7/8) Epoch 1, batch 34750, loss[loss=0.1779, simple_loss=0.2451, pruned_loss=0.0554, over 4810.00 frames.], tot_loss[loss=0.1798, simple_loss=0.2441, pruned_loss=0.05773, over 973032.48 frames.], batch size: 27, lr: 7.91e-04 +2022-05-04 03:10:32,247 INFO [train.py:715] (7/8) Epoch 1, batch 34800, loss[loss=0.1568, simple_loss=0.2289, pruned_loss=0.04236, over 4932.00 frames.], tot_loss[loss=0.1782, simple_loss=0.243, pruned_loss=0.05675, over 973911.76 frames.], batch size: 21, lr: 7.91e-04 +2022-05-04 03:11:25,708 INFO [train.py:715] (7/8) Epoch 2, batch 0, loss[loss=0.1968, simple_loss=0.2615, pruned_loss=0.06604, over 4814.00 frames.], tot_loss[loss=0.1968, simple_loss=0.2615, pruned_loss=0.06604, over 4814.00 frames.], batch size: 27, lr: 7.59e-04 +2022-05-04 03:12:05,766 INFO [train.py:715] (7/8) Epoch 2, batch 50, loss[loss=0.2189, simple_loss=0.276, pruned_loss=0.08084, over 4931.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2426, pruned_loss=0.05661, over 219356.39 frames.], batch size: 39, lr: 7.59e-04 +2022-05-04 03:12:46,583 INFO [train.py:715] (7/8) Epoch 2, batch 100, loss[loss=0.1832, simple_loss=0.2428, pruned_loss=0.06178, over 4972.00 frames.], tot_loss[loss=0.1775, simple_loss=0.2424, pruned_loss=0.05634, over 386059.04 frames.], batch size: 15, lr: 7.59e-04 +2022-05-04 03:13:27,200 INFO [train.py:715] (7/8) Epoch 2, batch 
150, loss[loss=0.2022, simple_loss=0.273, pruned_loss=0.06568, over 4903.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2403, pruned_loss=0.05475, over 515395.59 frames.], batch size: 18, lr: 7.59e-04 +2022-05-04 03:14:07,251 INFO [train.py:715] (7/8) Epoch 2, batch 200, loss[loss=0.2101, simple_loss=0.2698, pruned_loss=0.07524, over 4858.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2421, pruned_loss=0.05635, over 616677.57 frames.], batch size: 20, lr: 7.58e-04 +2022-05-04 03:14:48,007 INFO [train.py:715] (7/8) Epoch 2, batch 250, loss[loss=0.1549, simple_loss=0.2213, pruned_loss=0.04428, over 4886.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2417, pruned_loss=0.05642, over 695521.42 frames.], batch size: 16, lr: 7.58e-04 +2022-05-04 03:15:29,357 INFO [train.py:715] (7/8) Epoch 2, batch 300, loss[loss=0.2005, simple_loss=0.2722, pruned_loss=0.06445, over 4916.00 frames.], tot_loss[loss=0.1774, simple_loss=0.242, pruned_loss=0.05636, over 756577.09 frames.], batch size: 39, lr: 7.58e-04 +2022-05-04 03:16:10,306 INFO [train.py:715] (7/8) Epoch 2, batch 350, loss[loss=0.1443, simple_loss=0.2145, pruned_loss=0.03708, over 4824.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2414, pruned_loss=0.05605, over 803515.85 frames.], batch size: 12, lr: 7.57e-04 +2022-05-04 03:16:49,968 INFO [train.py:715] (7/8) Epoch 2, batch 400, loss[loss=0.1831, simple_loss=0.2367, pruned_loss=0.06481, over 4703.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2414, pruned_loss=0.05553, over 841598.99 frames.], batch size: 15, lr: 7.57e-04 +2022-05-04 03:17:30,474 INFO [train.py:715] (7/8) Epoch 2, batch 450, loss[loss=0.2284, simple_loss=0.2922, pruned_loss=0.0823, over 4818.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2427, pruned_loss=0.05595, over 870298.39 frames.], batch size: 27, lr: 7.57e-04 +2022-05-04 03:18:11,619 INFO [train.py:715] (7/8) Epoch 2, batch 500, loss[loss=0.1903, simple_loss=0.2631, pruned_loss=0.05877, over 4894.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2418, pruned_loss=0.05588, over 892294.06 frames.], batch size: 22, lr: 7.57e-04 +2022-05-04 03:18:51,553 INFO [train.py:715] (7/8) Epoch 2, batch 550, loss[loss=0.1643, simple_loss=0.2308, pruned_loss=0.04892, over 4935.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2421, pruned_loss=0.05623, over 910031.76 frames.], batch size: 35, lr: 7.56e-04 +2022-05-04 03:19:31,921 INFO [train.py:715] (7/8) Epoch 2, batch 600, loss[loss=0.1464, simple_loss=0.2187, pruned_loss=0.03708, over 4923.00 frames.], tot_loss[loss=0.1778, simple_loss=0.2426, pruned_loss=0.05644, over 923707.73 frames.], batch size: 18, lr: 7.56e-04 +2022-05-04 03:20:12,758 INFO [train.py:715] (7/8) Epoch 2, batch 650, loss[loss=0.1931, simple_loss=0.2497, pruned_loss=0.06825, over 4838.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2414, pruned_loss=0.05563, over 934289.47 frames.], batch size: 30, lr: 7.56e-04 +2022-05-04 03:20:53,349 INFO [train.py:715] (7/8) Epoch 2, batch 700, loss[loss=0.1513, simple_loss=0.2292, pruned_loss=0.03669, over 4741.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2412, pruned_loss=0.05488, over 942385.78 frames.], batch size: 16, lr: 7.56e-04 +2022-05-04 03:21:32,905 INFO [train.py:715] (7/8) Epoch 2, batch 750, loss[loss=0.1965, simple_loss=0.2608, pruned_loss=0.06608, over 4884.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2402, pruned_loss=0.05472, over 948659.85 frames.], batch size: 32, lr: 7.55e-04 +2022-05-04 03:22:13,349 INFO [train.py:715] (7/8) Epoch 2, batch 800, loss[loss=0.1717, simple_loss=0.2495, 
pruned_loss=0.047, over 4921.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2403, pruned_loss=0.05424, over 954108.38 frames.], batch size: 18, lr: 7.55e-04 +2022-05-04 03:22:53,990 INFO [train.py:715] (7/8) Epoch 2, batch 850, loss[loss=0.1677, simple_loss=0.2419, pruned_loss=0.04676, over 4901.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2396, pruned_loss=0.05426, over 958123.73 frames.], batch size: 22, lr: 7.55e-04 +2022-05-04 03:23:34,294 INFO [train.py:715] (7/8) Epoch 2, batch 900, loss[loss=0.2133, simple_loss=0.2682, pruned_loss=0.07922, over 4852.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2396, pruned_loss=0.05463, over 961250.79 frames.], batch size: 32, lr: 7.55e-04 +2022-05-04 03:24:14,713 INFO [train.py:715] (7/8) Epoch 2, batch 950, loss[loss=0.1773, simple_loss=0.2311, pruned_loss=0.0618, over 4976.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2396, pruned_loss=0.05482, over 963436.43 frames.], batch size: 35, lr: 7.54e-04 +2022-05-04 03:24:55,404 INFO [train.py:715] (7/8) Epoch 2, batch 1000, loss[loss=0.1922, simple_loss=0.2443, pruned_loss=0.0701, over 4989.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2404, pruned_loss=0.05567, over 965160.16 frames.], batch size: 31, lr: 7.54e-04 +2022-05-04 03:25:36,200 INFO [train.py:715] (7/8) Epoch 2, batch 1050, loss[loss=0.1556, simple_loss=0.2146, pruned_loss=0.04835, over 4834.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2402, pruned_loss=0.05538, over 966340.53 frames.], batch size: 15, lr: 7.54e-04 +2022-05-04 03:26:15,806 INFO [train.py:715] (7/8) Epoch 2, batch 1100, loss[loss=0.242, simple_loss=0.297, pruned_loss=0.09348, over 4797.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2414, pruned_loss=0.05583, over 966640.04 frames.], batch size: 17, lr: 7.53e-04 +2022-05-04 03:26:56,305 INFO [train.py:715] (7/8) Epoch 2, batch 1150, loss[loss=0.1823, simple_loss=0.2419, pruned_loss=0.06135, over 4865.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2418, pruned_loss=0.05595, over 968306.55 frames.], batch size: 20, lr: 7.53e-04 +2022-05-04 03:27:37,639 INFO [train.py:715] (7/8) Epoch 2, batch 1200, loss[loss=0.1734, simple_loss=0.2471, pruned_loss=0.04988, over 4986.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2418, pruned_loss=0.05558, over 968987.21 frames.], batch size: 28, lr: 7.53e-04 +2022-05-04 03:28:18,255 INFO [train.py:715] (7/8) Epoch 2, batch 1250, loss[loss=0.1626, simple_loss=0.2249, pruned_loss=0.05016, over 4911.00 frames.], tot_loss[loss=0.1769, simple_loss=0.242, pruned_loss=0.05584, over 968868.19 frames.], batch size: 18, lr: 7.53e-04 +2022-05-04 03:28:57,936 INFO [train.py:715] (7/8) Epoch 2, batch 1300, loss[loss=0.1725, simple_loss=0.2296, pruned_loss=0.05768, over 4976.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2419, pruned_loss=0.05573, over 970537.06 frames.], batch size: 25, lr: 7.52e-04 +2022-05-04 03:29:38,476 INFO [train.py:715] (7/8) Epoch 2, batch 1350, loss[loss=0.1571, simple_loss=0.2195, pruned_loss=0.04728, over 4789.00 frames.], tot_loss[loss=0.178, simple_loss=0.2426, pruned_loss=0.05667, over 970093.75 frames.], batch size: 17, lr: 7.52e-04 +2022-05-04 03:30:19,113 INFO [train.py:715] (7/8) Epoch 2, batch 1400, loss[loss=0.173, simple_loss=0.2437, pruned_loss=0.05117, over 4767.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2432, pruned_loss=0.05672, over 971026.25 frames.], batch size: 14, lr: 7.52e-04 +2022-05-04 03:30:59,080 INFO [train.py:715] (7/8) Epoch 2, batch 1450, loss[loss=0.1843, simple_loss=0.2534, pruned_loss=0.05755, over 4776.00 frames.], 
tot_loss[loss=0.1776, simple_loss=0.2428, pruned_loss=0.05621, over 970730.14 frames.], batch size: 17, lr: 7.52e-04 +2022-05-04 03:31:39,484 INFO [train.py:715] (7/8) Epoch 2, batch 1500, loss[loss=0.1742, simple_loss=0.2269, pruned_loss=0.06073, over 4690.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2419, pruned_loss=0.05562, over 970346.75 frames.], batch size: 15, lr: 7.51e-04 +2022-05-04 03:32:20,465 INFO [train.py:715] (7/8) Epoch 2, batch 1550, loss[loss=0.1728, simple_loss=0.2428, pruned_loss=0.05142, over 4877.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2426, pruned_loss=0.05519, over 970412.54 frames.], batch size: 22, lr: 7.51e-04 +2022-05-04 03:33:00,543 INFO [train.py:715] (7/8) Epoch 2, batch 1600, loss[loss=0.1659, simple_loss=0.2278, pruned_loss=0.05203, over 4937.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2418, pruned_loss=0.05498, over 970896.31 frames.], batch size: 29, lr: 7.51e-04 +2022-05-04 03:33:40,364 INFO [train.py:715] (7/8) Epoch 2, batch 1650, loss[loss=0.2286, simple_loss=0.2803, pruned_loss=0.08839, over 4913.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2418, pruned_loss=0.05479, over 971236.46 frames.], batch size: 23, lr: 7.51e-04 +2022-05-04 03:34:21,231 INFO [train.py:715] (7/8) Epoch 2, batch 1700, loss[loss=0.1532, simple_loss=0.217, pruned_loss=0.04471, over 4858.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2414, pruned_loss=0.05487, over 972594.24 frames.], batch size: 13, lr: 7.50e-04 +2022-05-04 03:35:02,279 INFO [train.py:715] (7/8) Epoch 2, batch 1750, loss[loss=0.1796, simple_loss=0.2475, pruned_loss=0.05583, over 4830.00 frames.], tot_loss[loss=0.177, simple_loss=0.2422, pruned_loss=0.05588, over 972041.59 frames.], batch size: 15, lr: 7.50e-04 +2022-05-04 03:35:42,182 INFO [train.py:715] (7/8) Epoch 2, batch 1800, loss[loss=0.1688, simple_loss=0.2325, pruned_loss=0.05258, over 4966.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2429, pruned_loss=0.05593, over 972409.15 frames.], batch size: 35, lr: 7.50e-04 +2022-05-04 03:36:22,546 INFO [train.py:715] (7/8) Epoch 2, batch 1850, loss[loss=0.1524, simple_loss=0.2152, pruned_loss=0.04479, over 4771.00 frames.], tot_loss[loss=0.1775, simple_loss=0.2431, pruned_loss=0.05595, over 971911.74 frames.], batch size: 17, lr: 7.50e-04 +2022-05-04 03:37:03,511 INFO [train.py:715] (7/8) Epoch 2, batch 1900, loss[loss=0.1829, simple_loss=0.2444, pruned_loss=0.06065, over 4789.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2425, pruned_loss=0.05547, over 971144.49 frames.], batch size: 14, lr: 7.49e-04 +2022-05-04 03:37:44,305 INFO [train.py:715] (7/8) Epoch 2, batch 1950, loss[loss=0.1529, simple_loss=0.2246, pruned_loss=0.04054, over 4788.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2426, pruned_loss=0.05552, over 971132.02 frames.], batch size: 21, lr: 7.49e-04 +2022-05-04 03:38:24,090 INFO [train.py:715] (7/8) Epoch 2, batch 2000, loss[loss=0.1377, simple_loss=0.2079, pruned_loss=0.03376, over 4916.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2423, pruned_loss=0.05514, over 971590.22 frames.], batch size: 17, lr: 7.49e-04 +2022-05-04 03:39:04,264 INFO [train.py:715] (7/8) Epoch 2, batch 2050, loss[loss=0.1803, simple_loss=0.2533, pruned_loss=0.05364, over 4868.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2426, pruned_loss=0.05525, over 971789.07 frames.], batch size: 16, lr: 7.48e-04 +2022-05-04 03:39:45,390 INFO [train.py:715] (7/8) Epoch 2, batch 2100, loss[loss=0.1775, simple_loss=0.2364, pruned_loss=0.0593, over 4680.00 frames.], tot_loss[loss=0.1758, 
simple_loss=0.2418, pruned_loss=0.05488, over 971527.93 frames.], batch size: 15, lr: 7.48e-04 +2022-05-04 03:40:25,369 INFO [train.py:715] (7/8) Epoch 2, batch 2150, loss[loss=0.1662, simple_loss=0.2301, pruned_loss=0.0512, over 4690.00 frames.], tot_loss[loss=0.177, simple_loss=0.243, pruned_loss=0.05551, over 970994.96 frames.], batch size: 15, lr: 7.48e-04 +2022-05-04 03:41:04,899 INFO [train.py:715] (7/8) Epoch 2, batch 2200, loss[loss=0.2044, simple_loss=0.2661, pruned_loss=0.07134, over 4923.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2435, pruned_loss=0.05587, over 970939.62 frames.], batch size: 17, lr: 7.48e-04 +2022-05-04 03:41:45,612 INFO [train.py:715] (7/8) Epoch 2, batch 2250, loss[loss=0.2244, simple_loss=0.2801, pruned_loss=0.08429, over 4940.00 frames.], tot_loss[loss=0.177, simple_loss=0.2429, pruned_loss=0.05556, over 971056.82 frames.], batch size: 29, lr: 7.47e-04 +2022-05-04 03:42:26,412 INFO [train.py:715] (7/8) Epoch 2, batch 2300, loss[loss=0.1909, simple_loss=0.2567, pruned_loss=0.06258, over 4779.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2413, pruned_loss=0.05478, over 971376.10 frames.], batch size: 17, lr: 7.47e-04 +2022-05-04 03:43:05,609 INFO [train.py:715] (7/8) Epoch 2, batch 2350, loss[loss=0.1527, simple_loss=0.2221, pruned_loss=0.04165, over 4916.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2417, pruned_loss=0.05526, over 971728.23 frames.], batch size: 17, lr: 7.47e-04 +2022-05-04 03:43:48,333 INFO [train.py:715] (7/8) Epoch 2, batch 2400, loss[loss=0.1566, simple_loss=0.2238, pruned_loss=0.0447, over 4927.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2414, pruned_loss=0.0551, over 972481.60 frames.], batch size: 23, lr: 7.47e-04 +2022-05-04 03:44:29,320 INFO [train.py:715] (7/8) Epoch 2, batch 2450, loss[loss=0.1405, simple_loss=0.2202, pruned_loss=0.03042, over 4968.00 frames.], tot_loss[loss=0.1752, simple_loss=0.2409, pruned_loss=0.05475, over 973190.75 frames.], batch size: 14, lr: 7.46e-04 +2022-05-04 03:45:09,459 INFO [train.py:715] (7/8) Epoch 2, batch 2500, loss[loss=0.1797, simple_loss=0.2529, pruned_loss=0.05324, over 4906.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2414, pruned_loss=0.0549, over 972179.54 frames.], batch size: 19, lr: 7.46e-04 +2022-05-04 03:45:49,052 INFO [train.py:715] (7/8) Epoch 2, batch 2550, loss[loss=0.1709, simple_loss=0.2311, pruned_loss=0.05535, over 4840.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2413, pruned_loss=0.05498, over 971319.17 frames.], batch size: 30, lr: 7.46e-04 +2022-05-04 03:46:29,872 INFO [train.py:715] (7/8) Epoch 2, batch 2600, loss[loss=0.1784, simple_loss=0.2407, pruned_loss=0.05809, over 4951.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2415, pruned_loss=0.05509, over 971838.40 frames.], batch size: 35, lr: 7.46e-04 +2022-05-04 03:47:10,396 INFO [train.py:715] (7/8) Epoch 2, batch 2650, loss[loss=0.1493, simple_loss=0.2176, pruned_loss=0.04044, over 4704.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2416, pruned_loss=0.05493, over 971449.99 frames.], batch size: 15, lr: 7.45e-04 +2022-05-04 03:47:49,305 INFO [train.py:715] (7/8) Epoch 2, batch 2700, loss[loss=0.2112, simple_loss=0.2694, pruned_loss=0.07652, over 4769.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2409, pruned_loss=0.05445, over 971990.04 frames.], batch size: 18, lr: 7.45e-04 +2022-05-04 03:48:29,312 INFO [train.py:715] (7/8) Epoch 2, batch 2750, loss[loss=0.176, simple_loss=0.2379, pruned_loss=0.05706, over 4773.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2408, 
pruned_loss=0.05447, over 971996.84 frames.], batch size: 17, lr: 7.45e-04 +2022-05-04 03:49:10,368 INFO [train.py:715] (7/8) Epoch 2, batch 2800, loss[loss=0.1848, simple_loss=0.2466, pruned_loss=0.06145, over 4779.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2408, pruned_loss=0.05451, over 971052.92 frames.], batch size: 14, lr: 7.45e-04 +2022-05-04 03:49:50,285 INFO [train.py:715] (7/8) Epoch 2, batch 2850, loss[loss=0.1583, simple_loss=0.2307, pruned_loss=0.04292, over 4888.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2414, pruned_loss=0.0548, over 971555.73 frames.], batch size: 17, lr: 7.44e-04 +2022-05-04 03:50:29,541 INFO [train.py:715] (7/8) Epoch 2, batch 2900, loss[loss=0.1372, simple_loss=0.2076, pruned_loss=0.03345, over 4710.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2411, pruned_loss=0.05453, over 971777.52 frames.], batch size: 15, lr: 7.44e-04 +2022-05-04 03:51:09,906 INFO [train.py:715] (7/8) Epoch 2, batch 2950, loss[loss=0.2057, simple_loss=0.2648, pruned_loss=0.07327, over 4935.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2423, pruned_loss=0.05545, over 971871.12 frames.], batch size: 21, lr: 7.44e-04 +2022-05-04 03:51:50,591 INFO [train.py:715] (7/8) Epoch 2, batch 3000, loss[loss=0.1821, simple_loss=0.2584, pruned_loss=0.05289, over 4749.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2419, pruned_loss=0.05538, over 971074.14 frames.], batch size: 12, lr: 7.44e-04 +2022-05-04 03:51:50,592 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 03:52:00,002 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1191, simple_loss=0.2058, pruned_loss=0.01615, over 914524.00 frames. +2022-05-04 03:52:40,630 INFO [train.py:715] (7/8) Epoch 2, batch 3050, loss[loss=0.1591, simple_loss=0.224, pruned_loss=0.04713, over 4981.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2427, pruned_loss=0.05595, over 971766.18 frames.], batch size: 14, lr: 7.43e-04 +2022-05-04 03:53:19,881 INFO [train.py:715] (7/8) Epoch 2, batch 3100, loss[loss=0.1465, simple_loss=0.2184, pruned_loss=0.03725, over 4969.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2415, pruned_loss=0.05503, over 971336.64 frames.], batch size: 25, lr: 7.43e-04 +2022-05-04 03:53:59,888 INFO [train.py:715] (7/8) Epoch 2, batch 3150, loss[loss=0.1858, simple_loss=0.2529, pruned_loss=0.05937, over 4958.00 frames.], tot_loss[loss=0.1757, simple_loss=0.241, pruned_loss=0.05517, over 971534.84 frames.], batch size: 21, lr: 7.43e-04 +2022-05-04 03:54:40,144 INFO [train.py:715] (7/8) Epoch 2, batch 3200, loss[loss=0.1833, simple_loss=0.246, pruned_loss=0.06024, over 4848.00 frames.], tot_loss[loss=0.175, simple_loss=0.2404, pruned_loss=0.05478, over 972114.75 frames.], batch size: 16, lr: 7.43e-04 +2022-05-04 03:55:19,795 INFO [train.py:715] (7/8) Epoch 2, batch 3250, loss[loss=0.1707, simple_loss=0.2307, pruned_loss=0.0554, over 4851.00 frames.], tot_loss[loss=0.1753, simple_loss=0.2407, pruned_loss=0.05495, over 972219.04 frames.], batch size: 30, lr: 7.42e-04 +2022-05-04 03:55:59,355 INFO [train.py:715] (7/8) Epoch 2, batch 3300, loss[loss=0.2188, simple_loss=0.2761, pruned_loss=0.0807, over 4742.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2405, pruned_loss=0.05447, over 972232.34 frames.], batch size: 16, lr: 7.42e-04 +2022-05-04 03:56:39,598 INFO [train.py:715] (7/8) Epoch 2, batch 3350, loss[loss=0.1856, simple_loss=0.2538, pruned_loss=0.05867, over 4751.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2403, pruned_loss=0.05436, over 973563.50 frames.], batch size: 16, lr: 7.42e-04 
+2022-05-04 03:57:20,091 INFO [train.py:715] (7/8) Epoch 2, batch 3400, loss[loss=0.174, simple_loss=0.2379, pruned_loss=0.05511, over 4922.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2413, pruned_loss=0.0552, over 972960.57 frames.], batch size: 18, lr: 7.42e-04 +2022-05-04 03:57:58,920 INFO [train.py:715] (7/8) Epoch 2, batch 3450, loss[loss=0.1783, simple_loss=0.247, pruned_loss=0.05479, over 4823.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2414, pruned_loss=0.05511, over 972922.76 frames.], batch size: 27, lr: 7.41e-04 +2022-05-04 03:58:38,940 INFO [train.py:715] (7/8) Epoch 2, batch 3500, loss[loss=0.1631, simple_loss=0.2292, pruned_loss=0.04846, over 4913.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2408, pruned_loss=0.05467, over 973736.09 frames.], batch size: 17, lr: 7.41e-04 +2022-05-04 03:59:19,007 INFO [train.py:715] (7/8) Epoch 2, batch 3550, loss[loss=0.1687, simple_loss=0.2347, pruned_loss=0.05139, over 4706.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2419, pruned_loss=0.05523, over 974008.46 frames.], batch size: 15, lr: 7.41e-04 +2022-05-04 03:59:58,780 INFO [train.py:715] (7/8) Epoch 2, batch 3600, loss[loss=0.177, simple_loss=0.2427, pruned_loss=0.05564, over 4762.00 frames.], tot_loss[loss=0.176, simple_loss=0.2417, pruned_loss=0.05516, over 973810.78 frames.], batch size: 16, lr: 7.41e-04 +2022-05-04 04:00:37,771 INFO [train.py:715] (7/8) Epoch 2, batch 3650, loss[loss=0.1585, simple_loss=0.2262, pruned_loss=0.04536, over 4940.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2426, pruned_loss=0.05597, over 974272.00 frames.], batch size: 18, lr: 7.40e-04 +2022-05-04 04:01:18,182 INFO [train.py:715] (7/8) Epoch 2, batch 3700, loss[loss=0.1679, simple_loss=0.2462, pruned_loss=0.0448, over 4919.00 frames.], tot_loss[loss=0.1764, simple_loss=0.242, pruned_loss=0.05542, over 974454.11 frames.], batch size: 17, lr: 7.40e-04 +2022-05-04 04:01:58,355 INFO [train.py:715] (7/8) Epoch 2, batch 3750, loss[loss=0.1922, simple_loss=0.2561, pruned_loss=0.06413, over 4829.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2414, pruned_loss=0.0552, over 974370.08 frames.], batch size: 26, lr: 7.40e-04 +2022-05-04 04:02:37,085 INFO [train.py:715] (7/8) Epoch 2, batch 3800, loss[loss=0.1702, simple_loss=0.2338, pruned_loss=0.05328, over 4798.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2415, pruned_loss=0.05487, over 973492.55 frames.], batch size: 13, lr: 7.40e-04 +2022-05-04 04:03:17,278 INFO [train.py:715] (7/8) Epoch 2, batch 3850, loss[loss=0.1707, simple_loss=0.2388, pruned_loss=0.05132, over 4932.00 frames.], tot_loss[loss=0.176, simple_loss=0.2418, pruned_loss=0.05514, over 972185.92 frames.], batch size: 21, lr: 7.39e-04 +2022-05-04 04:03:57,614 INFO [train.py:715] (7/8) Epoch 2, batch 3900, loss[loss=0.1922, simple_loss=0.2672, pruned_loss=0.05864, over 4913.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2411, pruned_loss=0.0548, over 972928.83 frames.], batch size: 17, lr: 7.39e-04 +2022-05-04 04:04:36,839 INFO [train.py:715] (7/8) Epoch 2, batch 3950, loss[loss=0.1834, simple_loss=0.2395, pruned_loss=0.06366, over 4839.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2414, pruned_loss=0.05474, over 973624.41 frames.], batch size: 30, lr: 7.39e-04 +2022-05-04 04:05:16,467 INFO [train.py:715] (7/8) Epoch 2, batch 4000, loss[loss=0.1523, simple_loss=0.2296, pruned_loss=0.03752, over 4761.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2404, pruned_loss=0.05441, over 973776.51 frames.], batch size: 19, lr: 7.39e-04 +2022-05-04 04:05:57,032 INFO 
[train.py:715] (7/8) Epoch 2, batch 4050, loss[loss=0.1663, simple_loss=0.2442, pruned_loss=0.04421, over 4774.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2409, pruned_loss=0.05424, over 973008.41 frames.], batch size: 17, lr: 7.38e-04 +2022-05-04 04:06:37,526 INFO [train.py:715] (7/8) Epoch 2, batch 4100, loss[loss=0.1795, simple_loss=0.2584, pruned_loss=0.05031, over 4814.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2418, pruned_loss=0.05466, over 972108.90 frames.], batch size: 25, lr: 7.38e-04 +2022-05-04 04:07:16,033 INFO [train.py:715] (7/8) Epoch 2, batch 4150, loss[loss=0.2075, simple_loss=0.2745, pruned_loss=0.07024, over 4977.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2409, pruned_loss=0.05418, over 972458.20 frames.], batch size: 15, lr: 7.38e-04 +2022-05-04 04:07:55,386 INFO [train.py:715] (7/8) Epoch 2, batch 4200, loss[loss=0.1496, simple_loss=0.2316, pruned_loss=0.03382, over 4887.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2396, pruned_loss=0.0536, over 973048.67 frames.], batch size: 16, lr: 7.38e-04 +2022-05-04 04:08:35,832 INFO [train.py:715] (7/8) Epoch 2, batch 4250, loss[loss=0.1524, simple_loss=0.2248, pruned_loss=0.03996, over 4824.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2394, pruned_loss=0.05379, over 972232.00 frames.], batch size: 13, lr: 7.37e-04 +2022-05-04 04:09:15,084 INFO [train.py:715] (7/8) Epoch 2, batch 4300, loss[loss=0.1274, simple_loss=0.2045, pruned_loss=0.02517, over 4656.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2395, pruned_loss=0.05429, over 971755.29 frames.], batch size: 13, lr: 7.37e-04 +2022-05-04 04:09:54,867 INFO [train.py:715] (7/8) Epoch 2, batch 4350, loss[loss=0.1621, simple_loss=0.2419, pruned_loss=0.04116, over 4905.00 frames.], tot_loss[loss=0.174, simple_loss=0.2396, pruned_loss=0.05416, over 972170.89 frames.], batch size: 17, lr: 7.37e-04 +2022-05-04 04:10:34,724 INFO [train.py:715] (7/8) Epoch 2, batch 4400, loss[loss=0.1412, simple_loss=0.2203, pruned_loss=0.03104, over 4885.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2398, pruned_loss=0.05378, over 972290.99 frames.], batch size: 22, lr: 7.37e-04 +2022-05-04 04:11:14,732 INFO [train.py:715] (7/8) Epoch 2, batch 4450, loss[loss=0.1553, simple_loss=0.2409, pruned_loss=0.03489, over 4943.00 frames.], tot_loss[loss=0.174, simple_loss=0.2404, pruned_loss=0.05383, over 972324.87 frames.], batch size: 29, lr: 7.36e-04 +2022-05-04 04:11:53,881 INFO [train.py:715] (7/8) Epoch 2, batch 4500, loss[loss=0.156, simple_loss=0.2286, pruned_loss=0.04167, over 4889.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2402, pruned_loss=0.05396, over 972428.17 frames.], batch size: 19, lr: 7.36e-04 +2022-05-04 04:12:33,895 INFO [train.py:715] (7/8) Epoch 2, batch 4550, loss[loss=0.162, simple_loss=0.2379, pruned_loss=0.043, over 4935.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2401, pruned_loss=0.05376, over 972634.35 frames.], batch size: 21, lr: 7.36e-04 +2022-05-04 04:13:14,646 INFO [train.py:715] (7/8) Epoch 2, batch 4600, loss[loss=0.1478, simple_loss=0.2158, pruned_loss=0.03987, over 4787.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2396, pruned_loss=0.05346, over 972317.04 frames.], batch size: 14, lr: 7.36e-04 +2022-05-04 04:13:53,701 INFO [train.py:715] (7/8) Epoch 2, batch 4650, loss[loss=0.2169, simple_loss=0.2571, pruned_loss=0.08834, over 4888.00 frames.], tot_loss[loss=0.174, simple_loss=0.2401, pruned_loss=0.05399, over 971871.72 frames.], batch size: 16, lr: 7.35e-04 +2022-05-04 04:14:33,009 INFO [train.py:715] (7/8) Epoch 2, batch 
4700, loss[loss=0.1791, simple_loss=0.2667, pruned_loss=0.04572, over 4806.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2399, pruned_loss=0.05383, over 971493.95 frames.], batch size: 25, lr: 7.35e-04 +2022-05-04 04:15:13,199 INFO [train.py:715] (7/8) Epoch 2, batch 4750, loss[loss=0.1509, simple_loss=0.2325, pruned_loss=0.03459, over 4808.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2401, pruned_loss=0.05425, over 972067.18 frames.], batch size: 26, lr: 7.35e-04 +2022-05-04 04:15:53,748 INFO [train.py:715] (7/8) Epoch 2, batch 4800, loss[loss=0.1901, simple_loss=0.2601, pruned_loss=0.06008, over 4856.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2412, pruned_loss=0.05494, over 972027.61 frames.], batch size: 20, lr: 7.35e-04 +2022-05-04 04:16:33,018 INFO [train.py:715] (7/8) Epoch 2, batch 4850, loss[loss=0.1975, simple_loss=0.2748, pruned_loss=0.0601, over 4792.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2413, pruned_loss=0.05518, over 971642.63 frames.], batch size: 18, lr: 7.34e-04 +2022-05-04 04:17:12,486 INFO [train.py:715] (7/8) Epoch 2, batch 4900, loss[loss=0.145, simple_loss=0.2191, pruned_loss=0.03545, over 4936.00 frames.], tot_loss[loss=0.1746, simple_loss=0.24, pruned_loss=0.05456, over 972157.49 frames.], batch size: 21, lr: 7.34e-04 +2022-05-04 04:17:52,929 INFO [train.py:715] (7/8) Epoch 2, batch 4950, loss[loss=0.2179, simple_loss=0.2718, pruned_loss=0.08199, over 4703.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2395, pruned_loss=0.05449, over 971987.69 frames.], batch size: 15, lr: 7.34e-04 +2022-05-04 04:18:32,546 INFO [train.py:715] (7/8) Epoch 2, batch 5000, loss[loss=0.202, simple_loss=0.2595, pruned_loss=0.07227, over 4873.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2393, pruned_loss=0.05441, over 972585.53 frames.], batch size: 32, lr: 7.34e-04 +2022-05-04 04:19:12,100 INFO [train.py:715] (7/8) Epoch 2, batch 5050, loss[loss=0.2142, simple_loss=0.2651, pruned_loss=0.08167, over 4866.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2395, pruned_loss=0.05458, over 972384.11 frames.], batch size: 32, lr: 7.33e-04 +2022-05-04 04:19:53,172 INFO [train.py:715] (7/8) Epoch 2, batch 5100, loss[loss=0.1566, simple_loss=0.2267, pruned_loss=0.04326, over 4842.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2396, pruned_loss=0.05445, over 972714.05 frames.], batch size: 12, lr: 7.33e-04 +2022-05-04 04:20:34,125 INFO [train.py:715] (7/8) Epoch 2, batch 5150, loss[loss=0.1594, simple_loss=0.227, pruned_loss=0.04584, over 4821.00 frames.], tot_loss[loss=0.1724, simple_loss=0.238, pruned_loss=0.05345, over 972667.93 frames.], batch size: 25, lr: 7.33e-04 +2022-05-04 04:21:13,073 INFO [train.py:715] (7/8) Epoch 2, batch 5200, loss[loss=0.1514, simple_loss=0.2215, pruned_loss=0.04063, over 4987.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2394, pruned_loss=0.05385, over 972591.55 frames.], batch size: 15, lr: 7.33e-04 +2022-05-04 04:21:52,857 INFO [train.py:715] (7/8) Epoch 2, batch 5250, loss[loss=0.1798, simple_loss=0.2428, pruned_loss=0.05841, over 4905.00 frames.], tot_loss[loss=0.1742, simple_loss=0.24, pruned_loss=0.05425, over 973226.29 frames.], batch size: 17, lr: 7.32e-04 +2022-05-04 04:22:33,069 INFO [train.py:715] (7/8) Epoch 2, batch 5300, loss[loss=0.1192, simple_loss=0.1854, pruned_loss=0.02651, over 4729.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2398, pruned_loss=0.05387, over 973322.66 frames.], batch size: 12, lr: 7.32e-04 +2022-05-04 04:23:12,248 INFO [train.py:715] (7/8) Epoch 2, batch 5350, loss[loss=0.1515, 
simple_loss=0.2235, pruned_loss=0.03969, over 4918.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2403, pruned_loss=0.05422, over 973627.10 frames.], batch size: 17, lr: 7.32e-04 +2022-05-04 04:23:51,610 INFO [train.py:715] (7/8) Epoch 2, batch 5400, loss[loss=0.1482, simple_loss=0.2184, pruned_loss=0.03898, over 4809.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2399, pruned_loss=0.05424, over 972793.07 frames.], batch size: 25, lr: 7.32e-04 +2022-05-04 04:24:32,282 INFO [train.py:715] (7/8) Epoch 2, batch 5450, loss[loss=0.1831, simple_loss=0.2559, pruned_loss=0.05516, over 4870.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2403, pruned_loss=0.05437, over 972106.98 frames.], batch size: 20, lr: 7.31e-04 +2022-05-04 04:25:12,076 INFO [train.py:715] (7/8) Epoch 2, batch 5500, loss[loss=0.1563, simple_loss=0.2237, pruned_loss=0.04449, over 4862.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2404, pruned_loss=0.05465, over 972165.48 frames.], batch size: 20, lr: 7.31e-04 +2022-05-04 04:25:51,712 INFO [train.py:715] (7/8) Epoch 2, batch 5550, loss[loss=0.1872, simple_loss=0.2467, pruned_loss=0.06385, over 4888.00 frames.], tot_loss[loss=0.175, simple_loss=0.2404, pruned_loss=0.05482, over 972414.96 frames.], batch size: 22, lr: 7.31e-04 +2022-05-04 04:26:32,210 INFO [train.py:715] (7/8) Epoch 2, batch 5600, loss[loss=0.1628, simple_loss=0.2351, pruned_loss=0.04523, over 4969.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2403, pruned_loss=0.055, over 971980.35 frames.], batch size: 24, lr: 7.31e-04 +2022-05-04 04:27:13,270 INFO [train.py:715] (7/8) Epoch 2, batch 5650, loss[loss=0.2004, simple_loss=0.2576, pruned_loss=0.0716, over 4943.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2412, pruned_loss=0.05572, over 972140.10 frames.], batch size: 29, lr: 7.30e-04 +2022-05-04 04:27:53,191 INFO [train.py:715] (7/8) Epoch 2, batch 5700, loss[loss=0.1852, simple_loss=0.2452, pruned_loss=0.06258, over 4894.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2415, pruned_loss=0.05544, over 972550.89 frames.], batch size: 17, lr: 7.30e-04 +2022-05-04 04:28:33,027 INFO [train.py:715] (7/8) Epoch 2, batch 5750, loss[loss=0.1667, simple_loss=0.2373, pruned_loss=0.04804, over 4889.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2411, pruned_loss=0.05512, over 972929.97 frames.], batch size: 22, lr: 7.30e-04 +2022-05-04 04:29:13,950 INFO [train.py:715] (7/8) Epoch 2, batch 5800, loss[loss=0.1762, simple_loss=0.254, pruned_loss=0.04915, over 4863.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2408, pruned_loss=0.05435, over 972235.84 frames.], batch size: 20, lr: 7.30e-04 +2022-05-04 04:29:55,104 INFO [train.py:715] (7/8) Epoch 2, batch 5850, loss[loss=0.211, simple_loss=0.2633, pruned_loss=0.0794, over 4856.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2406, pruned_loss=0.05425, over 972455.13 frames.], batch size: 32, lr: 7.29e-04 +2022-05-04 04:30:34,561 INFO [train.py:715] (7/8) Epoch 2, batch 5900, loss[loss=0.165, simple_loss=0.2303, pruned_loss=0.04981, over 4945.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2404, pruned_loss=0.05457, over 972489.56 frames.], batch size: 23, lr: 7.29e-04 +2022-05-04 04:31:15,154 INFO [train.py:715] (7/8) Epoch 2, batch 5950, loss[loss=0.1998, simple_loss=0.2644, pruned_loss=0.06757, over 4893.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2406, pruned_loss=0.05427, over 972177.61 frames.], batch size: 19, lr: 7.29e-04 +2022-05-04 04:31:56,153 INFO [train.py:715] (7/8) Epoch 2, batch 6000, loss[loss=0.1786, simple_loss=0.2298, 
pruned_loss=0.06373, over 4764.00 frames.], tot_loss[loss=0.1739, simple_loss=0.24, pruned_loss=0.05395, over 972238.25 frames.], batch size: 14, lr: 7.29e-04 +2022-05-04 04:31:56,153 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 04:32:04,808 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1188, simple_loss=0.2054, pruned_loss=0.01614, over 914524.00 frames. +2022-05-04 04:32:46,139 INFO [train.py:715] (7/8) Epoch 2, batch 6050, loss[loss=0.1946, simple_loss=0.2588, pruned_loss=0.06523, over 4835.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2384, pruned_loss=0.05337, over 971867.36 frames.], batch size: 15, lr: 7.29e-04 +2022-05-04 04:33:25,857 INFO [train.py:715] (7/8) Epoch 2, batch 6100, loss[loss=0.1879, simple_loss=0.2478, pruned_loss=0.06405, over 4820.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2381, pruned_loss=0.05269, over 973404.98 frames.], batch size: 15, lr: 7.28e-04 +2022-05-04 04:34:05,823 INFO [train.py:715] (7/8) Epoch 2, batch 6150, loss[loss=0.1893, simple_loss=0.2586, pruned_loss=0.05994, over 4914.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2386, pruned_loss=0.05293, over 972758.41 frames.], batch size: 29, lr: 7.28e-04 +2022-05-04 04:34:46,189 INFO [train.py:715] (7/8) Epoch 2, batch 6200, loss[loss=0.1798, simple_loss=0.2486, pruned_loss=0.05551, over 4901.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2389, pruned_loss=0.05328, over 972881.58 frames.], batch size: 18, lr: 7.28e-04 +2022-05-04 04:35:26,608 INFO [train.py:715] (7/8) Epoch 2, batch 6250, loss[loss=0.175, simple_loss=0.2382, pruned_loss=0.05588, over 4847.00 frames.], tot_loss[loss=0.172, simple_loss=0.2384, pruned_loss=0.05281, over 972351.14 frames.], batch size: 32, lr: 7.28e-04 +2022-05-04 04:36:05,781 INFO [train.py:715] (7/8) Epoch 2, batch 6300, loss[loss=0.1948, simple_loss=0.2495, pruned_loss=0.07004, over 4949.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2394, pruned_loss=0.05356, over 972379.38 frames.], batch size: 35, lr: 7.27e-04 +2022-05-04 04:36:46,023 INFO [train.py:715] (7/8) Epoch 2, batch 6350, loss[loss=0.2213, simple_loss=0.2807, pruned_loss=0.08094, over 4876.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2401, pruned_loss=0.05373, over 972894.50 frames.], batch size: 32, lr: 7.27e-04 +2022-05-04 04:37:26,515 INFO [train.py:715] (7/8) Epoch 2, batch 6400, loss[loss=0.1651, simple_loss=0.2296, pruned_loss=0.05034, over 4777.00 frames.], tot_loss[loss=0.1751, simple_loss=0.241, pruned_loss=0.05453, over 972413.45 frames.], batch size: 17, lr: 7.27e-04 +2022-05-04 04:38:05,325 INFO [train.py:715] (7/8) Epoch 2, batch 6450, loss[loss=0.1634, simple_loss=0.2294, pruned_loss=0.0487, over 4978.00 frames.], tot_loss[loss=0.1744, simple_loss=0.241, pruned_loss=0.05387, over 972273.94 frames.], batch size: 14, lr: 7.27e-04 +2022-05-04 04:38:44,603 INFO [train.py:715] (7/8) Epoch 2, batch 6500, loss[loss=0.1623, simple_loss=0.2212, pruned_loss=0.05171, over 4992.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2407, pruned_loss=0.05397, over 972627.56 frames.], batch size: 14, lr: 7.26e-04 +2022-05-04 04:39:24,832 INFO [train.py:715] (7/8) Epoch 2, batch 6550, loss[loss=0.2168, simple_loss=0.2682, pruned_loss=0.08273, over 4917.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2417, pruned_loss=0.0546, over 973876.79 frames.], batch size: 18, lr: 7.26e-04 +2022-05-04 04:40:04,766 INFO [train.py:715] (7/8) Epoch 2, batch 6600, loss[loss=0.1917, simple_loss=0.2608, pruned_loss=0.06131, over 4922.00 frames.], tot_loss[loss=0.1766, 
simple_loss=0.2425, pruned_loss=0.05533, over 974049.65 frames.], batch size: 29, lr: 7.26e-04 +2022-05-04 04:40:43,857 INFO [train.py:715] (7/8) Epoch 2, batch 6650, loss[loss=0.1654, simple_loss=0.2435, pruned_loss=0.04369, over 4877.00 frames.], tot_loss[loss=0.1765, simple_loss=0.242, pruned_loss=0.05553, over 973320.92 frames.], batch size: 20, lr: 7.26e-04 +2022-05-04 04:41:23,368 INFO [train.py:715] (7/8) Epoch 2, batch 6700, loss[loss=0.1586, simple_loss=0.2291, pruned_loss=0.04409, over 4766.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2416, pruned_loss=0.05506, over 972668.04 frames.], batch size: 19, lr: 7.25e-04 +2022-05-04 04:42:03,567 INFO [train.py:715] (7/8) Epoch 2, batch 6750, loss[loss=0.1486, simple_loss=0.2238, pruned_loss=0.03676, over 4823.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2417, pruned_loss=0.05518, over 972782.00 frames.], batch size: 26, lr: 7.25e-04 +2022-05-04 04:42:41,715 INFO [train.py:715] (7/8) Epoch 2, batch 6800, loss[loss=0.1634, simple_loss=0.2348, pruned_loss=0.04603, over 4980.00 frames.], tot_loss[loss=0.1741, simple_loss=0.24, pruned_loss=0.05416, over 972952.63 frames.], batch size: 14, lr: 7.25e-04 +2022-05-04 04:43:20,938 INFO [train.py:715] (7/8) Epoch 2, batch 6850, loss[loss=0.1592, simple_loss=0.2239, pruned_loss=0.0473, over 4768.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2403, pruned_loss=0.05402, over 972606.30 frames.], batch size: 14, lr: 7.25e-04 +2022-05-04 04:44:01,038 INFO [train.py:715] (7/8) Epoch 2, batch 6900, loss[loss=0.1682, simple_loss=0.2277, pruned_loss=0.05438, over 4905.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2398, pruned_loss=0.05342, over 972653.87 frames.], batch size: 18, lr: 7.24e-04 +2022-05-04 04:44:41,206 INFO [train.py:715] (7/8) Epoch 2, batch 6950, loss[loss=0.1496, simple_loss=0.2239, pruned_loss=0.03763, over 4958.00 frames.], tot_loss[loss=0.174, simple_loss=0.2409, pruned_loss=0.0535, over 973300.08 frames.], batch size: 21, lr: 7.24e-04 +2022-05-04 04:45:19,405 INFO [train.py:715] (7/8) Epoch 2, batch 7000, loss[loss=0.1742, simple_loss=0.2397, pruned_loss=0.05435, over 4907.00 frames.], tot_loss[loss=0.174, simple_loss=0.241, pruned_loss=0.05352, over 972796.09 frames.], batch size: 19, lr: 7.24e-04 +2022-05-04 04:45:59,978 INFO [train.py:715] (7/8) Epoch 2, batch 7050, loss[loss=0.158, simple_loss=0.2179, pruned_loss=0.04908, over 4957.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2407, pruned_loss=0.05406, over 974326.35 frames.], batch size: 21, lr: 7.24e-04 +2022-05-04 04:46:40,403 INFO [train.py:715] (7/8) Epoch 2, batch 7100, loss[loss=0.1858, simple_loss=0.2574, pruned_loss=0.05709, over 4971.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2409, pruned_loss=0.05394, over 974111.63 frames.], batch size: 15, lr: 7.24e-04 +2022-05-04 04:47:19,794 INFO [train.py:715] (7/8) Epoch 2, batch 7150, loss[loss=0.1928, simple_loss=0.2614, pruned_loss=0.06208, over 4866.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2404, pruned_loss=0.05395, over 973151.17 frames.], batch size: 20, lr: 7.23e-04 +2022-05-04 04:48:00,092 INFO [train.py:715] (7/8) Epoch 2, batch 7200, loss[loss=0.1892, simple_loss=0.2362, pruned_loss=0.07111, over 4721.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2398, pruned_loss=0.05378, over 972989.16 frames.], batch size: 12, lr: 7.23e-04 +2022-05-04 04:48:41,282 INFO [train.py:715] (7/8) Epoch 2, batch 7250, loss[loss=0.1638, simple_loss=0.2343, pruned_loss=0.04669, over 4884.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2409, pruned_loss=0.05427, 
over 973475.04 frames.], batch size: 16, lr: 7.23e-04 +2022-05-04 04:49:21,918 INFO [train.py:715] (7/8) Epoch 2, batch 7300, loss[loss=0.2351, simple_loss=0.2819, pruned_loss=0.09419, over 4831.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2406, pruned_loss=0.05406, over 973144.45 frames.], batch size: 15, lr: 7.23e-04 +2022-05-04 04:50:01,602 INFO [train.py:715] (7/8) Epoch 2, batch 7350, loss[loss=0.1901, simple_loss=0.2587, pruned_loss=0.06075, over 4908.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2395, pruned_loss=0.05353, over 972530.48 frames.], batch size: 17, lr: 7.22e-04 +2022-05-04 04:50:42,531 INFO [train.py:715] (7/8) Epoch 2, batch 7400, loss[loss=0.1523, simple_loss=0.2151, pruned_loss=0.04475, over 4923.00 frames.], tot_loss[loss=0.1753, simple_loss=0.241, pruned_loss=0.05485, over 972702.83 frames.], batch size: 23, lr: 7.22e-04 +2022-05-04 04:51:24,323 INFO [train.py:715] (7/8) Epoch 2, batch 7450, loss[loss=0.1799, simple_loss=0.2532, pruned_loss=0.05325, over 4872.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2404, pruned_loss=0.0542, over 973110.71 frames.], batch size: 16, lr: 7.22e-04 +2022-05-04 04:52:04,708 INFO [train.py:715] (7/8) Epoch 2, batch 7500, loss[loss=0.1537, simple_loss=0.2173, pruned_loss=0.045, over 4812.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2398, pruned_loss=0.05403, over 972920.56 frames.], batch size: 27, lr: 7.22e-04 +2022-05-04 04:52:45,161 INFO [train.py:715] (7/8) Epoch 2, batch 7550, loss[loss=0.1975, simple_loss=0.2561, pruned_loss=0.06946, over 4830.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2405, pruned_loss=0.05427, over 973577.30 frames.], batch size: 30, lr: 7.21e-04 +2022-05-04 04:53:26,939 INFO [train.py:715] (7/8) Epoch 2, batch 7600, loss[loss=0.1423, simple_loss=0.218, pruned_loss=0.03329, over 4823.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2407, pruned_loss=0.05446, over 973763.49 frames.], batch size: 13, lr: 7.21e-04 +2022-05-04 04:54:08,316 INFO [train.py:715] (7/8) Epoch 2, batch 7650, loss[loss=0.1936, simple_loss=0.2663, pruned_loss=0.06045, over 4848.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2416, pruned_loss=0.05505, over 973517.75 frames.], batch size: 20, lr: 7.21e-04 +2022-05-04 04:54:48,382 INFO [train.py:715] (7/8) Epoch 2, batch 7700, loss[loss=0.2022, simple_loss=0.2601, pruned_loss=0.0722, over 4876.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2414, pruned_loss=0.055, over 972958.07 frames.], batch size: 17, lr: 7.21e-04 +2022-05-04 04:55:29,834 INFO [train.py:715] (7/8) Epoch 2, batch 7750, loss[loss=0.1685, simple_loss=0.2348, pruned_loss=0.05109, over 4751.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2399, pruned_loss=0.05415, over 972381.73 frames.], batch size: 19, lr: 7.21e-04 +2022-05-04 04:56:11,502 INFO [train.py:715] (7/8) Epoch 2, batch 7800, loss[loss=0.1767, simple_loss=0.2433, pruned_loss=0.0551, over 4839.00 frames.], tot_loss[loss=0.174, simple_loss=0.2396, pruned_loss=0.05419, over 971589.80 frames.], batch size: 30, lr: 7.20e-04 +2022-05-04 04:56:52,008 INFO [train.py:715] (7/8) Epoch 2, batch 7850, loss[loss=0.1845, simple_loss=0.2491, pruned_loss=0.05996, over 4972.00 frames.], tot_loss[loss=0.1754, simple_loss=0.241, pruned_loss=0.05488, over 972162.50 frames.], batch size: 15, lr: 7.20e-04 +2022-05-04 04:57:33,358 INFO [train.py:715] (7/8) Epoch 2, batch 7900, loss[loss=0.1406, simple_loss=0.2237, pruned_loss=0.02875, over 4841.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2421, pruned_loss=0.05518, over 972638.80 frames.], batch size: 30, 
lr: 7.20e-04 +2022-05-04 04:58:15,552 INFO [train.py:715] (7/8) Epoch 2, batch 7950, loss[loss=0.1571, simple_loss=0.2284, pruned_loss=0.04287, over 4956.00 frames.], tot_loss[loss=0.176, simple_loss=0.2417, pruned_loss=0.05513, over 972671.45 frames.], batch size: 15, lr: 7.20e-04 +2022-05-04 04:58:57,047 INFO [train.py:715] (7/8) Epoch 2, batch 8000, loss[loss=0.1652, simple_loss=0.2304, pruned_loss=0.05003, over 4981.00 frames.], tot_loss[loss=0.1753, simple_loss=0.2412, pruned_loss=0.05471, over 972865.05 frames.], batch size: 14, lr: 7.19e-04 +2022-05-04 04:59:37,246 INFO [train.py:715] (7/8) Epoch 2, batch 8050, loss[loss=0.1866, simple_loss=0.2505, pruned_loss=0.06139, over 4787.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2419, pruned_loss=0.05551, over 971012.65 frames.], batch size: 14, lr: 7.19e-04 +2022-05-04 05:00:18,971 INFO [train.py:715] (7/8) Epoch 2, batch 8100, loss[loss=0.153, simple_loss=0.2312, pruned_loss=0.03741, over 4708.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2413, pruned_loss=0.05509, over 970320.14 frames.], batch size: 15, lr: 7.19e-04 +2022-05-04 05:01:00,839 INFO [train.py:715] (7/8) Epoch 2, batch 8150, loss[loss=0.1929, simple_loss=0.2478, pruned_loss=0.06897, over 4693.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2409, pruned_loss=0.0551, over 970627.11 frames.], batch size: 15, lr: 7.19e-04 +2022-05-04 05:01:41,276 INFO [train.py:715] (7/8) Epoch 2, batch 8200, loss[loss=0.156, simple_loss=0.2291, pruned_loss=0.0414, over 4967.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2403, pruned_loss=0.05494, over 971627.10 frames.], batch size: 28, lr: 7.18e-04 +2022-05-04 05:02:22,251 INFO [train.py:715] (7/8) Epoch 2, batch 8250, loss[loss=0.1553, simple_loss=0.2215, pruned_loss=0.04453, over 4864.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2405, pruned_loss=0.0553, over 970479.17 frames.], batch size: 32, lr: 7.18e-04 +2022-05-04 05:03:04,381 INFO [train.py:715] (7/8) Epoch 2, batch 8300, loss[loss=0.1511, simple_loss=0.2254, pruned_loss=0.03841, over 4785.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2391, pruned_loss=0.05427, over 970678.34 frames.], batch size: 17, lr: 7.18e-04 +2022-05-04 05:03:46,076 INFO [train.py:715] (7/8) Epoch 2, batch 8350, loss[loss=0.1491, simple_loss=0.2168, pruned_loss=0.0407, over 4975.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2391, pruned_loss=0.05418, over 972016.20 frames.], batch size: 28, lr: 7.18e-04 +2022-05-04 05:04:26,326 INFO [train.py:715] (7/8) Epoch 2, batch 8400, loss[loss=0.1771, simple_loss=0.2376, pruned_loss=0.05828, over 4820.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2394, pruned_loss=0.05412, over 971964.75 frames.], batch size: 13, lr: 7.18e-04 +2022-05-04 05:05:07,477 INFO [train.py:715] (7/8) Epoch 2, batch 8450, loss[loss=0.2227, simple_loss=0.2762, pruned_loss=0.08457, over 4989.00 frames.], tot_loss[loss=0.1741, simple_loss=0.24, pruned_loss=0.05416, over 972401.12 frames.], batch size: 26, lr: 7.17e-04 +2022-05-04 05:05:49,594 INFO [train.py:715] (7/8) Epoch 2, batch 8500, loss[loss=0.1883, simple_loss=0.2543, pruned_loss=0.06117, over 4927.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2403, pruned_loss=0.05434, over 973222.09 frames.], batch size: 39, lr: 7.17e-04 +2022-05-04 05:06:29,762 INFO [train.py:715] (7/8) Epoch 2, batch 8550, loss[loss=0.1888, simple_loss=0.2558, pruned_loss=0.06093, over 4968.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2395, pruned_loss=0.05405, over 972891.36 frames.], batch size: 28, lr: 7.17e-04 +2022-05-04 05:07:10,946 
INFO [train.py:715] (7/8) Epoch 2, batch 8600, loss[loss=0.1265, simple_loss=0.1961, pruned_loss=0.02847, over 4728.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2395, pruned_loss=0.05403, over 973189.37 frames.], batch size: 12, lr: 7.17e-04 +2022-05-04 05:07:52,997 INFO [train.py:715] (7/8) Epoch 2, batch 8650, loss[loss=0.1464, simple_loss=0.2259, pruned_loss=0.03347, over 4864.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2391, pruned_loss=0.05359, over 972914.41 frames.], batch size: 20, lr: 7.16e-04 +2022-05-04 05:08:34,289 INFO [train.py:715] (7/8) Epoch 2, batch 8700, loss[loss=0.1369, simple_loss=0.2123, pruned_loss=0.03076, over 4820.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2389, pruned_loss=0.05313, over 973142.14 frames.], batch size: 27, lr: 7.16e-04 +2022-05-04 05:09:14,832 INFO [train.py:715] (7/8) Epoch 2, batch 8750, loss[loss=0.1583, simple_loss=0.2371, pruned_loss=0.03977, over 4814.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2388, pruned_loss=0.05337, over 973280.80 frames.], batch size: 27, lr: 7.16e-04 +2022-05-04 05:09:56,631 INFO [train.py:715] (7/8) Epoch 2, batch 8800, loss[loss=0.1496, simple_loss=0.2179, pruned_loss=0.04068, over 4821.00 frames.], tot_loss[loss=0.174, simple_loss=0.2396, pruned_loss=0.05416, over 972499.93 frames.], batch size: 13, lr: 7.16e-04 +2022-05-04 05:10:38,736 INFO [train.py:715] (7/8) Epoch 2, batch 8850, loss[loss=0.1718, simple_loss=0.2427, pruned_loss=0.05044, over 4979.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2401, pruned_loss=0.05437, over 971753.37 frames.], batch size: 24, lr: 7.15e-04 +2022-05-04 05:11:18,695 INFO [train.py:715] (7/8) Epoch 2, batch 8900, loss[loss=0.1471, simple_loss=0.2114, pruned_loss=0.04139, over 4858.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2401, pruned_loss=0.05437, over 972574.62 frames.], batch size: 32, lr: 7.15e-04 +2022-05-04 05:12:00,193 INFO [train.py:715] (7/8) Epoch 2, batch 8950, loss[loss=0.1693, simple_loss=0.2433, pruned_loss=0.04762, over 4766.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2406, pruned_loss=0.05463, over 973149.64 frames.], batch size: 14, lr: 7.15e-04 +2022-05-04 05:12:42,402 INFO [train.py:715] (7/8) Epoch 2, batch 9000, loss[loss=0.1909, simple_loss=0.2533, pruned_loss=0.0642, over 4805.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2402, pruned_loss=0.05429, over 972497.61 frames.], batch size: 24, lr: 7.15e-04 +2022-05-04 05:12:42,403 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 05:12:58,992 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1181, simple_loss=0.2047, pruned_loss=0.01572, over 914524.00 frames. 
+2022-05-04 05:13:41,066 INFO [train.py:715] (7/8) Epoch 2, batch 9050, loss[loss=0.2029, simple_loss=0.2677, pruned_loss=0.06905, over 4833.00 frames.], tot_loss[loss=0.1752, simple_loss=0.241, pruned_loss=0.05472, over 973123.50 frames.], batch size: 26, lr: 7.15e-04 +2022-05-04 05:14:21,246 INFO [train.py:715] (7/8) Epoch 2, batch 9100, loss[loss=0.167, simple_loss=0.2383, pruned_loss=0.04782, over 4960.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2409, pruned_loss=0.05501, over 972852.34 frames.], batch size: 14, lr: 7.14e-04 +2022-05-04 05:15:02,340 INFO [train.py:715] (7/8) Epoch 2, batch 9150, loss[loss=0.2169, simple_loss=0.2681, pruned_loss=0.08286, over 4704.00 frames.], tot_loss[loss=0.1745, simple_loss=0.24, pruned_loss=0.05448, over 972244.74 frames.], batch size: 15, lr: 7.14e-04 +2022-05-04 05:15:43,582 INFO [train.py:715] (7/8) Epoch 2, batch 9200, loss[loss=0.16, simple_loss=0.224, pruned_loss=0.04798, over 4905.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2397, pruned_loss=0.05466, over 972171.51 frames.], batch size: 22, lr: 7.14e-04 +2022-05-04 05:16:25,106 INFO [train.py:715] (7/8) Epoch 2, batch 9250, loss[loss=0.1405, simple_loss=0.2086, pruned_loss=0.03625, over 4980.00 frames.], tot_loss[loss=0.175, simple_loss=0.2405, pruned_loss=0.05476, over 972166.69 frames.], batch size: 25, lr: 7.14e-04 +2022-05-04 05:17:05,074 INFO [train.py:715] (7/8) Epoch 2, batch 9300, loss[loss=0.1504, simple_loss=0.2229, pruned_loss=0.03901, over 4870.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2405, pruned_loss=0.05446, over 972546.66 frames.], batch size: 22, lr: 7.13e-04 +2022-05-04 05:17:46,770 INFO [train.py:715] (7/8) Epoch 2, batch 9350, loss[loss=0.1701, simple_loss=0.2314, pruned_loss=0.05444, over 4766.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2406, pruned_loss=0.05449, over 972228.68 frames.], batch size: 19, lr: 7.13e-04 +2022-05-04 05:18:28,859 INFO [train.py:715] (7/8) Epoch 2, batch 9400, loss[loss=0.1484, simple_loss=0.2169, pruned_loss=0.03996, over 4810.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2401, pruned_loss=0.05409, over 972038.86 frames.], batch size: 25, lr: 7.13e-04 +2022-05-04 05:19:08,501 INFO [train.py:715] (7/8) Epoch 2, batch 9450, loss[loss=0.1648, simple_loss=0.2457, pruned_loss=0.04195, over 4807.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2401, pruned_loss=0.0535, over 972666.46 frames.], batch size: 25, lr: 7.13e-04 +2022-05-04 05:19:48,361 INFO [train.py:715] (7/8) Epoch 2, batch 9500, loss[loss=0.1609, simple_loss=0.2281, pruned_loss=0.04682, over 4794.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2399, pruned_loss=0.05365, over 973051.87 frames.], batch size: 21, lr: 7.13e-04 +2022-05-04 05:20:28,634 INFO [train.py:715] (7/8) Epoch 2, batch 9550, loss[loss=0.1584, simple_loss=0.2181, pruned_loss=0.0493, over 4814.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2402, pruned_loss=0.05377, over 972834.00 frames.], batch size: 27, lr: 7.12e-04 +2022-05-04 05:21:08,641 INFO [train.py:715] (7/8) Epoch 2, batch 9600, loss[loss=0.1819, simple_loss=0.2386, pruned_loss=0.06264, over 4882.00 frames.], tot_loss[loss=0.173, simple_loss=0.2394, pruned_loss=0.05334, over 972956.19 frames.], batch size: 19, lr: 7.12e-04 +2022-05-04 05:21:47,541 INFO [train.py:715] (7/8) Epoch 2, batch 9650, loss[loss=0.1969, simple_loss=0.2619, pruned_loss=0.06592, over 4865.00 frames.], tot_loss[loss=0.174, simple_loss=0.2402, pruned_loss=0.05384, over 973863.79 frames.], batch size: 20, lr: 7.12e-04 +2022-05-04 05:22:27,781 INFO 
[train.py:715] (7/8) Epoch 2, batch 9700, loss[loss=0.1908, simple_loss=0.2694, pruned_loss=0.05606, over 4822.00 frames.], tot_loss[loss=0.174, simple_loss=0.2401, pruned_loss=0.05393, over 973323.59 frames.], batch size: 27, lr: 7.12e-04 +2022-05-04 05:23:08,412 INFO [train.py:715] (7/8) Epoch 2, batch 9750, loss[loss=0.1955, simple_loss=0.2599, pruned_loss=0.06549, over 4969.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2398, pruned_loss=0.05368, over 972358.08 frames.], batch size: 35, lr: 7.11e-04 +2022-05-04 05:23:47,698 INFO [train.py:715] (7/8) Epoch 2, batch 9800, loss[loss=0.1407, simple_loss=0.2063, pruned_loss=0.0375, over 4750.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2398, pruned_loss=0.05375, over 972638.13 frames.], batch size: 16, lr: 7.11e-04 +2022-05-04 05:24:26,799 INFO [train.py:715] (7/8) Epoch 2, batch 9850, loss[loss=0.1727, simple_loss=0.2511, pruned_loss=0.04719, over 4907.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2395, pruned_loss=0.05366, over 972812.19 frames.], batch size: 17, lr: 7.11e-04 +2022-05-04 05:25:06,820 INFO [train.py:715] (7/8) Epoch 2, batch 9900, loss[loss=0.159, simple_loss=0.2123, pruned_loss=0.05287, over 4803.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2381, pruned_loss=0.05275, over 972578.97 frames.], batch size: 13, lr: 7.11e-04 +2022-05-04 05:25:46,412 INFO [train.py:715] (7/8) Epoch 2, batch 9950, loss[loss=0.2245, simple_loss=0.2823, pruned_loss=0.08341, over 4955.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2385, pruned_loss=0.05287, over 972935.69 frames.], batch size: 15, lr: 7.11e-04 +2022-05-04 05:26:25,434 INFO [train.py:715] (7/8) Epoch 2, batch 10000, loss[loss=0.1697, simple_loss=0.2499, pruned_loss=0.04471, over 4741.00 frames.], tot_loss[loss=0.1726, simple_loss=0.239, pruned_loss=0.05306, over 972510.92 frames.], batch size: 16, lr: 7.10e-04 +2022-05-04 05:27:06,104 INFO [train.py:715] (7/8) Epoch 2, batch 10050, loss[loss=0.1502, simple_loss=0.2214, pruned_loss=0.03954, over 4957.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2391, pruned_loss=0.05318, over 972501.11 frames.], batch size: 35, lr: 7.10e-04 +2022-05-04 05:27:45,913 INFO [train.py:715] (7/8) Epoch 2, batch 10100, loss[loss=0.1878, simple_loss=0.2486, pruned_loss=0.0635, over 4765.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2388, pruned_loss=0.0529, over 971907.04 frames.], batch size: 12, lr: 7.10e-04 +2022-05-04 05:28:25,925 INFO [train.py:715] (7/8) Epoch 2, batch 10150, loss[loss=0.169, simple_loss=0.2426, pruned_loss=0.04769, over 4805.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2386, pruned_loss=0.05247, over 971706.00 frames.], batch size: 12, lr: 7.10e-04 +2022-05-04 05:29:06,176 INFO [train.py:715] (7/8) Epoch 2, batch 10200, loss[loss=0.1448, simple_loss=0.2154, pruned_loss=0.0371, over 4876.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2391, pruned_loss=0.05287, over 971954.69 frames.], batch size: 20, lr: 7.09e-04 +2022-05-04 05:29:47,602 INFO [train.py:715] (7/8) Epoch 2, batch 10250, loss[loss=0.1909, simple_loss=0.2611, pruned_loss=0.06033, over 4966.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2394, pruned_loss=0.05287, over 972286.05 frames.], batch size: 15, lr: 7.09e-04 +2022-05-04 05:30:27,424 INFO [train.py:715] (7/8) Epoch 2, batch 10300, loss[loss=0.1573, simple_loss=0.2245, pruned_loss=0.04502, over 4754.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2401, pruned_loss=0.05326, over 972076.18 frames.], batch size: 16, lr: 7.09e-04 +2022-05-04 05:31:07,039 INFO [train.py:715] (7/8) Epoch 2, 
batch 10350, loss[loss=0.1739, simple_loss=0.2313, pruned_loss=0.05831, over 4855.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2399, pruned_loss=0.05322, over 972208.61 frames.], batch size: 32, lr: 7.09e-04 +2022-05-04 05:31:49,855 INFO [train.py:715] (7/8) Epoch 2, batch 10400, loss[loss=0.1862, simple_loss=0.2406, pruned_loss=0.06589, over 4984.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2404, pruned_loss=0.05371, over 971624.94 frames.], batch size: 25, lr: 7.09e-04 +2022-05-04 05:32:31,024 INFO [train.py:715] (7/8) Epoch 2, batch 10450, loss[loss=0.1427, simple_loss=0.2099, pruned_loss=0.03772, over 4940.00 frames.], tot_loss[loss=0.1732, simple_loss=0.2396, pruned_loss=0.05342, over 971730.66 frames.], batch size: 23, lr: 7.08e-04 +2022-05-04 05:33:11,277 INFO [train.py:715] (7/8) Epoch 2, batch 10500, loss[loss=0.175, simple_loss=0.242, pruned_loss=0.05403, over 4791.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2389, pruned_loss=0.05323, over 972046.66 frames.], batch size: 17, lr: 7.08e-04 +2022-05-04 05:33:50,626 INFO [train.py:715] (7/8) Epoch 2, batch 10550, loss[loss=0.1703, simple_loss=0.2441, pruned_loss=0.04828, over 4865.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2394, pruned_loss=0.0531, over 972657.58 frames.], batch size: 20, lr: 7.08e-04 +2022-05-04 05:34:31,851 INFO [train.py:715] (7/8) Epoch 2, batch 10600, loss[loss=0.1678, simple_loss=0.2265, pruned_loss=0.05449, over 4984.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2385, pruned_loss=0.05258, over 972422.52 frames.], batch size: 31, lr: 7.08e-04 +2022-05-04 05:35:12,042 INFO [train.py:715] (7/8) Epoch 2, batch 10650, loss[loss=0.1886, simple_loss=0.2609, pruned_loss=0.05817, over 4826.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2392, pruned_loss=0.05277, over 972404.06 frames.], batch size: 26, lr: 7.07e-04 +2022-05-04 05:35:51,939 INFO [train.py:715] (7/8) Epoch 2, batch 10700, loss[loss=0.1955, simple_loss=0.2578, pruned_loss=0.06656, over 4767.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2393, pruned_loss=0.0532, over 973423.25 frames.], batch size: 14, lr: 7.07e-04 +2022-05-04 05:36:32,507 INFO [train.py:715] (7/8) Epoch 2, batch 10750, loss[loss=0.1548, simple_loss=0.2134, pruned_loss=0.04806, over 4969.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2392, pruned_loss=0.0533, over 973012.89 frames.], batch size: 15, lr: 7.07e-04 +2022-05-04 05:37:13,630 INFO [train.py:715] (7/8) Epoch 2, batch 10800, loss[loss=0.1544, simple_loss=0.2299, pruned_loss=0.0395, over 4838.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2385, pruned_loss=0.05296, over 972944.85 frames.], batch size: 20, lr: 7.07e-04 +2022-05-04 05:37:53,812 INFO [train.py:715] (7/8) Epoch 2, batch 10850, loss[loss=0.1674, simple_loss=0.2378, pruned_loss=0.04851, over 4938.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2394, pruned_loss=0.05343, over 972126.69 frames.], batch size: 23, lr: 7.07e-04 +2022-05-04 05:38:33,325 INFO [train.py:715] (7/8) Epoch 2, batch 10900, loss[loss=0.1676, simple_loss=0.2344, pruned_loss=0.05036, over 4913.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2388, pruned_loss=0.05293, over 971286.75 frames.], batch size: 17, lr: 7.06e-04 +2022-05-04 05:39:14,363 INFO [train.py:715] (7/8) Epoch 2, batch 10950, loss[loss=0.1704, simple_loss=0.2463, pruned_loss=0.04722, over 4816.00 frames.], tot_loss[loss=0.1725, simple_loss=0.239, pruned_loss=0.05301, over 971906.27 frames.], batch size: 25, lr: 7.06e-04 +2022-05-04 05:39:54,172 INFO [train.py:715] (7/8) Epoch 2, batch 11000, 
loss[loss=0.1567, simple_loss=0.2296, pruned_loss=0.04186, over 4760.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2384, pruned_loss=0.05291, over 972076.77 frames.], batch size: 18, lr: 7.06e-04 +2022-05-04 05:40:33,760 INFO [train.py:715] (7/8) Epoch 2, batch 11050, loss[loss=0.1476, simple_loss=0.2174, pruned_loss=0.03886, over 4784.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2384, pruned_loss=0.05311, over 971860.68 frames.], batch size: 17, lr: 7.06e-04 +2022-05-04 05:41:14,437 INFO [train.py:715] (7/8) Epoch 2, batch 11100, loss[loss=0.2069, simple_loss=0.2576, pruned_loss=0.07805, over 4921.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2383, pruned_loss=0.05344, over 972226.04 frames.], batch size: 39, lr: 7.05e-04 +2022-05-04 05:41:54,870 INFO [train.py:715] (7/8) Epoch 2, batch 11150, loss[loss=0.2372, simple_loss=0.2953, pruned_loss=0.08952, over 4898.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2397, pruned_loss=0.05446, over 972457.39 frames.], batch size: 19, lr: 7.05e-04 +2022-05-04 05:42:35,647 INFO [train.py:715] (7/8) Epoch 2, batch 11200, loss[loss=0.1832, simple_loss=0.2479, pruned_loss=0.05925, over 4817.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2396, pruned_loss=0.05431, over 972666.44 frames.], batch size: 27, lr: 7.05e-04 +2022-05-04 05:43:15,662 INFO [train.py:715] (7/8) Epoch 2, batch 11250, loss[loss=0.1493, simple_loss=0.2189, pruned_loss=0.03986, over 4851.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2399, pruned_loss=0.05424, over 973066.57 frames.], batch size: 20, lr: 7.05e-04 +2022-05-04 05:43:56,720 INFO [train.py:715] (7/8) Epoch 2, batch 11300, loss[loss=0.2196, simple_loss=0.2745, pruned_loss=0.08241, over 4929.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2403, pruned_loss=0.05439, over 972653.15 frames.], batch size: 18, lr: 7.05e-04 +2022-05-04 05:44:37,065 INFO [train.py:715] (7/8) Epoch 2, batch 11350, loss[loss=0.1632, simple_loss=0.2233, pruned_loss=0.05159, over 4864.00 frames.], tot_loss[loss=0.1729, simple_loss=0.239, pruned_loss=0.05342, over 972102.43 frames.], batch size: 32, lr: 7.04e-04 +2022-05-04 05:45:16,687 INFO [train.py:715] (7/8) Epoch 2, batch 11400, loss[loss=0.1627, simple_loss=0.2353, pruned_loss=0.04502, over 4870.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2388, pruned_loss=0.05334, over 971903.72 frames.], batch size: 20, lr: 7.04e-04 +2022-05-04 05:45:56,743 INFO [train.py:715] (7/8) Epoch 2, batch 11450, loss[loss=0.177, simple_loss=0.2462, pruned_loss=0.05387, over 4864.00 frames.], tot_loss[loss=0.172, simple_loss=0.2384, pruned_loss=0.05276, over 971831.12 frames.], batch size: 20, lr: 7.04e-04 +2022-05-04 05:46:37,331 INFO [train.py:715] (7/8) Epoch 2, batch 11500, loss[loss=0.1677, simple_loss=0.2306, pruned_loss=0.05241, over 4891.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2374, pruned_loss=0.0522, over 971288.67 frames.], batch size: 38, lr: 7.04e-04 +2022-05-04 05:47:18,054 INFO [train.py:715] (7/8) Epoch 2, batch 11550, loss[loss=0.2007, simple_loss=0.2617, pruned_loss=0.06989, over 4917.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2374, pruned_loss=0.05218, over 971449.11 frames.], batch size: 23, lr: 7.04e-04 +2022-05-04 05:47:58,027 INFO [train.py:715] (7/8) Epoch 2, batch 11600, loss[loss=0.1688, simple_loss=0.2355, pruned_loss=0.05104, over 4989.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2368, pruned_loss=0.05185, over 972119.87 frames.], batch size: 27, lr: 7.03e-04 +2022-05-04 05:48:39,181 INFO [train.py:715] (7/8) Epoch 2, batch 11650, loss[loss=0.1446, 
simple_loss=0.2247, pruned_loss=0.03222, over 4981.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2374, pruned_loss=0.05213, over 972976.17 frames.], batch size: 25, lr: 7.03e-04 +2022-05-04 05:49:19,427 INFO [train.py:715] (7/8) Epoch 2, batch 11700, loss[loss=0.162, simple_loss=0.2233, pruned_loss=0.05031, over 4866.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2366, pruned_loss=0.05148, over 972440.92 frames.], batch size: 16, lr: 7.03e-04 +2022-05-04 05:49:59,625 INFO [train.py:715] (7/8) Epoch 2, batch 11750, loss[loss=0.1703, simple_loss=0.2537, pruned_loss=0.0435, over 4796.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2371, pruned_loss=0.05158, over 971767.75 frames.], batch size: 24, lr: 7.03e-04 +2022-05-04 05:50:40,410 INFO [train.py:715] (7/8) Epoch 2, batch 11800, loss[loss=0.1536, simple_loss=0.2341, pruned_loss=0.03649, over 4937.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2367, pruned_loss=0.05101, over 972688.22 frames.], batch size: 29, lr: 7.02e-04 +2022-05-04 05:51:20,992 INFO [train.py:715] (7/8) Epoch 2, batch 11850, loss[loss=0.2209, simple_loss=0.278, pruned_loss=0.08192, over 4849.00 frames.], tot_loss[loss=0.17, simple_loss=0.2373, pruned_loss=0.05134, over 971869.07 frames.], batch size: 32, lr: 7.02e-04 +2022-05-04 05:52:00,407 INFO [train.py:715] (7/8) Epoch 2, batch 11900, loss[loss=0.1694, simple_loss=0.2453, pruned_loss=0.04682, over 4941.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2377, pruned_loss=0.05179, over 971783.93 frames.], batch size: 21, lr: 7.02e-04 +2022-05-04 05:52:40,337 INFO [train.py:715] (7/8) Epoch 2, batch 11950, loss[loss=0.1953, simple_loss=0.2593, pruned_loss=0.06571, over 4864.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2371, pruned_loss=0.05181, over 971439.63 frames.], batch size: 20, lr: 7.02e-04 +2022-05-04 05:53:21,662 INFO [train.py:715] (7/8) Epoch 2, batch 12000, loss[loss=0.1522, simple_loss=0.2267, pruned_loss=0.03885, over 4794.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2381, pruned_loss=0.05207, over 972019.04 frames.], batch size: 21, lr: 7.02e-04 +2022-05-04 05:53:21,663 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 05:53:45,623 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1181, simple_loss=0.2049, pruned_loss=0.01568, over 914524.00 frames. 
+2022-05-04 05:54:27,031 INFO [train.py:715] (7/8) Epoch 2, batch 12050, loss[loss=0.1692, simple_loss=0.2438, pruned_loss=0.04729, over 4801.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2386, pruned_loss=0.05251, over 971593.01 frames.], batch size: 25, lr: 7.01e-04 +2022-05-04 05:55:07,120 INFO [train.py:715] (7/8) Epoch 2, batch 12100, loss[loss=0.1742, simple_loss=0.2333, pruned_loss=0.05755, over 4766.00 frames.], tot_loss[loss=0.173, simple_loss=0.2395, pruned_loss=0.05325, over 971812.11 frames.], batch size: 18, lr: 7.01e-04 +2022-05-04 05:55:47,109 INFO [train.py:715] (7/8) Epoch 2, batch 12150, loss[loss=0.2117, simple_loss=0.2662, pruned_loss=0.0786, over 4745.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2396, pruned_loss=0.05386, over 972035.52 frames.], batch size: 19, lr: 7.01e-04 +2022-05-04 05:56:27,812 INFO [train.py:715] (7/8) Epoch 2, batch 12200, loss[loss=0.1906, simple_loss=0.262, pruned_loss=0.05964, over 4865.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2385, pruned_loss=0.05296, over 971409.83 frames.], batch size: 38, lr: 7.01e-04 +2022-05-04 05:57:07,983 INFO [train.py:715] (7/8) Epoch 2, batch 12250, loss[loss=0.2019, simple_loss=0.277, pruned_loss=0.06333, over 4762.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2387, pruned_loss=0.05279, over 972287.20 frames.], batch size: 14, lr: 7.01e-04 +2022-05-04 05:57:48,419 INFO [train.py:715] (7/8) Epoch 2, batch 12300, loss[loss=0.1725, simple_loss=0.2384, pruned_loss=0.05326, over 4870.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2391, pruned_loss=0.05312, over 972100.44 frames.], batch size: 17, lr: 7.00e-04 +2022-05-04 05:58:28,544 INFO [train.py:715] (7/8) Epoch 2, batch 12350, loss[loss=0.1743, simple_loss=0.2315, pruned_loss=0.05853, over 4815.00 frames.], tot_loss[loss=0.1728, simple_loss=0.239, pruned_loss=0.05324, over 972562.15 frames.], batch size: 14, lr: 7.00e-04 +2022-05-04 05:59:09,760 INFO [train.py:715] (7/8) Epoch 2, batch 12400, loss[loss=0.1876, simple_loss=0.2626, pruned_loss=0.05633, over 4941.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2382, pruned_loss=0.05276, over 971985.31 frames.], batch size: 21, lr: 7.00e-04 +2022-05-04 05:59:50,019 INFO [train.py:715] (7/8) Epoch 2, batch 12450, loss[loss=0.1613, simple_loss=0.2311, pruned_loss=0.04582, over 4704.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2373, pruned_loss=0.05249, over 972192.85 frames.], batch size: 15, lr: 7.00e-04 +2022-05-04 06:00:29,875 INFO [train.py:715] (7/8) Epoch 2, batch 12500, loss[loss=0.1536, simple_loss=0.2318, pruned_loss=0.03769, over 4922.00 frames.], tot_loss[loss=0.171, simple_loss=0.2373, pruned_loss=0.05239, over 973165.06 frames.], batch size: 23, lr: 6.99e-04 +2022-05-04 06:01:10,539 INFO [train.py:715] (7/8) Epoch 2, batch 12550, loss[loss=0.1721, simple_loss=0.2362, pruned_loss=0.05402, over 4983.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2372, pruned_loss=0.05207, over 973223.64 frames.], batch size: 28, lr: 6.99e-04 +2022-05-04 06:01:50,874 INFO [train.py:715] (7/8) Epoch 2, batch 12600, loss[loss=0.1777, simple_loss=0.2434, pruned_loss=0.05602, over 4806.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2385, pruned_loss=0.05305, over 973319.61 frames.], batch size: 24, lr: 6.99e-04 +2022-05-04 06:02:30,895 INFO [train.py:715] (7/8) Epoch 2, batch 12650, loss[loss=0.1842, simple_loss=0.2598, pruned_loss=0.05433, over 4832.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2391, pruned_loss=0.0534, over 972710.98 frames.], batch size: 25, lr: 6.99e-04 +2022-05-04 06:03:11,021 
INFO [train.py:715] (7/8) Epoch 2, batch 12700, loss[loss=0.1873, simple_loss=0.2592, pruned_loss=0.05772, over 4855.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2396, pruned_loss=0.05396, over 973337.33 frames.], batch size: 20, lr: 6.99e-04 +2022-05-04 06:03:51,757 INFO [train.py:715] (7/8) Epoch 2, batch 12750, loss[loss=0.1442, simple_loss=0.2126, pruned_loss=0.03783, over 4750.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2387, pruned_loss=0.0535, over 972997.59 frames.], batch size: 14, lr: 6.98e-04 +2022-05-04 06:04:31,920 INFO [train.py:715] (7/8) Epoch 2, batch 12800, loss[loss=0.2139, simple_loss=0.2683, pruned_loss=0.07975, over 4766.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2393, pruned_loss=0.05375, over 973050.67 frames.], batch size: 19, lr: 6.98e-04 +2022-05-04 06:05:11,608 INFO [train.py:715] (7/8) Epoch 2, batch 12850, loss[loss=0.1495, simple_loss=0.225, pruned_loss=0.03702, over 4893.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2389, pruned_loss=0.05385, over 973264.96 frames.], batch size: 19, lr: 6.98e-04 +2022-05-04 06:05:52,438 INFO [train.py:715] (7/8) Epoch 2, batch 12900, loss[loss=0.1688, simple_loss=0.2371, pruned_loss=0.05021, over 4955.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2382, pruned_loss=0.0536, over 973105.26 frames.], batch size: 35, lr: 6.98e-04 +2022-05-04 06:06:32,857 INFO [train.py:715] (7/8) Epoch 2, batch 12950, loss[loss=0.1487, simple_loss=0.216, pruned_loss=0.04071, over 4893.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2383, pruned_loss=0.05354, over 973198.36 frames.], batch size: 22, lr: 6.98e-04 +2022-05-04 06:07:12,810 INFO [train.py:715] (7/8) Epoch 2, batch 13000, loss[loss=0.2139, simple_loss=0.2674, pruned_loss=0.08016, over 4945.00 frames.], tot_loss[loss=0.174, simple_loss=0.2396, pruned_loss=0.05424, over 972733.96 frames.], batch size: 29, lr: 6.97e-04 +2022-05-04 06:07:53,255 INFO [train.py:715] (7/8) Epoch 2, batch 13050, loss[loss=0.1463, simple_loss=0.2158, pruned_loss=0.03838, over 4863.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2403, pruned_loss=0.0541, over 972680.85 frames.], batch size: 13, lr: 6.97e-04 +2022-05-04 06:08:34,494 INFO [train.py:715] (7/8) Epoch 2, batch 13100, loss[loss=0.1697, simple_loss=0.2386, pruned_loss=0.05043, over 4775.00 frames.], tot_loss[loss=0.175, simple_loss=0.2411, pruned_loss=0.05447, over 972078.86 frames.], batch size: 18, lr: 6.97e-04 +2022-05-04 06:09:14,677 INFO [train.py:715] (7/8) Epoch 2, batch 13150, loss[loss=0.2043, simple_loss=0.2622, pruned_loss=0.07323, over 4899.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2388, pruned_loss=0.05315, over 972585.14 frames.], batch size: 18, lr: 6.97e-04 +2022-05-04 06:09:54,439 INFO [train.py:715] (7/8) Epoch 2, batch 13200, loss[loss=0.1924, simple_loss=0.2515, pruned_loss=0.06663, over 4902.00 frames.], tot_loss[loss=0.1718, simple_loss=0.238, pruned_loss=0.05278, over 972824.78 frames.], batch size: 18, lr: 6.96e-04 +2022-05-04 06:10:35,330 INFO [train.py:715] (7/8) Epoch 2, batch 13250, loss[loss=0.1689, simple_loss=0.234, pruned_loss=0.05195, over 4816.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2376, pruned_loss=0.05208, over 972833.23 frames.], batch size: 12, lr: 6.96e-04 +2022-05-04 06:11:15,869 INFO [train.py:715] (7/8) Epoch 2, batch 13300, loss[loss=0.2113, simple_loss=0.2654, pruned_loss=0.07863, over 4919.00 frames.], tot_loss[loss=0.1716, simple_loss=0.238, pruned_loss=0.05264, over 972655.94 frames.], batch size: 29, lr: 6.96e-04 +2022-05-04 06:11:55,919 INFO [train.py:715] (7/8) 
Epoch 2, batch 13350, loss[loss=0.167, simple_loss=0.2325, pruned_loss=0.05071, over 4948.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2382, pruned_loss=0.05247, over 973230.08 frames.], batch size: 35, lr: 6.96e-04 +2022-05-04 06:12:36,498 INFO [train.py:715] (7/8) Epoch 2, batch 13400, loss[loss=0.1923, simple_loss=0.2592, pruned_loss=0.06272, over 4973.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2388, pruned_loss=0.05271, over 973459.63 frames.], batch size: 24, lr: 6.96e-04 +2022-05-04 06:13:17,583 INFO [train.py:715] (7/8) Epoch 2, batch 13450, loss[loss=0.1675, simple_loss=0.233, pruned_loss=0.05102, over 4897.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2389, pruned_loss=0.05309, over 973308.64 frames.], batch size: 19, lr: 6.95e-04 +2022-05-04 06:13:57,536 INFO [train.py:715] (7/8) Epoch 2, batch 13500, loss[loss=0.2195, simple_loss=0.2723, pruned_loss=0.08332, over 4813.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2398, pruned_loss=0.05338, over 973029.56 frames.], batch size: 25, lr: 6.95e-04 +2022-05-04 06:14:37,542 INFO [train.py:715] (7/8) Epoch 2, batch 13550, loss[loss=0.1566, simple_loss=0.2275, pruned_loss=0.04287, over 4844.00 frames.], tot_loss[loss=0.1731, simple_loss=0.24, pruned_loss=0.05313, over 973174.35 frames.], batch size: 30, lr: 6.95e-04 +2022-05-04 06:15:18,688 INFO [train.py:715] (7/8) Epoch 2, batch 13600, loss[loss=0.2172, simple_loss=0.2817, pruned_loss=0.07637, over 4968.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2396, pruned_loss=0.05301, over 973258.55 frames.], batch size: 39, lr: 6.95e-04 +2022-05-04 06:15:59,132 INFO [train.py:715] (7/8) Epoch 2, batch 13650, loss[loss=0.1858, simple_loss=0.2618, pruned_loss=0.05487, over 4695.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2391, pruned_loss=0.05262, over 973334.16 frames.], batch size: 15, lr: 6.95e-04 +2022-05-04 06:16:38,698 INFO [train.py:715] (7/8) Epoch 2, batch 13700, loss[loss=0.1447, simple_loss=0.2072, pruned_loss=0.04111, over 4833.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2387, pruned_loss=0.05247, over 973740.57 frames.], batch size: 13, lr: 6.94e-04 +2022-05-04 06:17:19,963 INFO [train.py:715] (7/8) Epoch 2, batch 13750, loss[loss=0.1403, simple_loss=0.2117, pruned_loss=0.03446, over 4982.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2383, pruned_loss=0.05246, over 972177.40 frames.], batch size: 25, lr: 6.94e-04 +2022-05-04 06:18:00,041 INFO [train.py:715] (7/8) Epoch 2, batch 13800, loss[loss=0.1679, simple_loss=0.2243, pruned_loss=0.05573, over 4950.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2391, pruned_loss=0.05334, over 972809.69 frames.], batch size: 21, lr: 6.94e-04 +2022-05-04 06:18:39,732 INFO [train.py:715] (7/8) Epoch 2, batch 13850, loss[loss=0.1665, simple_loss=0.2296, pruned_loss=0.05175, over 4794.00 frames.], tot_loss[loss=0.1717, simple_loss=0.238, pruned_loss=0.05269, over 972083.46 frames.], batch size: 12, lr: 6.94e-04 +2022-05-04 06:19:19,328 INFO [train.py:715] (7/8) Epoch 2, batch 13900, loss[loss=0.1767, simple_loss=0.242, pruned_loss=0.05569, over 4859.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2372, pruned_loss=0.05197, over 972142.59 frames.], batch size: 32, lr: 6.94e-04 +2022-05-04 06:20:00,089 INFO [train.py:715] (7/8) Epoch 2, batch 13950, loss[loss=0.1689, simple_loss=0.2359, pruned_loss=0.05092, over 4972.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2372, pruned_loss=0.05191, over 971967.18 frames.], batch size: 24, lr: 6.93e-04 +2022-05-04 06:20:40,299 INFO [train.py:715] (7/8) Epoch 2, batch 14000, 
loss[loss=0.1558, simple_loss=0.2257, pruned_loss=0.04297, over 4972.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2376, pruned_loss=0.05182, over 972285.31 frames.], batch size: 15, lr: 6.93e-04 +2022-05-04 06:21:19,550 INFO [train.py:715] (7/8) Epoch 2, batch 14050, loss[loss=0.1653, simple_loss=0.2404, pruned_loss=0.04512, over 4815.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2381, pruned_loss=0.05236, over 972068.76 frames.], batch size: 27, lr: 6.93e-04 +2022-05-04 06:22:01,054 INFO [train.py:715] (7/8) Epoch 2, batch 14100, loss[loss=0.1533, simple_loss=0.2134, pruned_loss=0.04661, over 4794.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2381, pruned_loss=0.05235, over 972352.42 frames.], batch size: 12, lr: 6.93e-04 +2022-05-04 06:22:41,697 INFO [train.py:715] (7/8) Epoch 2, batch 14150, loss[loss=0.1829, simple_loss=0.2519, pruned_loss=0.05699, over 4783.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2376, pruned_loss=0.05195, over 971939.78 frames.], batch size: 18, lr: 6.93e-04 +2022-05-04 06:23:21,641 INFO [train.py:715] (7/8) Epoch 2, batch 14200, loss[loss=0.1692, simple_loss=0.2432, pruned_loss=0.04764, over 4814.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2373, pruned_loss=0.05185, over 972981.47 frames.], batch size: 27, lr: 6.92e-04 +2022-05-04 06:24:01,486 INFO [train.py:715] (7/8) Epoch 2, batch 14250, loss[loss=0.1892, simple_loss=0.2518, pruned_loss=0.0633, over 4927.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2366, pruned_loss=0.05149, over 973022.48 frames.], batch size: 39, lr: 6.92e-04 +2022-05-04 06:24:42,100 INFO [train.py:715] (7/8) Epoch 2, batch 14300, loss[loss=0.1677, simple_loss=0.2417, pruned_loss=0.04685, over 4958.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2361, pruned_loss=0.0509, over 973112.63 frames.], batch size: 21, lr: 6.92e-04 +2022-05-04 06:25:21,664 INFO [train.py:715] (7/8) Epoch 2, batch 14350, loss[loss=0.1845, simple_loss=0.256, pruned_loss=0.05655, over 4642.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2375, pruned_loss=0.05185, over 972275.59 frames.], batch size: 13, lr: 6.92e-04 +2022-05-04 06:26:01,523 INFO [train.py:715] (7/8) Epoch 2, batch 14400, loss[loss=0.1637, simple_loss=0.228, pruned_loss=0.04971, over 4853.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2371, pruned_loss=0.05168, over 972478.61 frames.], batch size: 32, lr: 6.92e-04 +2022-05-04 06:26:41,864 INFO [train.py:715] (7/8) Epoch 2, batch 14450, loss[loss=0.1669, simple_loss=0.2386, pruned_loss=0.04766, over 4873.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2381, pruned_loss=0.05224, over 971607.86 frames.], batch size: 20, lr: 6.91e-04 +2022-05-04 06:27:22,102 INFO [train.py:715] (7/8) Epoch 2, batch 14500, loss[loss=0.1513, simple_loss=0.224, pruned_loss=0.03932, over 4779.00 frames.], tot_loss[loss=0.172, simple_loss=0.2383, pruned_loss=0.05288, over 971661.46 frames.], batch size: 18, lr: 6.91e-04 +2022-05-04 06:28:01,693 INFO [train.py:715] (7/8) Epoch 2, batch 14550, loss[loss=0.1708, simple_loss=0.2216, pruned_loss=0.05998, over 4746.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2388, pruned_loss=0.05291, over 971999.37 frames.], batch size: 16, lr: 6.91e-04 +2022-05-04 06:28:42,173 INFO [train.py:715] (7/8) Epoch 2, batch 14600, loss[loss=0.1563, simple_loss=0.2118, pruned_loss=0.05039, over 4877.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2401, pruned_loss=0.0536, over 971540.25 frames.], batch size: 30, lr: 6.91e-04 +2022-05-04 06:29:22,666 INFO [train.py:715] (7/8) Epoch 2, batch 14650, loss[loss=0.1551, 
simple_loss=0.2148, pruned_loss=0.0477, over 4778.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2396, pruned_loss=0.05332, over 970744.44 frames.], batch size: 12, lr: 6.90e-04 +2022-05-04 06:30:01,961 INFO [train.py:715] (7/8) Epoch 2, batch 14700, loss[loss=0.1545, simple_loss=0.2248, pruned_loss=0.04205, over 4937.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2385, pruned_loss=0.0527, over 971196.07 frames.], batch size: 18, lr: 6.90e-04 +2022-05-04 06:30:41,285 INFO [train.py:715] (7/8) Epoch 2, batch 14750, loss[loss=0.133, simple_loss=0.2025, pruned_loss=0.03179, over 4782.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2385, pruned_loss=0.05236, over 971297.11 frames.], batch size: 18, lr: 6.90e-04 +2022-05-04 06:31:21,772 INFO [train.py:715] (7/8) Epoch 2, batch 14800, loss[loss=0.1757, simple_loss=0.2392, pruned_loss=0.05614, over 4802.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2379, pruned_loss=0.05221, over 970997.71 frames.], batch size: 24, lr: 6.90e-04 +2022-05-04 06:32:01,274 INFO [train.py:715] (7/8) Epoch 2, batch 14850, loss[loss=0.1625, simple_loss=0.2312, pruned_loss=0.04689, over 4910.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2372, pruned_loss=0.05156, over 970483.27 frames.], batch size: 39, lr: 6.90e-04 +2022-05-04 06:32:40,952 INFO [train.py:715] (7/8) Epoch 2, batch 14900, loss[loss=0.1818, simple_loss=0.2454, pruned_loss=0.05915, over 4865.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2382, pruned_loss=0.05266, over 971137.68 frames.], batch size: 20, lr: 6.89e-04 +2022-05-04 06:33:21,123 INFO [train.py:715] (7/8) Epoch 2, batch 14950, loss[loss=0.2491, simple_loss=0.3082, pruned_loss=0.09501, over 4881.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2388, pruned_loss=0.0535, over 971665.84 frames.], batch size: 32, lr: 6.89e-04 +2022-05-04 06:34:01,762 INFO [train.py:715] (7/8) Epoch 2, batch 15000, loss[loss=0.1719, simple_loss=0.2342, pruned_loss=0.05483, over 4950.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2397, pruned_loss=0.05342, over 971528.60 frames.], batch size: 24, lr: 6.89e-04 +2022-05-04 06:34:01,763 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 06:34:11,142 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1176, simple_loss=0.2043, pruned_loss=0.01548, over 914524.00 frames. 
+2022-05-04 06:34:52,068 INFO [train.py:715] (7/8) Epoch 2, batch 15050, loss[loss=0.1775, simple_loss=0.2475, pruned_loss=0.05374, over 4845.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2396, pruned_loss=0.05292, over 971744.23 frames.], batch size: 15, lr: 6.89e-04 +2022-05-04 06:35:31,188 INFO [train.py:715] (7/8) Epoch 2, batch 15100, loss[loss=0.1793, simple_loss=0.2516, pruned_loss=0.05345, over 4824.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2394, pruned_loss=0.05288, over 972305.10 frames.], batch size: 15, lr: 6.89e-04 +2022-05-04 06:36:11,675 INFO [train.py:715] (7/8) Epoch 2, batch 15150, loss[loss=0.1483, simple_loss=0.2272, pruned_loss=0.03473, over 4930.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2386, pruned_loss=0.05256, over 971968.33 frames.], batch size: 18, lr: 6.88e-04 +2022-05-04 06:36:52,160 INFO [train.py:715] (7/8) Epoch 2, batch 15200, loss[loss=0.1835, simple_loss=0.2534, pruned_loss=0.05685, over 4864.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2377, pruned_loss=0.05291, over 971351.53 frames.], batch size: 32, lr: 6.88e-04 +2022-05-04 06:37:31,889 INFO [train.py:715] (7/8) Epoch 2, batch 15250, loss[loss=0.1922, simple_loss=0.2477, pruned_loss=0.0684, over 4905.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2383, pruned_loss=0.05331, over 972062.68 frames.], batch size: 29, lr: 6.88e-04 +2022-05-04 06:38:11,352 INFO [train.py:715] (7/8) Epoch 2, batch 15300, loss[loss=0.1496, simple_loss=0.2251, pruned_loss=0.03707, over 4807.00 frames.], tot_loss[loss=0.1728, simple_loss=0.239, pruned_loss=0.05326, over 972085.24 frames.], batch size: 25, lr: 6.88e-04 +2022-05-04 06:38:51,805 INFO [train.py:715] (7/8) Epoch 2, batch 15350, loss[loss=0.2121, simple_loss=0.2746, pruned_loss=0.07475, over 4873.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2394, pruned_loss=0.05312, over 972121.12 frames.], batch size: 20, lr: 6.88e-04 +2022-05-04 06:39:32,698 INFO [train.py:715] (7/8) Epoch 2, batch 15400, loss[loss=0.14, simple_loss=0.2044, pruned_loss=0.03774, over 4834.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2388, pruned_loss=0.05314, over 971990.00 frames.], batch size: 15, lr: 6.87e-04 +2022-05-04 06:40:11,873 INFO [train.py:715] (7/8) Epoch 2, batch 15450, loss[loss=0.1958, simple_loss=0.2573, pruned_loss=0.06714, over 4934.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2376, pruned_loss=0.05272, over 972015.81 frames.], batch size: 39, lr: 6.87e-04 +2022-05-04 06:40:52,379 INFO [train.py:715] (7/8) Epoch 2, batch 15500, loss[loss=0.1968, simple_loss=0.2585, pruned_loss=0.06759, over 4874.00 frames.], tot_loss[loss=0.173, simple_loss=0.239, pruned_loss=0.05344, over 971903.36 frames.], batch size: 22, lr: 6.87e-04 +2022-05-04 06:41:32,627 INFO [train.py:715] (7/8) Epoch 2, batch 15550, loss[loss=0.157, simple_loss=0.227, pruned_loss=0.04356, over 4900.00 frames.], tot_loss[loss=0.1729, simple_loss=0.239, pruned_loss=0.05346, over 972046.21 frames.], batch size: 17, lr: 6.87e-04 +2022-05-04 06:42:12,566 INFO [train.py:715] (7/8) Epoch 2, batch 15600, loss[loss=0.1316, simple_loss=0.208, pruned_loss=0.02763, over 4947.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2398, pruned_loss=0.05351, over 972169.37 frames.], batch size: 29, lr: 6.87e-04 +2022-05-04 06:42:52,378 INFO [train.py:715] (7/8) Epoch 2, batch 15650, loss[loss=0.1658, simple_loss=0.233, pruned_loss=0.04933, over 4772.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2391, pruned_loss=0.05328, over 971375.55 frames.], batch size: 17, lr: 6.86e-04 +2022-05-04 06:43:33,103 INFO 
[train.py:715] (7/8) Epoch 2, batch 15700, loss[loss=0.1732, simple_loss=0.239, pruned_loss=0.05366, over 4964.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2378, pruned_loss=0.05272, over 971503.60 frames.], batch size: 24, lr: 6.86e-04 +2022-05-04 06:44:13,633 INFO [train.py:715] (7/8) Epoch 2, batch 15750, loss[loss=0.1878, simple_loss=0.2442, pruned_loss=0.06568, over 4779.00 frames.], tot_loss[loss=0.172, simple_loss=0.238, pruned_loss=0.05307, over 970720.05 frames.], batch size: 17, lr: 6.86e-04 +2022-05-04 06:44:52,977 INFO [train.py:715] (7/8) Epoch 2, batch 15800, loss[loss=0.1365, simple_loss=0.2076, pruned_loss=0.03269, over 4786.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2374, pruned_loss=0.05306, over 971930.96 frames.], batch size: 18, lr: 6.86e-04 +2022-05-04 06:45:33,638 INFO [train.py:715] (7/8) Epoch 2, batch 15850, loss[loss=0.1597, simple_loss=0.237, pruned_loss=0.04122, over 4971.00 frames.], tot_loss[loss=0.1714, simple_loss=0.237, pruned_loss=0.05293, over 972440.45 frames.], batch size: 28, lr: 6.86e-04 +2022-05-04 06:46:14,117 INFO [train.py:715] (7/8) Epoch 2, batch 15900, loss[loss=0.154, simple_loss=0.2272, pruned_loss=0.04037, over 4786.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2373, pruned_loss=0.05269, over 971429.26 frames.], batch size: 14, lr: 6.85e-04 +2022-05-04 06:46:53,885 INFO [train.py:715] (7/8) Epoch 2, batch 15950, loss[loss=0.1881, simple_loss=0.2468, pruned_loss=0.06468, over 4699.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2362, pruned_loss=0.05213, over 971380.28 frames.], batch size: 15, lr: 6.85e-04 +2022-05-04 06:47:34,109 INFO [train.py:715] (7/8) Epoch 2, batch 16000, loss[loss=0.1927, simple_loss=0.2487, pruned_loss=0.06838, over 4873.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2366, pruned_loss=0.05225, over 971423.32 frames.], batch size: 22, lr: 6.85e-04 +2022-05-04 06:48:14,448 INFO [train.py:715] (7/8) Epoch 2, batch 16050, loss[loss=0.16, simple_loss=0.2214, pruned_loss=0.04927, over 4987.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2358, pruned_loss=0.05171, over 969985.10 frames.], batch size: 14, lr: 6.85e-04 +2022-05-04 06:48:54,898 INFO [train.py:715] (7/8) Epoch 2, batch 16100, loss[loss=0.1316, simple_loss=0.2118, pruned_loss=0.0257, over 4967.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2358, pruned_loss=0.05161, over 970868.35 frames.], batch size: 24, lr: 6.85e-04 +2022-05-04 06:49:34,161 INFO [train.py:715] (7/8) Epoch 2, batch 16150, loss[loss=0.1659, simple_loss=0.2399, pruned_loss=0.04594, over 4693.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2359, pruned_loss=0.05163, over 970669.89 frames.], batch size: 15, lr: 6.84e-04 +2022-05-04 06:50:14,551 INFO [train.py:715] (7/8) Epoch 2, batch 16200, loss[loss=0.2015, simple_loss=0.2619, pruned_loss=0.07049, over 4957.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2354, pruned_loss=0.05111, over 971398.76 frames.], batch size: 35, lr: 6.84e-04 +2022-05-04 06:50:54,956 INFO [train.py:715] (7/8) Epoch 2, batch 16250, loss[loss=0.1651, simple_loss=0.2267, pruned_loss=0.05179, over 4941.00 frames.], tot_loss[loss=0.1685, simple_loss=0.235, pruned_loss=0.05104, over 970930.06 frames.], batch size: 21, lr: 6.84e-04 +2022-05-04 06:51:34,798 INFO [train.py:715] (7/8) Epoch 2, batch 16300, loss[loss=0.2036, simple_loss=0.2535, pruned_loss=0.07686, over 4880.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2357, pruned_loss=0.05147, over 970550.47 frames.], batch size: 16, lr: 6.84e-04 +2022-05-04 06:52:14,671 INFO [train.py:715] (7/8) Epoch 
2, batch 16350, loss[loss=0.1982, simple_loss=0.2559, pruned_loss=0.07024, over 4942.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2369, pruned_loss=0.0524, over 971648.33 frames.], batch size: 21, lr: 6.84e-04 +2022-05-04 06:52:55,173 INFO [train.py:715] (7/8) Epoch 2, batch 16400, loss[loss=0.2164, simple_loss=0.2727, pruned_loss=0.08005, over 4884.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2368, pruned_loss=0.05213, over 972652.21 frames.], batch size: 39, lr: 6.83e-04 +2022-05-04 06:53:35,564 INFO [train.py:715] (7/8) Epoch 2, batch 16450, loss[loss=0.1499, simple_loss=0.235, pruned_loss=0.03241, over 4926.00 frames.], tot_loss[loss=0.171, simple_loss=0.2375, pruned_loss=0.05226, over 972715.61 frames.], batch size: 21, lr: 6.83e-04 +2022-05-04 06:54:15,149 INFO [train.py:715] (7/8) Epoch 2, batch 16500, loss[loss=0.1849, simple_loss=0.2604, pruned_loss=0.0547, over 4808.00 frames.], tot_loss[loss=0.1701, simple_loss=0.237, pruned_loss=0.05154, over 972593.96 frames.], batch size: 21, lr: 6.83e-04 +2022-05-04 06:54:56,136 INFO [train.py:715] (7/8) Epoch 2, batch 16550, loss[loss=0.1452, simple_loss=0.219, pruned_loss=0.03571, over 4977.00 frames.], tot_loss[loss=0.1722, simple_loss=0.239, pruned_loss=0.05269, over 972622.57 frames.], batch size: 15, lr: 6.83e-04 +2022-05-04 06:55:36,866 INFO [train.py:715] (7/8) Epoch 2, batch 16600, loss[loss=0.212, simple_loss=0.2731, pruned_loss=0.07548, over 4810.00 frames.], tot_loss[loss=0.171, simple_loss=0.2379, pruned_loss=0.05205, over 973186.25 frames.], batch size: 25, lr: 6.83e-04 +2022-05-04 06:56:16,725 INFO [train.py:715] (7/8) Epoch 2, batch 16650, loss[loss=0.1962, simple_loss=0.2443, pruned_loss=0.07409, over 4806.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2371, pruned_loss=0.05182, over 972778.22 frames.], batch size: 21, lr: 6.82e-04 +2022-05-04 06:56:57,165 INFO [train.py:715] (7/8) Epoch 2, batch 16700, loss[loss=0.1761, simple_loss=0.2464, pruned_loss=0.05288, over 4777.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2371, pruned_loss=0.05193, over 971365.08 frames.], batch size: 19, lr: 6.82e-04 +2022-05-04 06:57:37,921 INFO [train.py:715] (7/8) Epoch 2, batch 16750, loss[loss=0.2164, simple_loss=0.2622, pruned_loss=0.0853, over 4964.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2379, pruned_loss=0.05255, over 972578.88 frames.], batch size: 35, lr: 6.82e-04 +2022-05-04 06:58:18,623 INFO [train.py:715] (7/8) Epoch 2, batch 16800, loss[loss=0.1978, simple_loss=0.2628, pruned_loss=0.06645, over 4913.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2379, pruned_loss=0.05264, over 973513.32 frames.], batch size: 17, lr: 6.82e-04 +2022-05-04 06:58:58,048 INFO [train.py:715] (7/8) Epoch 2, batch 16850, loss[loss=0.1487, simple_loss=0.2027, pruned_loss=0.04732, over 4787.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2373, pruned_loss=0.0524, over 973833.78 frames.], batch size: 12, lr: 6.82e-04 +2022-05-04 06:59:39,314 INFO [train.py:715] (7/8) Epoch 2, batch 16900, loss[loss=0.1575, simple_loss=0.2273, pruned_loss=0.04389, over 4978.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2381, pruned_loss=0.05308, over 973450.94 frames.], batch size: 24, lr: 6.81e-04 +2022-05-04 07:00:20,141 INFO [train.py:715] (7/8) Epoch 2, batch 16950, loss[loss=0.1624, simple_loss=0.2383, pruned_loss=0.0433, over 4890.00 frames.], tot_loss[loss=0.173, simple_loss=0.239, pruned_loss=0.0535, over 973471.35 frames.], batch size: 22, lr: 6.81e-04 +2022-05-04 07:00:59,947 INFO [train.py:715] (7/8) Epoch 2, batch 17000, loss[loss=0.2044, 
simple_loss=0.2611, pruned_loss=0.07381, over 4930.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2389, pruned_loss=0.05347, over 973398.15 frames.], batch size: 18, lr: 6.81e-04 +2022-05-04 07:01:40,377 INFO [train.py:715] (7/8) Epoch 2, batch 17050, loss[loss=0.1717, simple_loss=0.2387, pruned_loss=0.05231, over 4749.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2385, pruned_loss=0.05325, over 973302.36 frames.], batch size: 16, lr: 6.81e-04 +2022-05-04 07:02:20,967 INFO [train.py:715] (7/8) Epoch 2, batch 17100, loss[loss=0.1537, simple_loss=0.2198, pruned_loss=0.04386, over 4984.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2387, pruned_loss=0.05319, over 973531.93 frames.], batch size: 28, lr: 6.81e-04 +2022-05-04 07:03:01,196 INFO [train.py:715] (7/8) Epoch 2, batch 17150, loss[loss=0.1485, simple_loss=0.2273, pruned_loss=0.03487, over 4827.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2381, pruned_loss=0.05273, over 973304.79 frames.], batch size: 15, lr: 6.81e-04 +2022-05-04 07:03:40,481 INFO [train.py:715] (7/8) Epoch 2, batch 17200, loss[loss=0.1772, simple_loss=0.2451, pruned_loss=0.05462, over 4990.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2375, pruned_loss=0.0517, over 973430.65 frames.], batch size: 28, lr: 6.80e-04 +2022-05-04 07:04:20,885 INFO [train.py:715] (7/8) Epoch 2, batch 17250, loss[loss=0.1522, simple_loss=0.2218, pruned_loss=0.04127, over 4763.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2386, pruned_loss=0.05245, over 972875.11 frames.], batch size: 12, lr: 6.80e-04 +2022-05-04 07:05:01,346 INFO [train.py:715] (7/8) Epoch 2, batch 17300, loss[loss=0.2017, simple_loss=0.2542, pruned_loss=0.07455, over 4880.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2382, pruned_loss=0.05258, over 972570.91 frames.], batch size: 16, lr: 6.80e-04 +2022-05-04 07:05:40,927 INFO [train.py:715] (7/8) Epoch 2, batch 17350, loss[loss=0.1882, simple_loss=0.2613, pruned_loss=0.05755, over 4897.00 frames.], tot_loss[loss=0.172, simple_loss=0.2381, pruned_loss=0.05292, over 972368.48 frames.], batch size: 19, lr: 6.80e-04 +2022-05-04 07:06:20,387 INFO [train.py:715] (7/8) Epoch 2, batch 17400, loss[loss=0.1649, simple_loss=0.2264, pruned_loss=0.05171, over 4754.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2384, pruned_loss=0.05291, over 972540.67 frames.], batch size: 18, lr: 6.80e-04 +2022-05-04 07:07:00,343 INFO [train.py:715] (7/8) Epoch 2, batch 17450, loss[loss=0.1706, simple_loss=0.2426, pruned_loss=0.04934, over 4812.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2381, pruned_loss=0.0529, over 972327.19 frames.], batch size: 25, lr: 6.79e-04 +2022-05-04 07:07:40,092 INFO [train.py:715] (7/8) Epoch 2, batch 17500, loss[loss=0.1666, simple_loss=0.2281, pruned_loss=0.0526, over 4864.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2372, pruned_loss=0.05251, over 972851.27 frames.], batch size: 30, lr: 6.79e-04 +2022-05-04 07:08:18,854 INFO [train.py:715] (7/8) Epoch 2, batch 17550, loss[loss=0.1424, simple_loss=0.2103, pruned_loss=0.03721, over 4803.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2381, pruned_loss=0.05224, over 972888.37 frames.], batch size: 13, lr: 6.79e-04 +2022-05-04 07:08:58,976 INFO [train.py:715] (7/8) Epoch 2, batch 17600, loss[loss=0.1807, simple_loss=0.2465, pruned_loss=0.05744, over 4893.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2371, pruned_loss=0.05177, over 972327.75 frames.], batch size: 19, lr: 6.79e-04 +2022-05-04 07:09:38,391 INFO [train.py:715] (7/8) Epoch 2, batch 17650, loss[loss=0.149, simple_loss=0.2154, 
pruned_loss=0.04128, over 4794.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2383, pruned_loss=0.05267, over 972070.91 frames.], batch size: 12, lr: 6.79e-04 +2022-05-04 07:10:17,893 INFO [train.py:715] (7/8) Epoch 2, batch 17700, loss[loss=0.1609, simple_loss=0.2313, pruned_loss=0.04528, over 4894.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2371, pruned_loss=0.05187, over 973198.15 frames.], batch size: 22, lr: 6.78e-04 +2022-05-04 07:10:57,826 INFO [train.py:715] (7/8) Epoch 2, batch 17750, loss[loss=0.184, simple_loss=0.2518, pruned_loss=0.05809, over 4802.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2372, pruned_loss=0.05228, over 972758.76 frames.], batch size: 18, lr: 6.78e-04 +2022-05-04 07:11:37,711 INFO [train.py:715] (7/8) Epoch 2, batch 17800, loss[loss=0.2163, simple_loss=0.2548, pruned_loss=0.0889, over 4920.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2374, pruned_loss=0.05284, over 972724.11 frames.], batch size: 18, lr: 6.78e-04 +2022-05-04 07:12:17,985 INFO [train.py:715] (7/8) Epoch 2, batch 17850, loss[loss=0.1532, simple_loss=0.2241, pruned_loss=0.04116, over 4884.00 frames.], tot_loss[loss=0.1708, simple_loss=0.237, pruned_loss=0.05225, over 972344.38 frames.], batch size: 22, lr: 6.78e-04 +2022-05-04 07:12:56,823 INFO [train.py:715] (7/8) Epoch 2, batch 17900, loss[loss=0.2367, simple_loss=0.3155, pruned_loss=0.07899, over 4906.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2369, pruned_loss=0.05194, over 972674.44 frames.], batch size: 19, lr: 6.78e-04 +2022-05-04 07:13:36,738 INFO [train.py:715] (7/8) Epoch 2, batch 17950, loss[loss=0.1543, simple_loss=0.2249, pruned_loss=0.04187, over 4938.00 frames.], tot_loss[loss=0.1715, simple_loss=0.238, pruned_loss=0.05253, over 972457.11 frames.], batch size: 29, lr: 6.77e-04 +2022-05-04 07:14:16,911 INFO [train.py:715] (7/8) Epoch 2, batch 18000, loss[loss=0.2001, simple_loss=0.2455, pruned_loss=0.0774, over 4982.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2382, pruned_loss=0.05344, over 972039.16 frames.], batch size: 14, lr: 6.77e-04 +2022-05-04 07:14:16,912 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 07:14:26,628 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1173, simple_loss=0.2039, pruned_loss=0.01538, over 914524.00 frames. 
+2022-05-04 07:15:07,362 INFO [train.py:715] (7/8) Epoch 2, batch 18050, loss[loss=0.1606, simple_loss=0.22, pruned_loss=0.05065, over 4857.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2384, pruned_loss=0.05308, over 972571.98 frames.], batch size: 32, lr: 6.77e-04 +2022-05-04 07:15:46,536 INFO [train.py:715] (7/8) Epoch 2, batch 18100, loss[loss=0.2076, simple_loss=0.267, pruned_loss=0.07404, over 4798.00 frames.], tot_loss[loss=0.172, simple_loss=0.2381, pruned_loss=0.05292, over 972448.51 frames.], batch size: 14, lr: 6.77e-04 +2022-05-04 07:16:27,421 INFO [train.py:715] (7/8) Epoch 2, batch 18150, loss[loss=0.1811, simple_loss=0.2533, pruned_loss=0.05443, over 4852.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2384, pruned_loss=0.0529, over 973038.73 frames.], batch size: 20, lr: 6.77e-04 +2022-05-04 07:17:08,372 INFO [train.py:715] (7/8) Epoch 2, batch 18200, loss[loss=0.179, simple_loss=0.2509, pruned_loss=0.05349, over 4896.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2376, pruned_loss=0.05209, over 972618.58 frames.], batch size: 39, lr: 6.76e-04 +2022-05-04 07:17:49,831 INFO [train.py:715] (7/8) Epoch 2, batch 18250, loss[loss=0.1801, simple_loss=0.2349, pruned_loss=0.06268, over 4984.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2379, pruned_loss=0.0524, over 972375.68 frames.], batch size: 14, lr: 6.76e-04 +2022-05-04 07:18:30,275 INFO [train.py:715] (7/8) Epoch 2, batch 18300, loss[loss=0.1433, simple_loss=0.212, pruned_loss=0.03736, over 4909.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2383, pruned_loss=0.05272, over 972791.50 frames.], batch size: 17, lr: 6.76e-04 +2022-05-04 07:19:12,139 INFO [train.py:715] (7/8) Epoch 2, batch 18350, loss[loss=0.1663, simple_loss=0.2352, pruned_loss=0.04877, over 4744.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2388, pruned_loss=0.05276, over 972380.35 frames.], batch size: 16, lr: 6.76e-04 +2022-05-04 07:19:56,504 INFO [train.py:715] (7/8) Epoch 2, batch 18400, loss[loss=0.1788, simple_loss=0.2423, pruned_loss=0.05765, over 4935.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2397, pruned_loss=0.05279, over 972582.36 frames.], batch size: 35, lr: 6.76e-04 +2022-05-04 07:20:36,598 INFO [train.py:715] (7/8) Epoch 2, batch 18450, loss[loss=0.1635, simple_loss=0.2269, pruned_loss=0.05008, over 4821.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2386, pruned_loss=0.0522, over 972427.09 frames.], batch size: 25, lr: 6.75e-04 +2022-05-04 07:21:18,113 INFO [train.py:715] (7/8) Epoch 2, batch 18500, loss[loss=0.1416, simple_loss=0.2064, pruned_loss=0.03836, over 4978.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2395, pruned_loss=0.05255, over 971712.06 frames.], batch size: 15, lr: 6.75e-04 +2022-05-04 07:21:59,814 INFO [train.py:715] (7/8) Epoch 2, batch 18550, loss[loss=0.1522, simple_loss=0.227, pruned_loss=0.03869, over 4870.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2392, pruned_loss=0.05234, over 971077.95 frames.], batch size: 39, lr: 6.75e-04 +2022-05-04 07:22:41,514 INFO [train.py:715] (7/8) Epoch 2, batch 18600, loss[loss=0.1577, simple_loss=0.2377, pruned_loss=0.03887, over 4805.00 frames.], tot_loss[loss=0.1707, simple_loss=0.238, pruned_loss=0.05168, over 971312.84 frames.], batch size: 25, lr: 6.75e-04 +2022-05-04 07:23:21,832 INFO [train.py:715] (7/8) Epoch 2, batch 18650, loss[loss=0.1617, simple_loss=0.2389, pruned_loss=0.04228, over 4890.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2388, pruned_loss=0.05232, over 972219.56 frames.], batch size: 39, lr: 6.75e-04 +2022-05-04 07:24:03,485 INFO 
[train.py:715] (7/8) Epoch 2, batch 18700, loss[loss=0.1575, simple_loss=0.2289, pruned_loss=0.04311, over 4755.00 frames.], tot_loss[loss=0.1721, simple_loss=0.239, pruned_loss=0.05264, over 972323.05 frames.], batch size: 19, lr: 6.75e-04 +2022-05-04 07:24:45,167 INFO [train.py:715] (7/8) Epoch 2, batch 18750, loss[loss=0.1867, simple_loss=0.2503, pruned_loss=0.06152, over 4743.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2389, pruned_loss=0.05278, over 970963.20 frames.], batch size: 16, lr: 6.74e-04 +2022-05-04 07:25:25,726 INFO [train.py:715] (7/8) Epoch 2, batch 18800, loss[loss=0.1386, simple_loss=0.2068, pruned_loss=0.03517, over 4913.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2388, pruned_loss=0.05266, over 972156.71 frames.], batch size: 23, lr: 6.74e-04 +2022-05-04 07:26:06,672 INFO [train.py:715] (7/8) Epoch 2, batch 18850, loss[loss=0.2167, simple_loss=0.281, pruned_loss=0.07623, over 4892.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2393, pruned_loss=0.05264, over 972434.47 frames.], batch size: 16, lr: 6.74e-04 +2022-05-04 07:26:48,083 INFO [train.py:715] (7/8) Epoch 2, batch 18900, loss[loss=0.1497, simple_loss=0.2213, pruned_loss=0.03901, over 4915.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2381, pruned_loss=0.05243, over 973593.71 frames.], batch size: 19, lr: 6.74e-04 +2022-05-04 07:27:29,075 INFO [train.py:715] (7/8) Epoch 2, batch 18950, loss[loss=0.1475, simple_loss=0.2245, pruned_loss=0.03526, over 4973.00 frames.], tot_loss[loss=0.171, simple_loss=0.2379, pruned_loss=0.05199, over 973487.09 frames.], batch size: 28, lr: 6.74e-04 +2022-05-04 07:28:09,470 INFO [train.py:715] (7/8) Epoch 2, batch 19000, loss[loss=0.1955, simple_loss=0.2595, pruned_loss=0.06571, over 4817.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2387, pruned_loss=0.05254, over 972619.56 frames.], batch size: 25, lr: 6.73e-04 +2022-05-04 07:28:51,002 INFO [train.py:715] (7/8) Epoch 2, batch 19050, loss[loss=0.1554, simple_loss=0.2327, pruned_loss=0.03909, over 4831.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2381, pruned_loss=0.05224, over 972384.68 frames.], batch size: 26, lr: 6.73e-04 +2022-05-04 07:29:32,580 INFO [train.py:715] (7/8) Epoch 2, batch 19100, loss[loss=0.1339, simple_loss=0.2069, pruned_loss=0.03047, over 4947.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2379, pruned_loss=0.05253, over 972381.33 frames.], batch size: 23, lr: 6.73e-04 +2022-05-04 07:30:13,194 INFO [train.py:715] (7/8) Epoch 2, batch 19150, loss[loss=0.1782, simple_loss=0.2347, pruned_loss=0.06081, over 4864.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2376, pruned_loss=0.05238, over 972656.57 frames.], batch size: 32, lr: 6.73e-04 +2022-05-04 07:30:53,904 INFO [train.py:715] (7/8) Epoch 2, batch 19200, loss[loss=0.1488, simple_loss=0.2231, pruned_loss=0.03728, over 4793.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2377, pruned_loss=0.05206, over 973110.88 frames.], batch size: 24, lr: 6.73e-04 +2022-05-04 07:31:35,009 INFO [train.py:715] (7/8) Epoch 2, batch 19250, loss[loss=0.184, simple_loss=0.2529, pruned_loss=0.05755, over 4780.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2372, pruned_loss=0.05181, over 973042.24 frames.], batch size: 17, lr: 6.72e-04 +2022-05-04 07:32:15,458 INFO [train.py:715] (7/8) Epoch 2, batch 19300, loss[loss=0.1838, simple_loss=0.246, pruned_loss=0.06084, over 4922.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2373, pruned_loss=0.0517, over 972445.75 frames.], batch size: 18, lr: 6.72e-04 +2022-05-04 07:32:55,612 INFO [train.py:715] (7/8) 
Epoch 2, batch 19350, loss[loss=0.1181, simple_loss=0.1947, pruned_loss=0.02078, over 4969.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2365, pruned_loss=0.05131, over 972000.52 frames.], batch size: 21, lr: 6.72e-04 +2022-05-04 07:33:36,562 INFO [train.py:715] (7/8) Epoch 2, batch 19400, loss[loss=0.2254, simple_loss=0.2832, pruned_loss=0.0838, over 4808.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2365, pruned_loss=0.05163, over 972583.78 frames.], batch size: 25, lr: 6.72e-04 +2022-05-04 07:34:18,482 INFO [train.py:715] (7/8) Epoch 2, batch 19450, loss[loss=0.2065, simple_loss=0.2727, pruned_loss=0.07014, over 4938.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2376, pruned_loss=0.0518, over 972589.46 frames.], batch size: 23, lr: 6.72e-04 +2022-05-04 07:34:58,699 INFO [train.py:715] (7/8) Epoch 2, batch 19500, loss[loss=0.1873, simple_loss=0.2469, pruned_loss=0.06391, over 4881.00 frames.], tot_loss[loss=0.1716, simple_loss=0.238, pruned_loss=0.05261, over 971379.80 frames.], batch size: 39, lr: 6.72e-04 +2022-05-04 07:35:38,984 INFO [train.py:715] (7/8) Epoch 2, batch 19550, loss[loss=0.151, simple_loss=0.2218, pruned_loss=0.04009, over 4987.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2374, pruned_loss=0.0522, over 971671.21 frames.], batch size: 28, lr: 6.71e-04 +2022-05-04 07:36:20,455 INFO [train.py:715] (7/8) Epoch 2, batch 19600, loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03014, over 4889.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2383, pruned_loss=0.05275, over 971342.98 frames.], batch size: 22, lr: 6.71e-04 +2022-05-04 07:37:01,113 INFO [train.py:715] (7/8) Epoch 2, batch 19650, loss[loss=0.1681, simple_loss=0.2253, pruned_loss=0.05549, over 4948.00 frames.], tot_loss[loss=0.172, simple_loss=0.2382, pruned_loss=0.05291, over 971180.28 frames.], batch size: 24, lr: 6.71e-04 +2022-05-04 07:37:40,952 INFO [train.py:715] (7/8) Epoch 2, batch 19700, loss[loss=0.1648, simple_loss=0.2285, pruned_loss=0.05059, over 4904.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2381, pruned_loss=0.05258, over 971631.67 frames.], batch size: 17, lr: 6.71e-04 +2022-05-04 07:38:21,838 INFO [train.py:715] (7/8) Epoch 2, batch 19750, loss[loss=0.2168, simple_loss=0.2719, pruned_loss=0.08087, over 4856.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2388, pruned_loss=0.05251, over 972169.94 frames.], batch size: 32, lr: 6.71e-04 +2022-05-04 07:39:02,976 INFO [train.py:715] (7/8) Epoch 2, batch 19800, loss[loss=0.1655, simple_loss=0.2275, pruned_loss=0.05175, over 4867.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2382, pruned_loss=0.05182, over 973009.64 frames.], batch size: 16, lr: 6.70e-04 +2022-05-04 07:39:42,770 INFO [train.py:715] (7/8) Epoch 2, batch 19850, loss[loss=0.1678, simple_loss=0.2317, pruned_loss=0.05194, over 4789.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2369, pruned_loss=0.05118, over 972943.43 frames.], batch size: 21, lr: 6.70e-04 +2022-05-04 07:40:23,485 INFO [train.py:715] (7/8) Epoch 2, batch 19900, loss[loss=0.1621, simple_loss=0.2301, pruned_loss=0.04711, over 4789.00 frames.], tot_loss[loss=0.17, simple_loss=0.2371, pruned_loss=0.05145, over 972763.60 frames.], batch size: 14, lr: 6.70e-04 +2022-05-04 07:41:04,463 INFO [train.py:715] (7/8) Epoch 2, batch 19950, loss[loss=0.1595, simple_loss=0.2127, pruned_loss=0.05313, over 4892.00 frames.], tot_loss[loss=0.169, simple_loss=0.236, pruned_loss=0.051, over 973426.82 frames.], batch size: 32, lr: 6.70e-04 +2022-05-04 07:41:44,806 INFO [train.py:715] (7/8) Epoch 2, batch 20000, 
loss[loss=0.1817, simple_loss=0.2404, pruned_loss=0.06148, over 4870.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2368, pruned_loss=0.05154, over 973168.55 frames.], batch size: 16, lr: 6.70e-04 +2022-05-04 07:42:25,545 INFO [train.py:715] (7/8) Epoch 2, batch 20050, loss[loss=0.1702, simple_loss=0.2402, pruned_loss=0.05007, over 4853.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2365, pruned_loss=0.05158, over 973961.77 frames.], batch size: 20, lr: 6.69e-04 +2022-05-04 07:43:06,882 INFO [train.py:715] (7/8) Epoch 2, batch 20100, loss[loss=0.1923, simple_loss=0.2591, pruned_loss=0.06275, over 4879.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2373, pruned_loss=0.05188, over 974221.13 frames.], batch size: 38, lr: 6.69e-04 +2022-05-04 07:43:48,583 INFO [train.py:715] (7/8) Epoch 2, batch 20150, loss[loss=0.1675, simple_loss=0.2279, pruned_loss=0.05355, over 4778.00 frames.], tot_loss[loss=0.1702, simple_loss=0.237, pruned_loss=0.05168, over 972633.82 frames.], batch size: 14, lr: 6.69e-04 +2022-05-04 07:44:28,876 INFO [train.py:715] (7/8) Epoch 2, batch 20200, loss[loss=0.1539, simple_loss=0.2206, pruned_loss=0.04366, over 4930.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2369, pruned_loss=0.05166, over 972534.32 frames.], batch size: 21, lr: 6.69e-04 +2022-05-04 07:45:10,319 INFO [train.py:715] (7/8) Epoch 2, batch 20250, loss[loss=0.1697, simple_loss=0.241, pruned_loss=0.04919, over 4951.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2362, pruned_loss=0.05131, over 972284.52 frames.], batch size: 21, lr: 6.69e-04 +2022-05-04 07:45:52,276 INFO [train.py:715] (7/8) Epoch 2, batch 20300, loss[loss=0.1911, simple_loss=0.2462, pruned_loss=0.06797, over 4976.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2358, pruned_loss=0.05062, over 972013.09 frames.], batch size: 24, lr: 6.69e-04 +2022-05-04 07:46:33,092 INFO [train.py:715] (7/8) Epoch 2, batch 20350, loss[loss=0.1634, simple_loss=0.2372, pruned_loss=0.04478, over 4857.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2358, pruned_loss=0.05067, over 972443.24 frames.], batch size: 20, lr: 6.68e-04 +2022-05-04 07:47:14,065 INFO [train.py:715] (7/8) Epoch 2, batch 20400, loss[loss=0.1934, simple_loss=0.259, pruned_loss=0.06392, over 4865.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2364, pruned_loss=0.05095, over 971769.61 frames.], batch size: 20, lr: 6.68e-04 +2022-05-04 07:47:56,162 INFO [train.py:715] (7/8) Epoch 2, batch 20450, loss[loss=0.1604, simple_loss=0.2333, pruned_loss=0.04376, over 4744.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2367, pruned_loss=0.05098, over 972084.52 frames.], batch size: 16, lr: 6.68e-04 +2022-05-04 07:48:37,712 INFO [train.py:715] (7/8) Epoch 2, batch 20500, loss[loss=0.1656, simple_loss=0.2317, pruned_loss=0.04973, over 4751.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2358, pruned_loss=0.05056, over 972190.01 frames.], batch size: 16, lr: 6.68e-04 +2022-05-04 07:49:18,505 INFO [train.py:715] (7/8) Epoch 2, batch 20550, loss[loss=0.1515, simple_loss=0.2168, pruned_loss=0.0431, over 4902.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2361, pruned_loss=0.05056, over 972322.09 frames.], batch size: 17, lr: 6.68e-04 +2022-05-04 07:49:59,709 INFO [train.py:715] (7/8) Epoch 2, batch 20600, loss[loss=0.1572, simple_loss=0.2224, pruned_loss=0.04599, over 4865.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2362, pruned_loss=0.0511, over 972325.89 frames.], batch size: 20, lr: 6.67e-04 +2022-05-04 07:50:41,272 INFO [train.py:715] (7/8) Epoch 2, batch 20650, loss[loss=0.1549, 
simple_loss=0.2279, pruned_loss=0.04092, over 4777.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2365, pruned_loss=0.0509, over 972931.21 frames.], batch size: 18, lr: 6.67e-04 +2022-05-04 07:51:22,509 INFO [train.py:715] (7/8) Epoch 2, batch 20700, loss[loss=0.181, simple_loss=0.2388, pruned_loss=0.06155, over 4853.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2366, pruned_loss=0.0509, over 972376.15 frames.], batch size: 20, lr: 6.67e-04 +2022-05-04 07:52:03,039 INFO [train.py:715] (7/8) Epoch 2, batch 20750, loss[loss=0.1628, simple_loss=0.2377, pruned_loss=0.04397, over 4690.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2366, pruned_loss=0.05144, over 971525.94 frames.], batch size: 15, lr: 6.67e-04 +2022-05-04 07:52:44,284 INFO [train.py:715] (7/8) Epoch 2, batch 20800, loss[loss=0.1627, simple_loss=0.236, pruned_loss=0.04467, over 4898.00 frames.], tot_loss[loss=0.1703, simple_loss=0.237, pruned_loss=0.05182, over 971229.67 frames.], batch size: 17, lr: 6.67e-04 +2022-05-04 07:53:25,484 INFO [train.py:715] (7/8) Epoch 2, batch 20850, loss[loss=0.1551, simple_loss=0.2335, pruned_loss=0.03834, over 4859.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2363, pruned_loss=0.05143, over 970652.74 frames.], batch size: 20, lr: 6.66e-04 +2022-05-04 07:54:06,141 INFO [train.py:715] (7/8) Epoch 2, batch 20900, loss[loss=0.1932, simple_loss=0.2605, pruned_loss=0.06296, over 4850.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2366, pruned_loss=0.0513, over 971081.42 frames.], batch size: 16, lr: 6.66e-04 +2022-05-04 07:54:47,194 INFO [train.py:715] (7/8) Epoch 2, batch 20950, loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03175, over 4839.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2368, pruned_loss=0.05199, over 971436.90 frames.], batch size: 26, lr: 6.66e-04 +2022-05-04 07:55:28,388 INFO [train.py:715] (7/8) Epoch 2, batch 21000, loss[loss=0.1677, simple_loss=0.2363, pruned_loss=0.04955, over 4976.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2366, pruned_loss=0.05149, over 971910.86 frames.], batch size: 28, lr: 6.66e-04 +2022-05-04 07:55:28,389 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 07:55:39,045 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1174, simple_loss=0.2036, pruned_loss=0.01562, over 914524.00 frames. 
+2022-05-04 07:56:20,522 INFO [train.py:715] (7/8) Epoch 2, batch 21050, loss[loss=0.1794, simple_loss=0.2467, pruned_loss=0.05602, over 4920.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2367, pruned_loss=0.05123, over 972529.46 frames.], batch size: 18, lr: 6.66e-04 +2022-05-04 07:57:00,995 INFO [train.py:715] (7/8) Epoch 2, batch 21100, loss[loss=0.1241, simple_loss=0.1839, pruned_loss=0.03215, over 4837.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2363, pruned_loss=0.05113, over 972569.38 frames.], batch size: 12, lr: 6.66e-04 +2022-05-04 07:57:41,497 INFO [train.py:715] (7/8) Epoch 2, batch 21150, loss[loss=0.1883, simple_loss=0.2508, pruned_loss=0.06287, over 4988.00 frames.], tot_loss[loss=0.1694, simple_loss=0.236, pruned_loss=0.05139, over 971466.22 frames.], batch size: 16, lr: 6.65e-04 +2022-05-04 07:58:22,038 INFO [train.py:715] (7/8) Epoch 2, batch 21200, loss[loss=0.1841, simple_loss=0.2464, pruned_loss=0.06092, over 4899.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2369, pruned_loss=0.05124, over 971613.70 frames.], batch size: 17, lr: 6.65e-04 +2022-05-04 07:59:02,139 INFO [train.py:715] (7/8) Epoch 2, batch 21250, loss[loss=0.1283, simple_loss=0.2059, pruned_loss=0.02538, over 4981.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2361, pruned_loss=0.05083, over 971908.27 frames.], batch size: 28, lr: 6.65e-04 +2022-05-04 07:59:42,850 INFO [train.py:715] (7/8) Epoch 2, batch 21300, loss[loss=0.1534, simple_loss=0.2254, pruned_loss=0.04067, over 4981.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2362, pruned_loss=0.05144, over 972293.02 frames.], batch size: 14, lr: 6.65e-04 +2022-05-04 08:00:23,565 INFO [train.py:715] (7/8) Epoch 2, batch 21350, loss[loss=0.1715, simple_loss=0.2416, pruned_loss=0.05075, over 4802.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2372, pruned_loss=0.05174, over 973078.16 frames.], batch size: 21, lr: 6.65e-04 +2022-05-04 08:01:04,867 INFO [train.py:715] (7/8) Epoch 2, batch 21400, loss[loss=0.1477, simple_loss=0.2236, pruned_loss=0.03594, over 4978.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2369, pruned_loss=0.05164, over 972949.72 frames.], batch size: 24, lr: 6.64e-04 +2022-05-04 08:01:45,138 INFO [train.py:715] (7/8) Epoch 2, batch 21450, loss[loss=0.1564, simple_loss=0.2276, pruned_loss=0.04254, over 4775.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2375, pruned_loss=0.05205, over 971486.33 frames.], batch size: 18, lr: 6.64e-04 +2022-05-04 08:02:26,069 INFO [train.py:715] (7/8) Epoch 2, batch 21500, loss[loss=0.1795, simple_loss=0.2571, pruned_loss=0.05096, over 4929.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2372, pruned_loss=0.05169, over 972223.71 frames.], batch size: 17, lr: 6.64e-04 +2022-05-04 08:03:07,359 INFO [train.py:715] (7/8) Epoch 2, batch 21550, loss[loss=0.1927, simple_loss=0.2532, pruned_loss=0.06608, over 4935.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2379, pruned_loss=0.05267, over 972629.18 frames.], batch size: 23, lr: 6.64e-04 +2022-05-04 08:03:47,340 INFO [train.py:715] (7/8) Epoch 2, batch 21600, loss[loss=0.1549, simple_loss=0.2288, pruned_loss=0.04051, over 4864.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2377, pruned_loss=0.05235, over 973052.90 frames.], batch size: 20, lr: 6.64e-04 +2022-05-04 08:04:28,568 INFO [train.py:715] (7/8) Epoch 2, batch 21650, loss[loss=0.1986, simple_loss=0.2618, pruned_loss=0.06764, over 4887.00 frames.], tot_loss[loss=0.1715, simple_loss=0.238, pruned_loss=0.05257, over 973420.26 frames.], batch size: 22, lr: 6.64e-04 +2022-05-04 
08:05:10,116 INFO [train.py:715] (7/8) Epoch 2, batch 21700, loss[loss=0.1358, simple_loss=0.1999, pruned_loss=0.03589, over 4915.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2382, pruned_loss=0.05261, over 972818.99 frames.], batch size: 23, lr: 6.63e-04 +2022-05-04 08:05:50,686 INFO [train.py:715] (7/8) Epoch 2, batch 21750, loss[loss=0.1755, simple_loss=0.2306, pruned_loss=0.06025, over 4787.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2383, pruned_loss=0.05291, over 972748.96 frames.], batch size: 17, lr: 6.63e-04 +2022-05-04 08:06:31,758 INFO [train.py:715] (7/8) Epoch 2, batch 21800, loss[loss=0.1484, simple_loss=0.2186, pruned_loss=0.03915, over 4797.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2389, pruned_loss=0.05333, over 972486.93 frames.], batch size: 21, lr: 6.63e-04 +2022-05-04 08:07:12,180 INFO [train.py:715] (7/8) Epoch 2, batch 21850, loss[loss=0.1467, simple_loss=0.221, pruned_loss=0.03614, over 4918.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2385, pruned_loss=0.05307, over 972150.66 frames.], batch size: 23, lr: 6.63e-04 +2022-05-04 08:07:53,267 INFO [train.py:715] (7/8) Epoch 2, batch 21900, loss[loss=0.1703, simple_loss=0.2347, pruned_loss=0.05293, over 4708.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2378, pruned_loss=0.05244, over 972919.21 frames.], batch size: 15, lr: 6.63e-04 +2022-05-04 08:08:33,961 INFO [train.py:715] (7/8) Epoch 2, batch 21950, loss[loss=0.2041, simple_loss=0.2714, pruned_loss=0.06842, over 4890.00 frames.], tot_loss[loss=0.171, simple_loss=0.2376, pruned_loss=0.05217, over 973449.44 frames.], batch size: 22, lr: 6.62e-04 +2022-05-04 08:09:15,717 INFO [train.py:715] (7/8) Epoch 2, batch 22000, loss[loss=0.1848, simple_loss=0.2547, pruned_loss=0.05744, over 4950.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2366, pruned_loss=0.05159, over 972867.45 frames.], batch size: 21, lr: 6.62e-04 +2022-05-04 08:09:57,816 INFO [train.py:715] (7/8) Epoch 2, batch 22050, loss[loss=0.1868, simple_loss=0.2487, pruned_loss=0.06242, over 4810.00 frames.], tot_loss[loss=0.171, simple_loss=0.2377, pruned_loss=0.05213, over 972938.07 frames.], batch size: 25, lr: 6.62e-04 +2022-05-04 08:10:38,625 INFO [train.py:715] (7/8) Epoch 2, batch 22100, loss[loss=0.1762, simple_loss=0.2427, pruned_loss=0.05487, over 4867.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2374, pruned_loss=0.05218, over 972492.54 frames.], batch size: 20, lr: 6.62e-04 +2022-05-04 08:11:20,107 INFO [train.py:715] (7/8) Epoch 2, batch 22150, loss[loss=0.1955, simple_loss=0.2532, pruned_loss=0.06886, over 4883.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2368, pruned_loss=0.05214, over 972162.81 frames.], batch size: 32, lr: 6.62e-04 +2022-05-04 08:12:01,862 INFO [train.py:715] (7/8) Epoch 2, batch 22200, loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03176, over 4786.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2372, pruned_loss=0.05208, over 972113.11 frames.], batch size: 18, lr: 6.62e-04 +2022-05-04 08:12:43,328 INFO [train.py:715] (7/8) Epoch 2, batch 22250, loss[loss=0.1406, simple_loss=0.2098, pruned_loss=0.03569, over 4772.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2364, pruned_loss=0.05171, over 971736.61 frames.], batch size: 17, lr: 6.61e-04 +2022-05-04 08:13:24,131 INFO [train.py:715] (7/8) Epoch 2, batch 22300, loss[loss=0.2328, simple_loss=0.2943, pruned_loss=0.08565, over 4800.00 frames.], tot_loss[loss=0.1703, simple_loss=0.237, pruned_loss=0.05177, over 972152.16 frames.], batch size: 24, lr: 6.61e-04 +2022-05-04 08:14:05,208 INFO 
[train.py:715] (7/8) Epoch 2, batch 22350, loss[loss=0.1549, simple_loss=0.2289, pruned_loss=0.04045, over 4887.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2374, pruned_loss=0.05274, over 972249.65 frames.], batch size: 16, lr: 6.61e-04 +2022-05-04 08:14:46,096 INFO [train.py:715] (7/8) Epoch 2, batch 22400, loss[loss=0.1923, simple_loss=0.247, pruned_loss=0.06881, over 4882.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2379, pruned_loss=0.05288, over 972239.34 frames.], batch size: 16, lr: 6.61e-04 +2022-05-04 08:15:26,447 INFO [train.py:715] (7/8) Epoch 2, batch 22450, loss[loss=0.1899, simple_loss=0.2636, pruned_loss=0.05814, over 4942.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2378, pruned_loss=0.05245, over 971654.18 frames.], batch size: 21, lr: 6.61e-04 +2022-05-04 08:16:07,661 INFO [train.py:715] (7/8) Epoch 2, batch 22500, loss[loss=0.154, simple_loss=0.2205, pruned_loss=0.04379, over 4947.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2377, pruned_loss=0.05178, over 971808.28 frames.], batch size: 21, lr: 6.61e-04 +2022-05-04 08:16:48,523 INFO [train.py:715] (7/8) Epoch 2, batch 22550, loss[loss=0.1769, simple_loss=0.2537, pruned_loss=0.05004, over 4803.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2378, pruned_loss=0.05163, over 971746.49 frames.], batch size: 21, lr: 6.60e-04 +2022-05-04 08:17:29,226 INFO [train.py:715] (7/8) Epoch 2, batch 22600, loss[loss=0.1597, simple_loss=0.2283, pruned_loss=0.0456, over 4858.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2375, pruned_loss=0.05162, over 972097.53 frames.], batch size: 20, lr: 6.60e-04 +2022-05-04 08:18:09,994 INFO [train.py:715] (7/8) Epoch 2, batch 22650, loss[loss=0.212, simple_loss=0.267, pruned_loss=0.07852, over 4911.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2368, pruned_loss=0.05109, over 971480.50 frames.], batch size: 39, lr: 6.60e-04 +2022-05-04 08:18:50,673 INFO [train.py:715] (7/8) Epoch 2, batch 22700, loss[loss=0.1928, simple_loss=0.2522, pruned_loss=0.06667, over 4941.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2364, pruned_loss=0.05097, over 972088.20 frames.], batch size: 21, lr: 6.60e-04 +2022-05-04 08:19:31,398 INFO [train.py:715] (7/8) Epoch 2, batch 22750, loss[loss=0.2322, simple_loss=0.3131, pruned_loss=0.07565, over 4686.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2374, pruned_loss=0.05155, over 971097.81 frames.], batch size: 15, lr: 6.60e-04 +2022-05-04 08:20:12,250 INFO [train.py:715] (7/8) Epoch 2, batch 22800, loss[loss=0.1947, simple_loss=0.2486, pruned_loss=0.07041, over 4854.00 frames.], tot_loss[loss=0.17, simple_loss=0.2369, pruned_loss=0.05156, over 971854.82 frames.], batch size: 32, lr: 6.59e-04 +2022-05-04 08:20:53,301 INFO [train.py:715] (7/8) Epoch 2, batch 22850, loss[loss=0.1316, simple_loss=0.2029, pruned_loss=0.03014, over 4853.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2379, pruned_loss=0.05211, over 971701.75 frames.], batch size: 13, lr: 6.59e-04 +2022-05-04 08:21:34,652 INFO [train.py:715] (7/8) Epoch 2, batch 22900, loss[loss=0.1622, simple_loss=0.2292, pruned_loss=0.04755, over 4792.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2374, pruned_loss=0.05211, over 971511.10 frames.], batch size: 24, lr: 6.59e-04 +2022-05-04 08:22:15,456 INFO [train.py:715] (7/8) Epoch 2, batch 22950, loss[loss=0.159, simple_loss=0.2344, pruned_loss=0.04185, over 4743.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2371, pruned_loss=0.05166, over 971685.46 frames.], batch size: 19, lr: 6.59e-04 +2022-05-04 08:22:56,053 INFO [train.py:715] (7/8) Epoch 
2, batch 23000, loss[loss=0.202, simple_loss=0.274, pruned_loss=0.06505, over 4920.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2367, pruned_loss=0.05132, over 971524.98 frames.], batch size: 23, lr: 6.59e-04 +2022-05-04 08:23:37,064 INFO [train.py:715] (7/8) Epoch 2, batch 23050, loss[loss=0.1455, simple_loss=0.2155, pruned_loss=0.03773, over 4930.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2377, pruned_loss=0.05204, over 971643.25 frames.], batch size: 23, lr: 6.59e-04 +2022-05-04 08:24:17,902 INFO [train.py:715] (7/8) Epoch 2, batch 23100, loss[loss=0.1585, simple_loss=0.2351, pruned_loss=0.04099, over 4970.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2369, pruned_loss=0.05163, over 971955.63 frames.], batch size: 28, lr: 6.58e-04 +2022-05-04 08:24:58,405 INFO [train.py:715] (7/8) Epoch 2, batch 23150, loss[loss=0.207, simple_loss=0.262, pruned_loss=0.07594, over 4811.00 frames.], tot_loss[loss=0.17, simple_loss=0.237, pruned_loss=0.05148, over 972495.20 frames.], batch size: 13, lr: 6.58e-04 +2022-05-04 08:25:39,718 INFO [train.py:715] (7/8) Epoch 2, batch 23200, loss[loss=0.1529, simple_loss=0.224, pruned_loss=0.0409, over 4810.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2364, pruned_loss=0.05086, over 972495.65 frames.], batch size: 21, lr: 6.58e-04 +2022-05-04 08:26:20,399 INFO [train.py:715] (7/8) Epoch 2, batch 23250, loss[loss=0.1663, simple_loss=0.2511, pruned_loss=0.04079, over 4918.00 frames.], tot_loss[loss=0.169, simple_loss=0.2366, pruned_loss=0.0507, over 972599.92 frames.], batch size: 18, lr: 6.58e-04 +2022-05-04 08:27:00,751 INFO [train.py:715] (7/8) Epoch 2, batch 23300, loss[loss=0.1834, simple_loss=0.2451, pruned_loss=0.06083, over 4917.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2362, pruned_loss=0.05058, over 972511.36 frames.], batch size: 17, lr: 6.58e-04 +2022-05-04 08:27:41,450 INFO [train.py:715] (7/8) Epoch 2, batch 23350, loss[loss=0.1519, simple_loss=0.2119, pruned_loss=0.04591, over 4653.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2365, pruned_loss=0.05117, over 973029.42 frames.], batch size: 13, lr: 6.57e-04 +2022-05-04 08:28:22,396 INFO [train.py:715] (7/8) Epoch 2, batch 23400, loss[loss=0.1823, simple_loss=0.2417, pruned_loss=0.06152, over 4749.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2364, pruned_loss=0.05133, over 972842.06 frames.], batch size: 19, lr: 6.57e-04 +2022-05-04 08:29:03,329 INFO [train.py:715] (7/8) Epoch 2, batch 23450, loss[loss=0.1468, simple_loss=0.2272, pruned_loss=0.0332, over 4986.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2359, pruned_loss=0.05121, over 972732.09 frames.], batch size: 25, lr: 6.57e-04 +2022-05-04 08:29:43,623 INFO [train.py:715] (7/8) Epoch 2, batch 23500, loss[loss=0.1532, simple_loss=0.2196, pruned_loss=0.04341, over 4784.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2344, pruned_loss=0.05059, over 972929.48 frames.], batch size: 18, lr: 6.57e-04 +2022-05-04 08:30:24,813 INFO [train.py:715] (7/8) Epoch 2, batch 23550, loss[loss=0.166, simple_loss=0.2354, pruned_loss=0.04833, over 4861.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2357, pruned_loss=0.05079, over 973023.35 frames.], batch size: 16, lr: 6.57e-04 +2022-05-04 08:31:05,706 INFO [train.py:715] (7/8) Epoch 2, batch 23600, loss[loss=0.1665, simple_loss=0.2322, pruned_loss=0.05042, over 4920.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2356, pruned_loss=0.05086, over 973066.04 frames.], batch size: 23, lr: 6.57e-04 +2022-05-04 08:31:45,440 INFO [train.py:715] (7/8) Epoch 2, batch 23650, 
loss[loss=0.1456, simple_loss=0.2166, pruned_loss=0.03728, over 4773.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2363, pruned_loss=0.05124, over 972233.02 frames.], batch size: 17, lr: 6.56e-04 +2022-05-04 08:32:27,513 INFO [train.py:715] (7/8) Epoch 2, batch 23700, loss[loss=0.1537, simple_loss=0.2231, pruned_loss=0.04212, over 4814.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2365, pruned_loss=0.05115, over 971966.26 frames.], batch size: 21, lr: 6.56e-04 +2022-05-04 08:33:07,933 INFO [train.py:715] (7/8) Epoch 2, batch 23750, loss[loss=0.165, simple_loss=0.2351, pruned_loss=0.04746, over 4969.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2362, pruned_loss=0.05131, over 972644.17 frames.], batch size: 28, lr: 6.56e-04 +2022-05-04 08:33:48,792 INFO [train.py:715] (7/8) Epoch 2, batch 23800, loss[loss=0.1702, simple_loss=0.2406, pruned_loss=0.04989, over 4955.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2359, pruned_loss=0.05068, over 972882.46 frames.], batch size: 35, lr: 6.56e-04 +2022-05-04 08:34:29,266 INFO [train.py:715] (7/8) Epoch 2, batch 23850, loss[loss=0.139, simple_loss=0.2074, pruned_loss=0.03529, over 4987.00 frames.], tot_loss[loss=0.17, simple_loss=0.2373, pruned_loss=0.0514, over 972550.72 frames.], batch size: 28, lr: 6.56e-04 +2022-05-04 08:35:10,700 INFO [train.py:715] (7/8) Epoch 2, batch 23900, loss[loss=0.1361, simple_loss=0.1988, pruned_loss=0.03666, over 4830.00 frames.], tot_loss[loss=0.169, simple_loss=0.2361, pruned_loss=0.05096, over 972818.70 frames.], batch size: 30, lr: 6.56e-04 +2022-05-04 08:35:51,720 INFO [train.py:715] (7/8) Epoch 2, batch 23950, loss[loss=0.1621, simple_loss=0.234, pruned_loss=0.04508, over 4749.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2364, pruned_loss=0.05106, over 972810.20 frames.], batch size: 19, lr: 6.55e-04 +2022-05-04 08:36:31,649 INFO [train.py:715] (7/8) Epoch 2, batch 24000, loss[loss=0.1799, simple_loss=0.2478, pruned_loss=0.05598, over 4990.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2357, pruned_loss=0.0504, over 972964.13 frames.], batch size: 26, lr: 6.55e-04 +2022-05-04 08:36:31,650 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 08:36:40,333 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1168, simple_loss=0.2032, pruned_loss=0.01518, over 914524.00 frames. 
+2022-05-04 08:37:20,481 INFO [train.py:715] (7/8) Epoch 2, batch 24050, loss[loss=0.153, simple_loss=0.2232, pruned_loss=0.04139, over 4950.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2344, pruned_loss=0.04997, over 973013.25 frames.], batch size: 29, lr: 6.55e-04 +2022-05-04 08:38:01,992 INFO [train.py:715] (7/8) Epoch 2, batch 24100, loss[loss=0.1652, simple_loss=0.2228, pruned_loss=0.05385, over 4983.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2362, pruned_loss=0.05113, over 973384.60 frames.], batch size: 15, lr: 6.55e-04 +2022-05-04 08:38:42,997 INFO [train.py:715] (7/8) Epoch 2, batch 24150, loss[loss=0.2056, simple_loss=0.2791, pruned_loss=0.06603, over 4895.00 frames.], tot_loss[loss=0.169, simple_loss=0.2362, pruned_loss=0.05086, over 972747.77 frames.], batch size: 19, lr: 6.55e-04 +2022-05-04 08:39:24,315 INFO [train.py:715] (7/8) Epoch 2, batch 24200, loss[loss=0.168, simple_loss=0.2389, pruned_loss=0.04856, over 4923.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2351, pruned_loss=0.05027, over 972500.52 frames.], batch size: 23, lr: 6.55e-04 +2022-05-04 08:40:05,200 INFO [train.py:715] (7/8) Epoch 2, batch 24250, loss[loss=0.1891, simple_loss=0.2491, pruned_loss=0.06454, over 4947.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2354, pruned_loss=0.05053, over 972496.65 frames.], batch size: 35, lr: 6.54e-04 +2022-05-04 08:40:46,097 INFO [train.py:715] (7/8) Epoch 2, batch 24300, loss[loss=0.1767, simple_loss=0.2427, pruned_loss=0.05541, over 4731.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2349, pruned_loss=0.05018, over 973364.81 frames.], batch size: 16, lr: 6.54e-04 +2022-05-04 08:41:26,663 INFO [train.py:715] (7/8) Epoch 2, batch 24350, loss[loss=0.175, simple_loss=0.246, pruned_loss=0.05198, over 4838.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2352, pruned_loss=0.0501, over 973121.27 frames.], batch size: 26, lr: 6.54e-04 +2022-05-04 08:42:06,525 INFO [train.py:715] (7/8) Epoch 2, batch 24400, loss[loss=0.1529, simple_loss=0.2207, pruned_loss=0.04254, over 4871.00 frames.], tot_loss[loss=0.168, simple_loss=0.235, pruned_loss=0.05052, over 973090.37 frames.], batch size: 16, lr: 6.54e-04 +2022-05-04 08:42:47,543 INFO [train.py:715] (7/8) Epoch 2, batch 24450, loss[loss=0.1599, simple_loss=0.2308, pruned_loss=0.04447, over 4780.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2361, pruned_loss=0.05121, over 972319.68 frames.], batch size: 18, lr: 6.54e-04 +2022-05-04 08:43:27,489 INFO [train.py:715] (7/8) Epoch 2, batch 24500, loss[loss=0.135, simple_loss=0.2051, pruned_loss=0.0324, over 4767.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2353, pruned_loss=0.05073, over 972481.88 frames.], batch size: 12, lr: 6.53e-04 +2022-05-04 08:44:07,369 INFO [train.py:715] (7/8) Epoch 2, batch 24550, loss[loss=0.1602, simple_loss=0.2317, pruned_loss=0.04438, over 4690.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2359, pruned_loss=0.05112, over 972304.55 frames.], batch size: 15, lr: 6.53e-04 +2022-05-04 08:44:46,876 INFO [train.py:715] (7/8) Epoch 2, batch 24600, loss[loss=0.1529, simple_loss=0.2297, pruned_loss=0.03808, over 4845.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2365, pruned_loss=0.05155, over 973147.45 frames.], batch size: 13, lr: 6.53e-04 +2022-05-04 08:45:27,058 INFO [train.py:715] (7/8) Epoch 2, batch 24650, loss[loss=0.1671, simple_loss=0.245, pruned_loss=0.04459, over 4961.00 frames.], tot_loss[loss=0.1706, simple_loss=0.2371, pruned_loss=0.05205, over 971664.74 frames.], batch size: 24, lr: 6.53e-04 +2022-05-04 08:46:06,412 INFO 
[train.py:715] (7/8) Epoch 2, batch 24700, loss[loss=0.1795, simple_loss=0.2404, pruned_loss=0.05926, over 4836.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2374, pruned_loss=0.05249, over 971152.48 frames.], batch size: 30, lr: 6.53e-04 +2022-05-04 08:46:45,152 INFO [train.py:715] (7/8) Epoch 2, batch 24750, loss[loss=0.1637, simple_loss=0.2287, pruned_loss=0.04931, over 4687.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2378, pruned_loss=0.05232, over 971500.81 frames.], batch size: 15, lr: 6.53e-04 +2022-05-04 08:47:24,975 INFO [train.py:715] (7/8) Epoch 2, batch 24800, loss[loss=0.1613, simple_loss=0.2301, pruned_loss=0.04627, over 4976.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2386, pruned_loss=0.05282, over 971801.22 frames.], batch size: 28, lr: 6.52e-04 +2022-05-04 08:48:04,570 INFO [train.py:715] (7/8) Epoch 2, batch 24850, loss[loss=0.1856, simple_loss=0.2529, pruned_loss=0.05915, over 4782.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2374, pruned_loss=0.05204, over 971346.91 frames.], batch size: 18, lr: 6.52e-04 +2022-05-04 08:48:43,459 INFO [train.py:715] (7/8) Epoch 2, batch 24900, loss[loss=0.1501, simple_loss=0.2226, pruned_loss=0.03877, over 4796.00 frames.], tot_loss[loss=0.171, simple_loss=0.2377, pruned_loss=0.05216, over 971164.29 frames.], batch size: 18, lr: 6.52e-04 +2022-05-04 08:49:22,928 INFO [train.py:715] (7/8) Epoch 2, batch 24950, loss[loss=0.1789, simple_loss=0.2551, pruned_loss=0.05129, over 4952.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2371, pruned_loss=0.05217, over 971065.22 frames.], batch size: 24, lr: 6.52e-04 +2022-05-04 08:50:02,459 INFO [train.py:715] (7/8) Epoch 2, batch 25000, loss[loss=0.1495, simple_loss=0.2051, pruned_loss=0.04696, over 4799.00 frames.], tot_loss[loss=0.171, simple_loss=0.2372, pruned_loss=0.0524, over 971255.92 frames.], batch size: 12, lr: 6.52e-04 +2022-05-04 08:50:41,257 INFO [train.py:715] (7/8) Epoch 2, batch 25050, loss[loss=0.1915, simple_loss=0.2547, pruned_loss=0.06415, over 4843.00 frames.], tot_loss[loss=0.1715, simple_loss=0.238, pruned_loss=0.05247, over 971292.45 frames.], batch size: 30, lr: 6.52e-04 +2022-05-04 08:51:19,781 INFO [train.py:715] (7/8) Epoch 2, batch 25100, loss[loss=0.1801, simple_loss=0.2427, pruned_loss=0.05876, over 4806.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2389, pruned_loss=0.05321, over 970746.30 frames.], batch size: 25, lr: 6.51e-04 +2022-05-04 08:51:59,035 INFO [train.py:715] (7/8) Epoch 2, batch 25150, loss[loss=0.1532, simple_loss=0.2209, pruned_loss=0.04274, over 4841.00 frames.], tot_loss[loss=0.1718, simple_loss=0.238, pruned_loss=0.05278, over 971627.14 frames.], batch size: 15, lr: 6.51e-04 +2022-05-04 08:52:37,847 INFO [train.py:715] (7/8) Epoch 2, batch 25200, loss[loss=0.138, simple_loss=0.2216, pruned_loss=0.02723, over 4805.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2378, pruned_loss=0.0525, over 971308.51 frames.], batch size: 21, lr: 6.51e-04 +2022-05-04 08:53:16,882 INFO [train.py:715] (7/8) Epoch 2, batch 25250, loss[loss=0.1928, simple_loss=0.273, pruned_loss=0.05628, over 4687.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2387, pruned_loss=0.05295, over 971547.03 frames.], batch size: 15, lr: 6.51e-04 +2022-05-04 08:53:55,854 INFO [train.py:715] (7/8) Epoch 2, batch 25300, loss[loss=0.2094, simple_loss=0.2676, pruned_loss=0.07564, over 4787.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2381, pruned_loss=0.05247, over 971357.12 frames.], batch size: 14, lr: 6.51e-04 +2022-05-04 08:54:35,069 INFO [train.py:715] (7/8) Epoch 
2, batch 25350, loss[loss=0.1849, simple_loss=0.2441, pruned_loss=0.06282, over 4981.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2382, pruned_loss=0.05296, over 971424.81 frames.], batch size: 25, lr: 6.51e-04 +2022-05-04 08:55:14,143 INFO [train.py:715] (7/8) Epoch 2, batch 25400, loss[loss=0.177, simple_loss=0.2354, pruned_loss=0.05932, over 4835.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2363, pruned_loss=0.0516, over 971681.89 frames.], batch size: 15, lr: 6.50e-04 +2022-05-04 08:55:52,996 INFO [train.py:715] (7/8) Epoch 2, batch 25450, loss[loss=0.1579, simple_loss=0.2362, pruned_loss=0.03985, over 4752.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2371, pruned_loss=0.05188, over 971764.29 frames.], batch size: 16, lr: 6.50e-04 +2022-05-04 08:56:32,024 INFO [train.py:715] (7/8) Epoch 2, batch 25500, loss[loss=0.1726, simple_loss=0.241, pruned_loss=0.0521, over 4702.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2379, pruned_loss=0.0526, over 972356.71 frames.], batch size: 15, lr: 6.50e-04 +2022-05-04 08:57:11,301 INFO [train.py:715] (7/8) Epoch 2, batch 25550, loss[loss=0.1601, simple_loss=0.2236, pruned_loss=0.04827, over 4887.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2371, pruned_loss=0.05225, over 971757.41 frames.], batch size: 17, lr: 6.50e-04 +2022-05-04 08:57:50,306 INFO [train.py:715] (7/8) Epoch 2, batch 25600, loss[loss=0.1994, simple_loss=0.2715, pruned_loss=0.06362, over 4878.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2374, pruned_loss=0.05211, over 971552.89 frames.], batch size: 16, lr: 6.50e-04 +2022-05-04 08:58:29,648 INFO [train.py:715] (7/8) Epoch 2, batch 25650, loss[loss=0.1578, simple_loss=0.2237, pruned_loss=0.04597, over 4880.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2373, pruned_loss=0.05191, over 972763.29 frames.], batch size: 16, lr: 6.50e-04 +2022-05-04 08:59:09,558 INFO [train.py:715] (7/8) Epoch 2, batch 25700, loss[loss=0.1546, simple_loss=0.2077, pruned_loss=0.05074, over 4873.00 frames.], tot_loss[loss=0.171, simple_loss=0.2374, pruned_loss=0.05232, over 972946.84 frames.], batch size: 20, lr: 6.49e-04 +2022-05-04 08:59:48,690 INFO [train.py:715] (7/8) Epoch 2, batch 25750, loss[loss=0.1716, simple_loss=0.2404, pruned_loss=0.05135, over 4790.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2366, pruned_loss=0.05199, over 972979.19 frames.], batch size: 14, lr: 6.49e-04 +2022-05-04 09:00:27,442 INFO [train.py:715] (7/8) Epoch 2, batch 25800, loss[loss=0.1463, simple_loss=0.2259, pruned_loss=0.03342, over 4872.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2362, pruned_loss=0.05144, over 972953.37 frames.], batch size: 16, lr: 6.49e-04 +2022-05-04 09:01:06,416 INFO [train.py:715] (7/8) Epoch 2, batch 25850, loss[loss=0.1512, simple_loss=0.2134, pruned_loss=0.04445, over 4818.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2362, pruned_loss=0.05106, over 972698.62 frames.], batch size: 25, lr: 6.49e-04 +2022-05-04 09:01:46,178 INFO [train.py:715] (7/8) Epoch 2, batch 25900, loss[loss=0.1815, simple_loss=0.2359, pruned_loss=0.06353, over 4774.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2365, pruned_loss=0.05096, over 972610.71 frames.], batch size: 14, lr: 6.49e-04 +2022-05-04 09:02:25,989 INFO [train.py:715] (7/8) Epoch 2, batch 25950, loss[loss=0.1622, simple_loss=0.2291, pruned_loss=0.04766, over 4793.00 frames.], tot_loss[loss=0.17, simple_loss=0.2371, pruned_loss=0.05148, over 973125.07 frames.], batch size: 17, lr: 6.49e-04 +2022-05-04 09:03:05,063 INFO [train.py:715] (7/8) Epoch 2, batch 26000, 
loss[loss=0.1625, simple_loss=0.2269, pruned_loss=0.04908, over 4850.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2364, pruned_loss=0.0512, over 973765.11 frames.], batch size: 20, lr: 6.48e-04 +2022-05-04 09:03:44,736 INFO [train.py:715] (7/8) Epoch 2, batch 26050, loss[loss=0.1749, simple_loss=0.2431, pruned_loss=0.05337, over 4768.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2356, pruned_loss=0.05095, over 971786.82 frames.], batch size: 17, lr: 6.48e-04 +2022-05-04 09:04:24,307 INFO [train.py:715] (7/8) Epoch 2, batch 26100, loss[loss=0.2291, simple_loss=0.2953, pruned_loss=0.08145, over 4922.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2362, pruned_loss=0.05135, over 971812.33 frames.], batch size: 18, lr: 6.48e-04 +2022-05-04 09:05:03,481 INFO [train.py:715] (7/8) Epoch 2, batch 26150, loss[loss=0.1664, simple_loss=0.2306, pruned_loss=0.0511, over 4810.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2356, pruned_loss=0.05089, over 971799.53 frames.], batch size: 21, lr: 6.48e-04 +2022-05-04 09:05:42,982 INFO [train.py:715] (7/8) Epoch 2, batch 26200, loss[loss=0.1409, simple_loss=0.2078, pruned_loss=0.03704, over 4957.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2342, pruned_loss=0.05011, over 972316.09 frames.], batch size: 24, lr: 6.48e-04 +2022-05-04 09:06:22,735 INFO [train.py:715] (7/8) Epoch 2, batch 26250, loss[loss=0.1533, simple_loss=0.2183, pruned_loss=0.04414, over 4955.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2354, pruned_loss=0.05105, over 972642.43 frames.], batch size: 14, lr: 6.48e-04 +2022-05-04 09:07:02,322 INFO [train.py:715] (7/8) Epoch 2, batch 26300, loss[loss=0.1454, simple_loss=0.2134, pruned_loss=0.03874, over 4928.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2355, pruned_loss=0.0511, over 972606.92 frames.], batch size: 23, lr: 6.47e-04 +2022-05-04 09:07:40,827 INFO [train.py:715] (7/8) Epoch 2, batch 26350, loss[loss=0.1502, simple_loss=0.2101, pruned_loss=0.04509, over 4816.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2352, pruned_loss=0.05081, over 972236.42 frames.], batch size: 15, lr: 6.47e-04 +2022-05-04 09:08:23,931 INFO [train.py:715] (7/8) Epoch 2, batch 26400, loss[loss=0.2084, simple_loss=0.2811, pruned_loss=0.06789, over 4969.00 frames.], tot_loss[loss=0.169, simple_loss=0.2356, pruned_loss=0.0512, over 972130.68 frames.], batch size: 15, lr: 6.47e-04 +2022-05-04 09:09:03,687 INFO [train.py:715] (7/8) Epoch 2, batch 26450, loss[loss=0.179, simple_loss=0.2395, pruned_loss=0.05929, over 4741.00 frames.], tot_loss[loss=0.1685, simple_loss=0.235, pruned_loss=0.05095, over 972179.90 frames.], batch size: 16, lr: 6.47e-04 +2022-05-04 09:09:42,587 INFO [train.py:715] (7/8) Epoch 2, batch 26500, loss[loss=0.1357, simple_loss=0.2066, pruned_loss=0.03242, over 4749.00 frames.], tot_loss[loss=0.169, simple_loss=0.2358, pruned_loss=0.05115, over 972487.69 frames.], batch size: 16, lr: 6.47e-04 +2022-05-04 09:10:22,394 INFO [train.py:715] (7/8) Epoch 2, batch 26550, loss[loss=0.1449, simple_loss=0.2137, pruned_loss=0.038, over 4761.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2351, pruned_loss=0.05099, over 971926.26 frames.], batch size: 19, lr: 6.46e-04 +2022-05-04 09:11:02,374 INFO [train.py:715] (7/8) Epoch 2, batch 26600, loss[loss=0.2073, simple_loss=0.2725, pruned_loss=0.07104, over 4810.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2346, pruned_loss=0.05023, over 972772.86 frames.], batch size: 25, lr: 6.46e-04 +2022-05-04 09:11:41,996 INFO [train.py:715] (7/8) Epoch 2, batch 26650, loss[loss=0.19, 
simple_loss=0.2544, pruned_loss=0.06287, over 4775.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2348, pruned_loss=0.05049, over 972758.60 frames.], batch size: 18, lr: 6.46e-04 +2022-05-04 09:12:21,006 INFO [train.py:715] (7/8) Epoch 2, batch 26700, loss[loss=0.1511, simple_loss=0.23, pruned_loss=0.03608, over 4823.00 frames.], tot_loss[loss=0.169, simple_loss=0.2356, pruned_loss=0.05122, over 973472.04 frames.], batch size: 26, lr: 6.46e-04 +2022-05-04 09:13:00,969 INFO [train.py:715] (7/8) Epoch 2, batch 26750, loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04127, over 4897.00 frames.], tot_loss[loss=0.17, simple_loss=0.2364, pruned_loss=0.05178, over 973214.53 frames.], batch size: 19, lr: 6.46e-04 +2022-05-04 09:13:40,197 INFO [train.py:715] (7/8) Epoch 2, batch 26800, loss[loss=0.1698, simple_loss=0.2371, pruned_loss=0.05128, over 4954.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2365, pruned_loss=0.05128, over 973263.94 frames.], batch size: 21, lr: 6.46e-04 +2022-05-04 09:14:19,185 INFO [train.py:715] (7/8) Epoch 2, batch 26850, loss[loss=0.2003, simple_loss=0.2503, pruned_loss=0.07516, over 4790.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2362, pruned_loss=0.05127, over 971822.82 frames.], batch size: 17, lr: 6.45e-04 +2022-05-04 09:14:58,116 INFO [train.py:715] (7/8) Epoch 2, batch 26900, loss[loss=0.1764, simple_loss=0.2471, pruned_loss=0.05291, over 4976.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2355, pruned_loss=0.05046, over 971756.73 frames.], batch size: 39, lr: 6.45e-04 +2022-05-04 09:15:37,583 INFO [train.py:715] (7/8) Epoch 2, batch 26950, loss[loss=0.1585, simple_loss=0.2234, pruned_loss=0.0468, over 4786.00 frames.], tot_loss[loss=0.168, simple_loss=0.2352, pruned_loss=0.05038, over 972131.88 frames.], batch size: 14, lr: 6.45e-04 +2022-05-04 09:16:16,468 INFO [train.py:715] (7/8) Epoch 2, batch 27000, loss[loss=0.1626, simple_loss=0.2298, pruned_loss=0.04774, over 4989.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2353, pruned_loss=0.05059, over 971763.40 frames.], batch size: 16, lr: 6.45e-04 +2022-05-04 09:16:16,469 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 09:16:25,254 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1164, simple_loss=0.2027, pruned_loss=0.01502, over 914524.00 frames. 
+2022-05-04 09:17:03,621 INFO [train.py:715] (7/8) Epoch 2, batch 27050, loss[loss=0.1756, simple_loss=0.2603, pruned_loss=0.04541, over 4807.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2352, pruned_loss=0.05068, over 972653.02 frames.], batch size: 25, lr: 6.45e-04 +2022-05-04 09:17:42,886 INFO [train.py:715] (7/8) Epoch 2, batch 27100, loss[loss=0.1729, simple_loss=0.2459, pruned_loss=0.04995, over 4800.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2372, pruned_loss=0.05145, over 972683.88 frames.], batch size: 21, lr: 6.45e-04 +2022-05-04 09:18:22,887 INFO [train.py:715] (7/8) Epoch 2, batch 27150, loss[loss=0.1759, simple_loss=0.2376, pruned_loss=0.05708, over 4787.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2362, pruned_loss=0.05099, over 972073.21 frames.], batch size: 18, lr: 6.44e-04 +2022-05-04 09:19:02,268 INFO [train.py:715] (7/8) Epoch 2, batch 27200, loss[loss=0.1688, simple_loss=0.2402, pruned_loss=0.04864, over 4979.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2371, pruned_loss=0.05166, over 972774.85 frames.], batch size: 39, lr: 6.44e-04 +2022-05-04 09:19:41,123 INFO [train.py:715] (7/8) Epoch 2, batch 27250, loss[loss=0.1446, simple_loss=0.1988, pruned_loss=0.04518, over 4774.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2381, pruned_loss=0.05179, over 973735.62 frames.], batch size: 12, lr: 6.44e-04 +2022-05-04 09:20:20,688 INFO [train.py:715] (7/8) Epoch 2, batch 27300, loss[loss=0.201, simple_loss=0.2624, pruned_loss=0.06981, over 4847.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2381, pruned_loss=0.05151, over 973316.15 frames.], batch size: 20, lr: 6.44e-04 +2022-05-04 09:20:59,725 INFO [train.py:715] (7/8) Epoch 2, batch 27350, loss[loss=0.1713, simple_loss=0.2451, pruned_loss=0.04872, over 4945.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2373, pruned_loss=0.05112, over 972075.39 frames.], batch size: 21, lr: 6.44e-04 +2022-05-04 09:21:38,800 INFO [train.py:715] (7/8) Epoch 2, batch 27400, loss[loss=0.1678, simple_loss=0.235, pruned_loss=0.05023, over 4885.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2362, pruned_loss=0.05078, over 972807.72 frames.], batch size: 22, lr: 6.44e-04 +2022-05-04 09:22:17,483 INFO [train.py:715] (7/8) Epoch 2, batch 27450, loss[loss=0.1269, simple_loss=0.1926, pruned_loss=0.03058, over 4935.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2357, pruned_loss=0.0508, over 973015.43 frames.], batch size: 21, lr: 6.44e-04 +2022-05-04 09:22:57,210 INFO [train.py:715] (7/8) Epoch 2, batch 27500, loss[loss=0.1898, simple_loss=0.2412, pruned_loss=0.06924, over 4842.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2365, pruned_loss=0.05145, over 972859.26 frames.], batch size: 32, lr: 6.43e-04 +2022-05-04 09:23:37,096 INFO [train.py:715] (7/8) Epoch 2, batch 27550, loss[loss=0.1866, simple_loss=0.2522, pruned_loss=0.06051, over 4971.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2369, pruned_loss=0.0517, over 973070.57 frames.], batch size: 14, lr: 6.43e-04 +2022-05-04 09:24:16,423 INFO [train.py:715] (7/8) Epoch 2, batch 27600, loss[loss=0.1614, simple_loss=0.2411, pruned_loss=0.04083, over 4975.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2377, pruned_loss=0.0518, over 973484.53 frames.], batch size: 24, lr: 6.43e-04 +2022-05-04 09:24:55,999 INFO [train.py:715] (7/8) Epoch 2, batch 27650, loss[loss=0.1582, simple_loss=0.2146, pruned_loss=0.05088, over 4817.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2378, pruned_loss=0.05193, over 973435.29 frames.], batch size: 26, lr: 6.43e-04 +2022-05-04 
09:25:36,594 INFO [train.py:715] (7/8) Epoch 2, batch 27700, loss[loss=0.1452, simple_loss=0.2135, pruned_loss=0.03843, over 4915.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2373, pruned_loss=0.05204, over 973843.99 frames.], batch size: 19, lr: 6.43e-04 +2022-05-04 09:26:16,923 INFO [train.py:715] (7/8) Epoch 2, batch 27750, loss[loss=0.2032, simple_loss=0.2568, pruned_loss=0.07481, over 4762.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2388, pruned_loss=0.05304, over 973825.58 frames.], batch size: 14, lr: 6.43e-04 +2022-05-04 09:26:56,307 INFO [train.py:715] (7/8) Epoch 2, batch 27800, loss[loss=0.174, simple_loss=0.249, pruned_loss=0.04954, over 4986.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2376, pruned_loss=0.0524, over 973697.72 frames.], batch size: 25, lr: 6.42e-04 +2022-05-04 09:27:36,597 INFO [train.py:715] (7/8) Epoch 2, batch 27850, loss[loss=0.1471, simple_loss=0.2117, pruned_loss=0.04132, over 4890.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2367, pruned_loss=0.05151, over 973371.85 frames.], batch size: 22, lr: 6.42e-04 +2022-05-04 09:28:15,904 INFO [train.py:715] (7/8) Epoch 2, batch 27900, loss[loss=0.1899, simple_loss=0.2464, pruned_loss=0.06671, over 4865.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2358, pruned_loss=0.05132, over 972888.00 frames.], batch size: 20, lr: 6.42e-04 +2022-05-04 09:28:55,087 INFO [train.py:715] (7/8) Epoch 2, batch 27950, loss[loss=0.2073, simple_loss=0.2679, pruned_loss=0.07331, over 4921.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2368, pruned_loss=0.0518, over 972972.07 frames.], batch size: 18, lr: 6.42e-04 +2022-05-04 09:29:34,669 INFO [train.py:715] (7/8) Epoch 2, batch 28000, loss[loss=0.1345, simple_loss=0.1945, pruned_loss=0.03722, over 4902.00 frames.], tot_loss[loss=0.1705, simple_loss=0.237, pruned_loss=0.05197, over 973218.24 frames.], batch size: 19, lr: 6.42e-04 +2022-05-04 09:30:15,046 INFO [train.py:715] (7/8) Epoch 2, batch 28050, loss[loss=0.1768, simple_loss=0.2374, pruned_loss=0.05813, over 4870.00 frames.], tot_loss[loss=0.1706, simple_loss=0.237, pruned_loss=0.05206, over 972601.67 frames.], batch size: 38, lr: 6.42e-04 +2022-05-04 09:30:54,022 INFO [train.py:715] (7/8) Epoch 2, batch 28100, loss[loss=0.1708, simple_loss=0.2323, pruned_loss=0.05465, over 4793.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2368, pruned_loss=0.05199, over 972505.27 frames.], batch size: 14, lr: 6.41e-04 +2022-05-04 09:31:33,562 INFO [train.py:715] (7/8) Epoch 2, batch 28150, loss[loss=0.1336, simple_loss=0.2069, pruned_loss=0.03012, over 4896.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2356, pruned_loss=0.05097, over 972813.36 frames.], batch size: 17, lr: 6.41e-04 +2022-05-04 09:32:13,299 INFO [train.py:715] (7/8) Epoch 2, batch 28200, loss[loss=0.1814, simple_loss=0.2441, pruned_loss=0.05929, over 4855.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2358, pruned_loss=0.05067, over 972417.21 frames.], batch size: 30, lr: 6.41e-04 +2022-05-04 09:32:52,903 INFO [train.py:715] (7/8) Epoch 2, batch 28250, loss[loss=0.1641, simple_loss=0.2372, pruned_loss=0.04554, over 4835.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2349, pruned_loss=0.04981, over 972461.37 frames.], batch size: 13, lr: 6.41e-04 +2022-05-04 09:33:31,980 INFO [train.py:715] (7/8) Epoch 2, batch 28300, loss[loss=0.1683, simple_loss=0.2362, pruned_loss=0.05023, over 4746.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2346, pruned_loss=0.04942, over 972165.59 frames.], batch size: 16, lr: 6.41e-04 +2022-05-04 09:34:11,321 INFO 
[train.py:715] (7/8) Epoch 2, batch 28350, loss[loss=0.1769, simple_loss=0.2492, pruned_loss=0.05233, over 4814.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2362, pruned_loss=0.05054, over 971728.73 frames.], batch size: 21, lr: 6.41e-04 +2022-05-04 09:34:51,510 INFO [train.py:715] (7/8) Epoch 2, batch 28400, loss[loss=0.1606, simple_loss=0.2318, pruned_loss=0.04469, over 4779.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2376, pruned_loss=0.05166, over 970961.84 frames.], batch size: 12, lr: 6.40e-04 +2022-05-04 09:35:30,762 INFO [train.py:715] (7/8) Epoch 2, batch 28450, loss[loss=0.1504, simple_loss=0.2294, pruned_loss=0.03575, over 4904.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2368, pruned_loss=0.05071, over 971466.72 frames.], batch size: 17, lr: 6.40e-04 +2022-05-04 09:36:10,162 INFO [train.py:715] (7/8) Epoch 2, batch 28500, loss[loss=0.1739, simple_loss=0.2425, pruned_loss=0.05265, over 4918.00 frames.], tot_loss[loss=0.169, simple_loss=0.2365, pruned_loss=0.05077, over 971008.54 frames.], batch size: 29, lr: 6.40e-04 +2022-05-04 09:36:50,111 INFO [train.py:715] (7/8) Epoch 2, batch 28550, loss[loss=0.1967, simple_loss=0.265, pruned_loss=0.0642, over 4982.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2372, pruned_loss=0.05114, over 971120.75 frames.], batch size: 14, lr: 6.40e-04 +2022-05-04 09:37:30,239 INFO [train.py:715] (7/8) Epoch 2, batch 28600, loss[loss=0.1669, simple_loss=0.2444, pruned_loss=0.0447, over 4954.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2378, pruned_loss=0.05117, over 971865.74 frames.], batch size: 39, lr: 6.40e-04 +2022-05-04 09:38:09,274 INFO [train.py:715] (7/8) Epoch 2, batch 28650, loss[loss=0.1531, simple_loss=0.217, pruned_loss=0.04463, over 4801.00 frames.], tot_loss[loss=0.1692, simple_loss=0.237, pruned_loss=0.05073, over 971562.74 frames.], batch size: 21, lr: 6.40e-04 +2022-05-04 09:38:49,123 INFO [train.py:715] (7/8) Epoch 2, batch 28700, loss[loss=0.1492, simple_loss=0.2297, pruned_loss=0.03435, over 4859.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2367, pruned_loss=0.05076, over 972708.06 frames.], batch size: 20, lr: 6.39e-04 +2022-05-04 09:39:29,585 INFO [train.py:715] (7/8) Epoch 2, batch 28750, loss[loss=0.168, simple_loss=0.2307, pruned_loss=0.05269, over 4788.00 frames.], tot_loss[loss=0.168, simple_loss=0.2359, pruned_loss=0.0501, over 972740.31 frames.], batch size: 18, lr: 6.39e-04 +2022-05-04 09:40:08,513 INFO [train.py:715] (7/8) Epoch 2, batch 28800, loss[loss=0.1808, simple_loss=0.2497, pruned_loss=0.05596, over 4974.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2351, pruned_loss=0.04964, over 972726.83 frames.], batch size: 25, lr: 6.39e-04 +2022-05-04 09:40:48,105 INFO [train.py:715] (7/8) Epoch 2, batch 28850, loss[loss=0.1571, simple_loss=0.2203, pruned_loss=0.04698, over 4973.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2354, pruned_loss=0.04987, over 973006.02 frames.], batch size: 25, lr: 6.39e-04 +2022-05-04 09:41:28,110 INFO [train.py:715] (7/8) Epoch 2, batch 28900, loss[loss=0.1738, simple_loss=0.2269, pruned_loss=0.06037, over 4955.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2355, pruned_loss=0.05039, over 973356.07 frames.], batch size: 35, lr: 6.39e-04 +2022-05-04 09:42:07,488 INFO [train.py:715] (7/8) Epoch 2, batch 28950, loss[loss=0.1904, simple_loss=0.2574, pruned_loss=0.06169, over 4978.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2363, pruned_loss=0.05102, over 972835.88 frames.], batch size: 25, lr: 6.39e-04 +2022-05-04 09:42:46,860 INFO [train.py:715] (7/8) Epoch 
2, batch 29000, loss[loss=0.1408, simple_loss=0.208, pruned_loss=0.03686, over 4775.00 frames.], tot_loss[loss=0.1684, simple_loss=0.236, pruned_loss=0.05045, over 972030.23 frames.], batch size: 18, lr: 6.38e-04 +2022-05-04 09:43:26,612 INFO [train.py:715] (7/8) Epoch 2, batch 29050, loss[loss=0.1748, simple_loss=0.2331, pruned_loss=0.05826, over 4941.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2361, pruned_loss=0.05069, over 971638.06 frames.], batch size: 29, lr: 6.38e-04 +2022-05-04 09:44:06,291 INFO [train.py:715] (7/8) Epoch 2, batch 29100, loss[loss=0.1638, simple_loss=0.2227, pruned_loss=0.05245, over 4893.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2358, pruned_loss=0.05058, over 970988.07 frames.], batch size: 19, lr: 6.38e-04 +2022-05-04 09:44:45,464 INFO [train.py:715] (7/8) Epoch 2, batch 29150, loss[loss=0.1654, simple_loss=0.2283, pruned_loss=0.05124, over 4870.00 frames.], tot_loss[loss=0.1685, simple_loss=0.236, pruned_loss=0.05048, over 971536.68 frames.], batch size: 16, lr: 6.38e-04 +2022-05-04 09:45:24,950 INFO [train.py:715] (7/8) Epoch 2, batch 29200, loss[loss=0.1447, simple_loss=0.2186, pruned_loss=0.03537, over 4793.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2352, pruned_loss=0.05002, over 971760.84 frames.], batch size: 12, lr: 6.38e-04 +2022-05-04 09:46:05,377 INFO [train.py:715] (7/8) Epoch 2, batch 29250, loss[loss=0.1363, simple_loss=0.2064, pruned_loss=0.03309, over 4776.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2352, pruned_loss=0.05015, over 971098.41 frames.], batch size: 12, lr: 6.38e-04 +2022-05-04 09:46:44,479 INFO [train.py:715] (7/8) Epoch 2, batch 29300, loss[loss=0.1962, simple_loss=0.2751, pruned_loss=0.05866, over 4921.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2355, pruned_loss=0.04999, over 971459.37 frames.], batch size: 29, lr: 6.37e-04 +2022-05-04 09:47:23,250 INFO [train.py:715] (7/8) Epoch 2, batch 29350, loss[loss=0.173, simple_loss=0.2498, pruned_loss=0.04812, over 4911.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2349, pruned_loss=0.04986, over 971616.33 frames.], batch size: 19, lr: 6.37e-04 +2022-05-04 09:48:02,468 INFO [train.py:715] (7/8) Epoch 2, batch 29400, loss[loss=0.1842, simple_loss=0.243, pruned_loss=0.06273, over 4933.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2349, pruned_loss=0.04997, over 971275.92 frames.], batch size: 39, lr: 6.37e-04 +2022-05-04 09:48:41,889 INFO [train.py:715] (7/8) Epoch 2, batch 29450, loss[loss=0.1555, simple_loss=0.2167, pruned_loss=0.04712, over 4847.00 frames.], tot_loss[loss=0.1674, simple_loss=0.235, pruned_loss=0.04987, over 970286.90 frames.], batch size: 30, lr: 6.37e-04 +2022-05-04 09:49:20,755 INFO [train.py:715] (7/8) Epoch 2, batch 29500, loss[loss=0.1914, simple_loss=0.2431, pruned_loss=0.06981, over 4648.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2349, pruned_loss=0.04997, over 971354.25 frames.], batch size: 13, lr: 6.37e-04 +2022-05-04 09:49:59,770 INFO [train.py:715] (7/8) Epoch 2, batch 29550, loss[loss=0.1576, simple_loss=0.2197, pruned_loss=0.04777, over 4803.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2344, pruned_loss=0.04994, over 971082.10 frames.], batch size: 13, lr: 6.37e-04 +2022-05-04 09:50:39,180 INFO [train.py:715] (7/8) Epoch 2, batch 29600, loss[loss=0.1518, simple_loss=0.227, pruned_loss=0.03827, over 4982.00 frames.], tot_loss[loss=0.1673, simple_loss=0.235, pruned_loss=0.04987, over 971822.16 frames.], batch size: 26, lr: 6.37e-04 +2022-05-04 09:51:18,367 INFO [train.py:715] (7/8) Epoch 2, batch 29650, 
loss[loss=0.1842, simple_loss=0.2373, pruned_loss=0.06556, over 4968.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2348, pruned_loss=0.04983, over 971111.81 frames.], batch size: 35, lr: 6.36e-04 +2022-05-04 09:51:57,129 INFO [train.py:715] (7/8) Epoch 2, batch 29700, loss[loss=0.1759, simple_loss=0.2408, pruned_loss=0.05552, over 4811.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2351, pruned_loss=0.04953, over 971552.47 frames.], batch size: 27, lr: 6.36e-04 +2022-05-04 09:52:36,253 INFO [train.py:715] (7/8) Epoch 2, batch 29750, loss[loss=0.1619, simple_loss=0.2353, pruned_loss=0.04424, over 4814.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2359, pruned_loss=0.05051, over 972596.26 frames.], batch size: 27, lr: 6.36e-04 +2022-05-04 09:53:15,366 INFO [train.py:715] (7/8) Epoch 2, batch 29800, loss[loss=0.183, simple_loss=0.239, pruned_loss=0.0635, over 4984.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2365, pruned_loss=0.0508, over 972922.65 frames.], batch size: 28, lr: 6.36e-04 +2022-05-04 09:53:53,996 INFO [train.py:715] (7/8) Epoch 2, batch 29850, loss[loss=0.1554, simple_loss=0.2165, pruned_loss=0.04717, over 4814.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2353, pruned_loss=0.0501, over 972610.37 frames.], batch size: 13, lr: 6.36e-04 +2022-05-04 09:54:33,011 INFO [train.py:715] (7/8) Epoch 2, batch 29900, loss[loss=0.1288, simple_loss=0.2068, pruned_loss=0.02536, over 4825.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2349, pruned_loss=0.04977, over 972324.71 frames.], batch size: 13, lr: 6.36e-04 +2022-05-04 09:55:12,830 INFO [train.py:715] (7/8) Epoch 2, batch 29950, loss[loss=0.1474, simple_loss=0.2153, pruned_loss=0.0397, over 4751.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2349, pruned_loss=0.0498, over 970895.21 frames.], batch size: 19, lr: 6.35e-04 +2022-05-04 09:55:51,637 INFO [train.py:715] (7/8) Epoch 2, batch 30000, loss[loss=0.1554, simple_loss=0.2228, pruned_loss=0.04403, over 4737.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2367, pruned_loss=0.05089, over 970784.24 frames.], batch size: 16, lr: 6.35e-04 +2022-05-04 09:55:51,637 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 09:56:00,454 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1166, simple_loss=0.2028, pruned_loss=0.01515, over 914524.00 frames. 
+2022-05-04 09:56:39,119 INFO [train.py:715] (7/8) Epoch 2, batch 30050, loss[loss=0.1735, simple_loss=0.2277, pruned_loss=0.05969, over 4783.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2366, pruned_loss=0.05129, over 970780.45 frames.], batch size: 12, lr: 6.35e-04 +2022-05-04 09:57:18,479 INFO [train.py:715] (7/8) Epoch 2, batch 30100, loss[loss=0.1575, simple_loss=0.2285, pruned_loss=0.04324, over 4802.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2364, pruned_loss=0.05133, over 970716.69 frames.], batch size: 21, lr: 6.35e-04 +2022-05-04 09:57:57,552 INFO [train.py:715] (7/8) Epoch 2, batch 30150, loss[loss=0.1678, simple_loss=0.2389, pruned_loss=0.04838, over 4994.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2359, pruned_loss=0.0505, over 971827.55 frames.], batch size: 14, lr: 6.35e-04 +2022-05-04 09:58:37,028 INFO [train.py:715] (7/8) Epoch 2, batch 30200, loss[loss=0.1602, simple_loss=0.2263, pruned_loss=0.04704, over 4930.00 frames.], tot_loss[loss=0.1686, simple_loss=0.236, pruned_loss=0.05057, over 971765.63 frames.], batch size: 29, lr: 6.35e-04 +2022-05-04 09:59:15,776 INFO [train.py:715] (7/8) Epoch 2, batch 30250, loss[loss=0.1798, simple_loss=0.2437, pruned_loss=0.05795, over 4753.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2368, pruned_loss=0.05091, over 971913.32 frames.], batch size: 19, lr: 6.34e-04 +2022-05-04 09:59:55,023 INFO [train.py:715] (7/8) Epoch 2, batch 30300, loss[loss=0.144, simple_loss=0.2214, pruned_loss=0.03334, over 4989.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2351, pruned_loss=0.05027, over 973569.63 frames.], batch size: 25, lr: 6.34e-04 +2022-05-04 10:00:35,003 INFO [train.py:715] (7/8) Epoch 2, batch 30350, loss[loss=0.1424, simple_loss=0.2099, pruned_loss=0.03751, over 4757.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2349, pruned_loss=0.05037, over 972416.07 frames.], batch size: 14, lr: 6.34e-04 +2022-05-04 10:01:14,090 INFO [train.py:715] (7/8) Epoch 2, batch 30400, loss[loss=0.156, simple_loss=0.2223, pruned_loss=0.04492, over 4977.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2344, pruned_loss=0.0501, over 971774.89 frames.], batch size: 35, lr: 6.34e-04 +2022-05-04 10:01:53,194 INFO [train.py:715] (7/8) Epoch 2, batch 30450, loss[loss=0.2011, simple_loss=0.2542, pruned_loss=0.07405, over 4835.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2343, pruned_loss=0.05042, over 971638.40 frames.], batch size: 30, lr: 6.34e-04 +2022-05-04 10:02:32,964 INFO [train.py:715] (7/8) Epoch 2, batch 30500, loss[loss=0.1744, simple_loss=0.2501, pruned_loss=0.0494, over 4977.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2357, pruned_loss=0.0508, over 971319.44 frames.], batch size: 25, lr: 6.34e-04 +2022-05-04 10:03:12,633 INFO [train.py:715] (7/8) Epoch 2, batch 30550, loss[loss=0.162, simple_loss=0.2449, pruned_loss=0.03957, over 4759.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2358, pruned_loss=0.0507, over 971514.82 frames.], batch size: 16, lr: 6.33e-04 +2022-05-04 10:03:51,365 INFO [train.py:715] (7/8) Epoch 2, batch 30600, loss[loss=0.1492, simple_loss=0.2296, pruned_loss=0.03434, over 4797.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2355, pruned_loss=0.05049, over 971315.65 frames.], batch size: 12, lr: 6.33e-04 +2022-05-04 10:04:31,218 INFO [train.py:715] (7/8) Epoch 2, batch 30650, loss[loss=0.1763, simple_loss=0.2356, pruned_loss=0.05852, over 4952.00 frames.], tot_loss[loss=0.168, simple_loss=0.2352, pruned_loss=0.05039, over 971638.47 frames.], batch size: 29, lr: 6.33e-04 +2022-05-04 10:05:11,276 
INFO [train.py:715] (7/8) Epoch 2, batch 30700, loss[loss=0.1867, simple_loss=0.2469, pruned_loss=0.06321, over 4917.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2347, pruned_loss=0.05033, over 972182.17 frames.], batch size: 17, lr: 6.33e-04 +2022-05-04 10:05:51,107 INFO [train.py:715] (7/8) Epoch 2, batch 30750, loss[loss=0.1544, simple_loss=0.2165, pruned_loss=0.04612, over 4883.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2346, pruned_loss=0.05009, over 972065.55 frames.], batch size: 32, lr: 6.33e-04 +2022-05-04 10:06:30,173 INFO [train.py:715] (7/8) Epoch 2, batch 30800, loss[loss=0.1646, simple_loss=0.2515, pruned_loss=0.03882, over 4961.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2346, pruned_loss=0.04994, over 972243.14 frames.], batch size: 24, lr: 6.33e-04 +2022-05-04 10:07:09,685 INFO [train.py:715] (7/8) Epoch 2, batch 30850, loss[loss=0.1751, simple_loss=0.2362, pruned_loss=0.057, over 4872.00 frames.], tot_loss[loss=0.167, simple_loss=0.2347, pruned_loss=0.04967, over 971531.27 frames.], batch size: 16, lr: 6.33e-04 +2022-05-04 10:07:49,326 INFO [train.py:715] (7/8) Epoch 2, batch 30900, loss[loss=0.1724, simple_loss=0.2445, pruned_loss=0.05013, over 4829.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2344, pruned_loss=0.04944, over 971845.26 frames.], batch size: 27, lr: 6.32e-04 +2022-05-04 10:08:27,848 INFO [train.py:715] (7/8) Epoch 2, batch 30950, loss[loss=0.166, simple_loss=0.2443, pruned_loss=0.04382, over 4900.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2341, pruned_loss=0.04954, over 972000.49 frames.], batch size: 19, lr: 6.32e-04 +2022-05-04 10:09:07,774 INFO [train.py:715] (7/8) Epoch 2, batch 31000, loss[loss=0.151, simple_loss=0.2147, pruned_loss=0.04365, over 4938.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2347, pruned_loss=0.05038, over 971477.41 frames.], batch size: 21, lr: 6.32e-04 +2022-05-04 10:09:48,215 INFO [train.py:715] (7/8) Epoch 2, batch 31050, loss[loss=0.1469, simple_loss=0.2264, pruned_loss=0.03371, over 4891.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2344, pruned_loss=0.05028, over 971507.60 frames.], batch size: 19, lr: 6.32e-04 +2022-05-04 10:10:27,692 INFO [train.py:715] (7/8) Epoch 2, batch 31100, loss[loss=0.1863, simple_loss=0.2526, pruned_loss=0.05997, over 4941.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2346, pruned_loss=0.05018, over 971515.75 frames.], batch size: 29, lr: 6.32e-04 +2022-05-04 10:11:07,483 INFO [train.py:715] (7/8) Epoch 2, batch 31150, loss[loss=0.1414, simple_loss=0.2194, pruned_loss=0.03168, over 4831.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2352, pruned_loss=0.05036, over 972025.64 frames.], batch size: 26, lr: 6.32e-04 +2022-05-04 10:11:47,659 INFO [train.py:715] (7/8) Epoch 2, batch 31200, loss[loss=0.1683, simple_loss=0.2343, pruned_loss=0.05112, over 4710.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2341, pruned_loss=0.04989, over 972119.95 frames.], batch size: 15, lr: 6.31e-04 +2022-05-04 10:12:27,441 INFO [train.py:715] (7/8) Epoch 2, batch 31250, loss[loss=0.1815, simple_loss=0.2483, pruned_loss=0.0573, over 4800.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2337, pruned_loss=0.04981, over 971174.49 frames.], batch size: 25, lr: 6.31e-04 +2022-05-04 10:13:06,645 INFO [train.py:715] (7/8) Epoch 2, batch 31300, loss[loss=0.1741, simple_loss=0.2408, pruned_loss=0.05374, over 4764.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2338, pruned_loss=0.05, over 971814.36 frames.], batch size: 19, lr: 6.31e-04 +2022-05-04 10:13:46,589 INFO [train.py:715] (7/8) 
Epoch 2, batch 31350, loss[loss=0.1609, simple_loss=0.2316, pruned_loss=0.04506, over 4852.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2339, pruned_loss=0.04988, over 972431.56 frames.], batch size: 13, lr: 6.31e-04 +2022-05-04 10:14:26,955 INFO [train.py:715] (7/8) Epoch 2, batch 31400, loss[loss=0.1653, simple_loss=0.222, pruned_loss=0.05428, over 4772.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2347, pruned_loss=0.05041, over 971762.59 frames.], batch size: 17, lr: 6.31e-04 +2022-05-04 10:15:06,594 INFO [train.py:715] (7/8) Epoch 2, batch 31450, loss[loss=0.1964, simple_loss=0.2367, pruned_loss=0.07802, over 4984.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2346, pruned_loss=0.0505, over 972102.58 frames.], batch size: 15, lr: 6.31e-04 +2022-05-04 10:15:46,240 INFO [train.py:715] (7/8) Epoch 2, batch 31500, loss[loss=0.1646, simple_loss=0.2368, pruned_loss=0.04624, over 4896.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2357, pruned_loss=0.05096, over 971951.09 frames.], batch size: 19, lr: 6.31e-04 +2022-05-04 10:16:26,034 INFO [train.py:715] (7/8) Epoch 2, batch 31550, loss[loss=0.1831, simple_loss=0.2508, pruned_loss=0.05772, over 4894.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2355, pruned_loss=0.05077, over 971475.50 frames.], batch size: 19, lr: 6.30e-04 +2022-05-04 10:17:05,442 INFO [train.py:715] (7/8) Epoch 2, batch 31600, loss[loss=0.1862, simple_loss=0.2367, pruned_loss=0.06786, over 4694.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2359, pruned_loss=0.0508, over 971831.77 frames.], batch size: 15, lr: 6.30e-04 +2022-05-04 10:17:44,226 INFO [train.py:715] (7/8) Epoch 2, batch 31650, loss[loss=0.187, simple_loss=0.2551, pruned_loss=0.05947, over 4855.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2351, pruned_loss=0.05022, over 971972.59 frames.], batch size: 32, lr: 6.30e-04 +2022-05-04 10:18:24,071 INFO [train.py:715] (7/8) Epoch 2, batch 31700, loss[loss=0.146, simple_loss=0.2294, pruned_loss=0.03126, over 4956.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2354, pruned_loss=0.05011, over 971516.90 frames.], batch size: 24, lr: 6.30e-04 +2022-05-04 10:19:04,307 INFO [train.py:715] (7/8) Epoch 2, batch 31750, loss[loss=0.148, simple_loss=0.2243, pruned_loss=0.03581, over 4955.00 frames.], tot_loss[loss=0.168, simple_loss=0.2354, pruned_loss=0.05035, over 972224.10 frames.], batch size: 21, lr: 6.30e-04 +2022-05-04 10:19:44,142 INFO [train.py:715] (7/8) Epoch 2, batch 31800, loss[loss=0.1917, simple_loss=0.2541, pruned_loss=0.06464, over 4917.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2354, pruned_loss=0.05044, over 972860.22 frames.], batch size: 18, lr: 6.30e-04 +2022-05-04 10:20:23,466 INFO [train.py:715] (7/8) Epoch 2, batch 31850, loss[loss=0.177, simple_loss=0.2372, pruned_loss=0.05835, over 4897.00 frames.], tot_loss[loss=0.1687, simple_loss=0.236, pruned_loss=0.05073, over 972642.39 frames.], batch size: 19, lr: 6.29e-04 +2022-05-04 10:21:02,955 INFO [train.py:715] (7/8) Epoch 2, batch 31900, loss[loss=0.1624, simple_loss=0.2288, pruned_loss=0.04804, over 4931.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2348, pruned_loss=0.05004, over 972172.29 frames.], batch size: 21, lr: 6.29e-04 +2022-05-04 10:21:42,549 INFO [train.py:715] (7/8) Epoch 2, batch 31950, loss[loss=0.1416, simple_loss=0.2173, pruned_loss=0.03292, over 4874.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2351, pruned_loss=0.05011, over 972029.32 frames.], batch size: 22, lr: 6.29e-04 +2022-05-04 10:22:21,472 INFO [train.py:715] (7/8) Epoch 2, batch 32000, 
loss[loss=0.2026, simple_loss=0.2416, pruned_loss=0.08183, over 4774.00 frames.], tot_loss[loss=0.168, simple_loss=0.2353, pruned_loss=0.0503, over 972609.03 frames.], batch size: 14, lr: 6.29e-04 +2022-05-04 10:23:01,105 INFO [train.py:715] (7/8) Epoch 2, batch 32050, loss[loss=0.2082, simple_loss=0.2651, pruned_loss=0.07572, over 4846.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2352, pruned_loss=0.05024, over 972899.28 frames.], batch size: 30, lr: 6.29e-04 +2022-05-04 10:23:41,017 INFO [train.py:715] (7/8) Epoch 2, batch 32100, loss[loss=0.1716, simple_loss=0.2502, pruned_loss=0.04651, over 4903.00 frames.], tot_loss[loss=0.1689, simple_loss=0.236, pruned_loss=0.05086, over 972925.25 frames.], batch size: 17, lr: 6.29e-04 +2022-05-04 10:24:20,306 INFO [train.py:715] (7/8) Epoch 2, batch 32150, loss[loss=0.1667, simple_loss=0.2384, pruned_loss=0.04746, over 4815.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2369, pruned_loss=0.05136, over 974110.88 frames.], batch size: 26, lr: 6.29e-04 +2022-05-04 10:24:59,279 INFO [train.py:715] (7/8) Epoch 2, batch 32200, loss[loss=0.1492, simple_loss=0.218, pruned_loss=0.04016, over 4804.00 frames.], tot_loss[loss=0.1694, simple_loss=0.236, pruned_loss=0.05137, over 974093.77 frames.], batch size: 12, lr: 6.28e-04 +2022-05-04 10:25:39,141 INFO [train.py:715] (7/8) Epoch 2, batch 32250, loss[loss=0.1561, simple_loss=0.2296, pruned_loss=0.04129, over 4878.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2359, pruned_loss=0.05143, over 973015.78 frames.], batch size: 16, lr: 6.28e-04 +2022-05-04 10:26:18,496 INFO [train.py:715] (7/8) Epoch 2, batch 32300, loss[loss=0.1725, simple_loss=0.2425, pruned_loss=0.05121, over 4773.00 frames.], tot_loss[loss=0.169, simple_loss=0.2359, pruned_loss=0.05103, over 972106.59 frames.], batch size: 19, lr: 6.28e-04 +2022-05-04 10:26:57,487 INFO [train.py:715] (7/8) Epoch 2, batch 32350, loss[loss=0.181, simple_loss=0.2458, pruned_loss=0.0581, over 4887.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2371, pruned_loss=0.05194, over 972093.06 frames.], batch size: 17, lr: 6.28e-04 +2022-05-04 10:27:37,327 INFO [train.py:715] (7/8) Epoch 2, batch 32400, loss[loss=0.1735, simple_loss=0.2429, pruned_loss=0.05206, over 4698.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2359, pruned_loss=0.05055, over 971661.72 frames.], batch size: 15, lr: 6.28e-04 +2022-05-04 10:28:17,095 INFO [train.py:715] (7/8) Epoch 2, batch 32450, loss[loss=0.1689, simple_loss=0.2335, pruned_loss=0.05217, over 4849.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2361, pruned_loss=0.05085, over 972739.04 frames.], batch size: 34, lr: 6.28e-04 +2022-05-04 10:28:56,081 INFO [train.py:715] (7/8) Epoch 2, batch 32500, loss[loss=0.1421, simple_loss=0.2219, pruned_loss=0.03112, over 4894.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2362, pruned_loss=0.05073, over 973265.00 frames.], batch size: 22, lr: 6.27e-04 +2022-05-04 10:29:35,594 INFO [train.py:715] (7/8) Epoch 2, batch 32550, loss[loss=0.182, simple_loss=0.2521, pruned_loss=0.05598, over 4684.00 frames.], tot_loss[loss=0.168, simple_loss=0.2356, pruned_loss=0.05022, over 971399.67 frames.], batch size: 15, lr: 6.27e-04 +2022-05-04 10:30:15,650 INFO [train.py:715] (7/8) Epoch 2, batch 32600, loss[loss=0.1522, simple_loss=0.2214, pruned_loss=0.04155, over 4841.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2343, pruned_loss=0.04954, over 971565.98 frames.], batch size: 30, lr: 6.27e-04 +2022-05-04 10:30:54,903 INFO [train.py:715] (7/8) Epoch 2, batch 32650, loss[loss=0.1848, 
simple_loss=0.2445, pruned_loss=0.06255, over 4797.00 frames.], tot_loss[loss=0.167, simple_loss=0.2348, pruned_loss=0.04966, over 971427.16 frames.], batch size: 21, lr: 6.27e-04 +2022-05-04 10:31:33,744 INFO [train.py:715] (7/8) Epoch 2, batch 32700, loss[loss=0.1823, simple_loss=0.2539, pruned_loss=0.05542, over 4818.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2359, pruned_loss=0.05054, over 971752.40 frames.], batch size: 21, lr: 6.27e-04 +2022-05-04 10:32:13,536 INFO [train.py:715] (7/8) Epoch 2, batch 32750, loss[loss=0.1852, simple_loss=0.2507, pruned_loss=0.05988, over 4959.00 frames.], tot_loss[loss=0.1699, simple_loss=0.237, pruned_loss=0.05138, over 971846.27 frames.], batch size: 21, lr: 6.27e-04 +2022-05-04 10:32:53,523 INFO [train.py:715] (7/8) Epoch 2, batch 32800, loss[loss=0.1498, simple_loss=0.2288, pruned_loss=0.03544, over 4820.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2358, pruned_loss=0.05051, over 970782.81 frames.], batch size: 26, lr: 6.27e-04 +2022-05-04 10:33:32,252 INFO [train.py:715] (7/8) Epoch 2, batch 32850, loss[loss=0.1465, simple_loss=0.2084, pruned_loss=0.04223, over 4787.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2348, pruned_loss=0.04982, over 970341.26 frames.], batch size: 14, lr: 6.26e-04 +2022-05-04 10:34:11,598 INFO [train.py:715] (7/8) Epoch 2, batch 32900, loss[loss=0.1457, simple_loss=0.2264, pruned_loss=0.03251, over 4975.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2345, pruned_loss=0.04921, over 970267.40 frames.], batch size: 24, lr: 6.26e-04 +2022-05-04 10:34:51,518 INFO [train.py:715] (7/8) Epoch 2, batch 32950, loss[loss=0.1612, simple_loss=0.2215, pruned_loss=0.0505, over 4833.00 frames.], tot_loss[loss=0.167, simple_loss=0.2346, pruned_loss=0.04968, over 969953.32 frames.], batch size: 13, lr: 6.26e-04 +2022-05-04 10:35:30,095 INFO [train.py:715] (7/8) Epoch 2, batch 33000, loss[loss=0.1635, simple_loss=0.23, pruned_loss=0.04851, over 4963.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2349, pruned_loss=0.04974, over 970812.65 frames.], batch size: 35, lr: 6.26e-04 +2022-05-04 10:35:30,096 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 10:35:38,853 INFO [train.py:742] (7/8) Epoch 2, validation: loss=0.1163, simple_loss=0.2025, pruned_loss=0.01504, over 914524.00 frames. 
+2022-05-04 10:36:17,844 INFO [train.py:715] (7/8) Epoch 2, batch 33050, loss[loss=0.1554, simple_loss=0.2359, pruned_loss=0.03749, over 4950.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2353, pruned_loss=0.05047, over 971426.07 frames.], batch size: 35, lr: 6.26e-04 +2022-05-04 10:36:57,381 INFO [train.py:715] (7/8) Epoch 2, batch 33100, loss[loss=0.156, simple_loss=0.2251, pruned_loss=0.04349, over 4985.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2344, pruned_loss=0.05018, over 971039.05 frames.], batch size: 28, lr: 6.26e-04 +2022-05-04 10:37:37,183 INFO [train.py:715] (7/8) Epoch 2, batch 33150, loss[loss=0.1485, simple_loss=0.223, pruned_loss=0.037, over 4895.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2355, pruned_loss=0.05095, over 971380.78 frames.], batch size: 19, lr: 6.25e-04 +2022-05-04 10:38:16,780 INFO [train.py:715] (7/8) Epoch 2, batch 33200, loss[loss=0.1709, simple_loss=0.2369, pruned_loss=0.05243, over 4781.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2347, pruned_loss=0.05017, over 971642.86 frames.], batch size: 17, lr: 6.25e-04 +2022-05-04 10:38:56,321 INFO [train.py:715] (7/8) Epoch 2, batch 33250, loss[loss=0.147, simple_loss=0.2144, pruned_loss=0.0398, over 4769.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2343, pruned_loss=0.05001, over 971190.38 frames.], batch size: 18, lr: 6.25e-04 +2022-05-04 10:39:35,529 INFO [train.py:715] (7/8) Epoch 2, batch 33300, loss[loss=0.187, simple_loss=0.2593, pruned_loss=0.05729, over 4788.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2356, pruned_loss=0.05047, over 972442.98 frames.], batch size: 14, lr: 6.25e-04 +2022-05-04 10:40:14,696 INFO [train.py:715] (7/8) Epoch 2, batch 33350, loss[loss=0.2048, simple_loss=0.2605, pruned_loss=0.07456, over 4779.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2355, pruned_loss=0.05042, over 971926.26 frames.], batch size: 17, lr: 6.25e-04 +2022-05-04 10:40:53,963 INFO [train.py:715] (7/8) Epoch 2, batch 33400, loss[loss=0.1975, simple_loss=0.254, pruned_loss=0.07053, over 4866.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2348, pruned_loss=0.05035, over 971520.73 frames.], batch size: 22, lr: 6.25e-04 +2022-05-04 10:41:33,183 INFO [train.py:715] (7/8) Epoch 2, batch 33450, loss[loss=0.1598, simple_loss=0.2161, pruned_loss=0.05171, over 4785.00 frames.], tot_loss[loss=0.1669, simple_loss=0.234, pruned_loss=0.04992, over 971809.83 frames.], batch size: 12, lr: 6.25e-04 +2022-05-04 10:42:13,249 INFO [train.py:715] (7/8) Epoch 2, batch 33500, loss[loss=0.1816, simple_loss=0.2318, pruned_loss=0.06574, over 4918.00 frames.], tot_loss[loss=0.1666, simple_loss=0.234, pruned_loss=0.04961, over 971698.23 frames.], batch size: 23, lr: 6.24e-04 +2022-05-04 10:42:52,009 INFO [train.py:715] (7/8) Epoch 2, batch 33550, loss[loss=0.181, simple_loss=0.2413, pruned_loss=0.06036, over 4712.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2329, pruned_loss=0.04922, over 971172.64 frames.], batch size: 15, lr: 6.24e-04 +2022-05-04 10:43:31,505 INFO [train.py:715] (7/8) Epoch 2, batch 33600, loss[loss=0.1832, simple_loss=0.2307, pruned_loss=0.0679, over 4877.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2335, pruned_loss=0.04941, over 971654.32 frames.], batch size: 32, lr: 6.24e-04 +2022-05-04 10:44:11,049 INFO [train.py:715] (7/8) Epoch 2, batch 33650, loss[loss=0.1484, simple_loss=0.2356, pruned_loss=0.03056, over 4814.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2331, pruned_loss=0.04929, over 970675.17 frames.], batch size: 24, lr: 6.24e-04 +2022-05-04 10:44:50,489 INFO 
[train.py:715] (7/8) Epoch 2, batch 33700, loss[loss=0.1925, simple_loss=0.263, pruned_loss=0.06103, over 4696.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2327, pruned_loss=0.04886, over 970745.90 frames.], batch size: 15, lr: 6.24e-04 +2022-05-04 10:45:29,906 INFO [train.py:715] (7/8) Epoch 2, batch 33750, loss[loss=0.1765, simple_loss=0.2385, pruned_loss=0.05722, over 4899.00 frames.], tot_loss[loss=0.1656, simple_loss=0.233, pruned_loss=0.04907, over 972008.82 frames.], batch size: 19, lr: 6.24e-04 +2022-05-04 10:46:09,311 INFO [train.py:715] (7/8) Epoch 2, batch 33800, loss[loss=0.1884, simple_loss=0.2599, pruned_loss=0.05842, over 4987.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2343, pruned_loss=0.05013, over 972233.48 frames.], batch size: 25, lr: 6.23e-04 +2022-05-04 10:46:49,493 INFO [train.py:715] (7/8) Epoch 2, batch 33850, loss[loss=0.1772, simple_loss=0.2514, pruned_loss=0.0515, over 4934.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2339, pruned_loss=0.04996, over 972122.38 frames.], batch size: 23, lr: 6.23e-04 +2022-05-04 10:47:28,889 INFO [train.py:715] (7/8) Epoch 2, batch 33900, loss[loss=0.1504, simple_loss=0.2269, pruned_loss=0.0369, over 4839.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2343, pruned_loss=0.05023, over 972111.97 frames.], batch size: 15, lr: 6.23e-04 +2022-05-04 10:48:08,030 INFO [train.py:715] (7/8) Epoch 2, batch 33950, loss[loss=0.1335, simple_loss=0.2033, pruned_loss=0.03186, over 4814.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2327, pruned_loss=0.04899, over 971634.39 frames.], batch size: 13, lr: 6.23e-04 +2022-05-04 10:48:47,954 INFO [train.py:715] (7/8) Epoch 2, batch 34000, loss[loss=0.2122, simple_loss=0.2649, pruned_loss=0.07975, over 4874.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2338, pruned_loss=0.04953, over 971656.58 frames.], batch size: 20, lr: 6.23e-04 +2022-05-04 10:49:27,585 INFO [train.py:715] (7/8) Epoch 2, batch 34050, loss[loss=0.1689, simple_loss=0.2332, pruned_loss=0.05232, over 4861.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2334, pruned_loss=0.04956, over 972311.37 frames.], batch size: 20, lr: 6.23e-04 +2022-05-04 10:50:07,049 INFO [train.py:715] (7/8) Epoch 2, batch 34100, loss[loss=0.1498, simple_loss=0.226, pruned_loss=0.03675, over 4887.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2337, pruned_loss=0.04925, over 971852.60 frames.], batch size: 29, lr: 6.23e-04 +2022-05-04 10:50:46,462 INFO [train.py:715] (7/8) Epoch 2, batch 34150, loss[loss=0.153, simple_loss=0.2284, pruned_loss=0.03883, over 4893.00 frames.], tot_loss[loss=0.1653, simple_loss=0.233, pruned_loss=0.04884, over 972087.22 frames.], batch size: 17, lr: 6.22e-04 +2022-05-04 10:51:26,751 INFO [train.py:715] (7/8) Epoch 2, batch 34200, loss[loss=0.1871, simple_loss=0.2503, pruned_loss=0.06198, over 4888.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2328, pruned_loss=0.04916, over 972658.02 frames.], batch size: 22, lr: 6.22e-04 +2022-05-04 10:52:06,316 INFO [train.py:715] (7/8) Epoch 2, batch 34250, loss[loss=0.2299, simple_loss=0.2993, pruned_loss=0.08026, over 4775.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2327, pruned_loss=0.04897, over 972902.84 frames.], batch size: 19, lr: 6.22e-04 +2022-05-04 10:52:45,483 INFO [train.py:715] (7/8) Epoch 2, batch 34300, loss[loss=0.1863, simple_loss=0.2583, pruned_loss=0.05712, over 4932.00 frames.], tot_loss[loss=0.1659, simple_loss=0.233, pruned_loss=0.0494, over 972203.86 frames.], batch size: 18, lr: 6.22e-04 +2022-05-04 10:53:25,369 INFO [train.py:715] (7/8) Epoch 
2, batch 34350, loss[loss=0.2131, simple_loss=0.2766, pruned_loss=0.07478, over 4903.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2335, pruned_loss=0.04952, over 972746.28 frames.], batch size: 17, lr: 6.22e-04 +2022-05-04 10:54:07,394 INFO [train.py:715] (7/8) Epoch 2, batch 34400, loss[loss=0.1679, simple_loss=0.2307, pruned_loss=0.05253, over 4845.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2335, pruned_loss=0.04896, over 972621.87 frames.], batch size: 26, lr: 6.22e-04 +2022-05-04 10:54:46,517 INFO [train.py:715] (7/8) Epoch 2, batch 34450, loss[loss=0.194, simple_loss=0.264, pruned_loss=0.062, over 4921.00 frames.], tot_loss[loss=0.167, simple_loss=0.2349, pruned_loss=0.04956, over 972491.79 frames.], batch size: 39, lr: 6.22e-04 +2022-05-04 10:55:25,437 INFO [train.py:715] (7/8) Epoch 2, batch 34500, loss[loss=0.1982, simple_loss=0.2679, pruned_loss=0.06424, over 4954.00 frames.], tot_loss[loss=0.1671, simple_loss=0.235, pruned_loss=0.04961, over 972486.95 frames.], batch size: 24, lr: 6.21e-04 +2022-05-04 10:56:05,352 INFO [train.py:715] (7/8) Epoch 2, batch 34550, loss[loss=0.147, simple_loss=0.2316, pruned_loss=0.03118, over 4761.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2358, pruned_loss=0.05, over 972926.72 frames.], batch size: 19, lr: 6.21e-04 +2022-05-04 10:56:44,142 INFO [train.py:715] (7/8) Epoch 2, batch 34600, loss[loss=0.1509, simple_loss=0.2228, pruned_loss=0.03946, over 4976.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2365, pruned_loss=0.05052, over 973268.01 frames.], batch size: 25, lr: 6.21e-04 +2022-05-04 10:57:23,178 INFO [train.py:715] (7/8) Epoch 2, batch 34650, loss[loss=0.1372, simple_loss=0.2097, pruned_loss=0.03237, over 4977.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2354, pruned_loss=0.05002, over 973755.97 frames.], batch size: 14, lr: 6.21e-04 +2022-05-04 10:58:02,542 INFO [train.py:715] (7/8) Epoch 2, batch 34700, loss[loss=0.198, simple_loss=0.2628, pruned_loss=0.06661, over 4680.00 frames.], tot_loss[loss=0.1675, simple_loss=0.235, pruned_loss=0.04995, over 972951.03 frames.], batch size: 15, lr: 6.21e-04 +2022-05-04 10:58:40,569 INFO [train.py:715] (7/8) Epoch 2, batch 34750, loss[loss=0.1747, simple_loss=0.2543, pruned_loss=0.04752, over 4840.00 frames.], tot_loss[loss=0.168, simple_loss=0.2356, pruned_loss=0.05023, over 971765.07 frames.], batch size: 13, lr: 6.21e-04 +2022-05-04 10:59:17,103 INFO [train.py:715] (7/8) Epoch 2, batch 34800, loss[loss=0.1366, simple_loss=0.2079, pruned_loss=0.03267, over 4756.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2353, pruned_loss=0.05026, over 970653.29 frames.], batch size: 12, lr: 6.20e-04 +2022-05-04 11:00:07,066 INFO [train.py:715] (7/8) Epoch 3, batch 0, loss[loss=0.166, simple_loss=0.2413, pruned_loss=0.04534, over 4761.00 frames.], tot_loss[loss=0.166, simple_loss=0.2413, pruned_loss=0.04534, over 4761.00 frames.], batch size: 19, lr: 5.87e-04 +2022-05-04 11:00:45,740 INFO [train.py:715] (7/8) Epoch 3, batch 50, loss[loss=0.1844, simple_loss=0.2508, pruned_loss=0.05896, over 4920.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2356, pruned_loss=0.05259, over 219299.70 frames.], batch size: 23, lr: 5.87e-04 +2022-05-04 11:01:25,680 INFO [train.py:715] (7/8) Epoch 3, batch 100, loss[loss=0.1588, simple_loss=0.2234, pruned_loss=0.04707, over 4923.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2335, pruned_loss=0.05006, over 385905.16 frames.], batch size: 23, lr: 5.87e-04 +2022-05-04 11:02:05,239 INFO [train.py:715] (7/8) Epoch 3, batch 150, loss[loss=0.1955, 
simple_loss=0.2623, pruned_loss=0.06437, over 4737.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2336, pruned_loss=0.04998, over 515352.25 frames.], batch size: 16, lr: 5.86e-04 +2022-05-04 11:02:44,386 INFO [train.py:715] (7/8) Epoch 3, batch 200, loss[loss=0.1559, simple_loss=0.2256, pruned_loss=0.04312, over 4701.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2325, pruned_loss=0.04883, over 617051.73 frames.], batch size: 15, lr: 5.86e-04 +2022-05-04 11:03:23,628 INFO [train.py:715] (7/8) Epoch 3, batch 250, loss[loss=0.1421, simple_loss=0.2281, pruned_loss=0.02806, over 4811.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2336, pruned_loss=0.04872, over 695712.08 frames.], batch size: 26, lr: 5.86e-04 +2022-05-04 11:04:03,636 INFO [train.py:715] (7/8) Epoch 3, batch 300, loss[loss=0.1716, simple_loss=0.2442, pruned_loss=0.04951, over 4816.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2329, pruned_loss=0.04804, over 756994.05 frames.], batch size: 25, lr: 5.86e-04 +2022-05-04 11:04:42,646 INFO [train.py:715] (7/8) Epoch 3, batch 350, loss[loss=0.1985, simple_loss=0.2554, pruned_loss=0.07078, over 4988.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2344, pruned_loss=0.04857, over 805508.77 frames.], batch size: 16, lr: 5.86e-04 +2022-05-04 11:05:21,843 INFO [train.py:715] (7/8) Epoch 3, batch 400, loss[loss=0.1669, simple_loss=0.2358, pruned_loss=0.04897, over 4818.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2342, pruned_loss=0.0488, over 842665.13 frames.], batch size: 27, lr: 5.86e-04 +2022-05-04 11:06:01,610 INFO [train.py:715] (7/8) Epoch 3, batch 450, loss[loss=0.1541, simple_loss=0.2162, pruned_loss=0.04599, over 4776.00 frames.], tot_loss[loss=0.1647, simple_loss=0.233, pruned_loss=0.04817, over 872151.94 frames.], batch size: 18, lr: 5.86e-04 +2022-05-04 11:06:41,125 INFO [train.py:715] (7/8) Epoch 3, batch 500, loss[loss=0.1504, simple_loss=0.2271, pruned_loss=0.03686, over 4815.00 frames.], tot_loss[loss=0.1647, simple_loss=0.233, pruned_loss=0.04824, over 894566.30 frames.], batch size: 27, lr: 5.85e-04 +2022-05-04 11:07:20,469 INFO [train.py:715] (7/8) Epoch 3, batch 550, loss[loss=0.1918, simple_loss=0.2521, pruned_loss=0.06578, over 4776.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04801, over 911609.06 frames.], batch size: 17, lr: 5.85e-04 +2022-05-04 11:07:59,340 INFO [train.py:715] (7/8) Epoch 3, batch 600, loss[loss=0.159, simple_loss=0.2294, pruned_loss=0.04428, over 4898.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2322, pruned_loss=0.04798, over 925677.14 frames.], batch size: 17, lr: 5.85e-04 +2022-05-04 11:08:39,294 INFO [train.py:715] (7/8) Epoch 3, batch 650, loss[loss=0.167, simple_loss=0.2337, pruned_loss=0.05013, over 4921.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2334, pruned_loss=0.04878, over 935850.02 frames.], batch size: 23, lr: 5.85e-04 +2022-05-04 11:09:18,639 INFO [train.py:715] (7/8) Epoch 3, batch 700, loss[loss=0.1506, simple_loss=0.2189, pruned_loss=0.04117, over 4804.00 frames.], tot_loss[loss=0.164, simple_loss=0.2324, pruned_loss=0.04778, over 943577.66 frames.], batch size: 13, lr: 5.85e-04 +2022-05-04 11:09:57,741 INFO [train.py:715] (7/8) Epoch 3, batch 750, loss[loss=0.1647, simple_loss=0.2266, pruned_loss=0.05137, over 4886.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2334, pruned_loss=0.04905, over 950122.70 frames.], batch size: 16, lr: 5.85e-04 +2022-05-04 11:10:37,307 INFO [train.py:715] (7/8) Epoch 3, batch 800, loss[loss=0.1759, simple_loss=0.2396, pruned_loss=0.05613, over 
4747.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2334, pruned_loss=0.0491, over 955424.98 frames.], batch size: 19, lr: 5.85e-04 +2022-05-04 11:11:17,443 INFO [train.py:715] (7/8) Epoch 3, batch 850, loss[loss=0.1671, simple_loss=0.2327, pruned_loss=0.05077, over 4909.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2332, pruned_loss=0.04881, over 958955.92 frames.], batch size: 18, lr: 5.84e-04 +2022-05-04 11:11:56,827 INFO [train.py:715] (7/8) Epoch 3, batch 900, loss[loss=0.1661, simple_loss=0.2459, pruned_loss=0.04313, over 4934.00 frames.], tot_loss[loss=0.1652, simple_loss=0.233, pruned_loss=0.0487, over 961501.70 frames.], batch size: 29, lr: 5.84e-04 +2022-05-04 11:12:35,444 INFO [train.py:715] (7/8) Epoch 3, batch 950, loss[loss=0.1762, simple_loss=0.234, pruned_loss=0.05924, over 4793.00 frames.], tot_loss[loss=0.1664, simple_loss=0.234, pruned_loss=0.04941, over 963716.11 frames.], batch size: 24, lr: 5.84e-04 +2022-05-04 11:13:15,426 INFO [train.py:715] (7/8) Epoch 3, batch 1000, loss[loss=0.1575, simple_loss=0.2256, pruned_loss=0.04473, over 4942.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2332, pruned_loss=0.04858, over 965605.96 frames.], batch size: 23, lr: 5.84e-04 +2022-05-04 11:13:55,095 INFO [train.py:715] (7/8) Epoch 3, batch 1050, loss[loss=0.1816, simple_loss=0.2464, pruned_loss=0.05842, over 4904.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2334, pruned_loss=0.04903, over 966927.86 frames.], batch size: 39, lr: 5.84e-04 +2022-05-04 11:14:34,006 INFO [train.py:715] (7/8) Epoch 3, batch 1100, loss[loss=0.2056, simple_loss=0.2697, pruned_loss=0.0707, over 4870.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2344, pruned_loss=0.04926, over 968971.07 frames.], batch size: 20, lr: 5.84e-04 +2022-05-04 11:15:12,879 INFO [train.py:715] (7/8) Epoch 3, batch 1150, loss[loss=0.1379, simple_loss=0.2049, pruned_loss=0.03547, over 4954.00 frames.], tot_loss[loss=0.1664, simple_loss=0.234, pruned_loss=0.04936, over 970059.06 frames.], batch size: 24, lr: 5.84e-04 +2022-05-04 11:15:52,686 INFO [train.py:715] (7/8) Epoch 3, batch 1200, loss[loss=0.2107, simple_loss=0.2688, pruned_loss=0.07625, over 4853.00 frames.], tot_loss[loss=0.166, simple_loss=0.234, pruned_loss=0.04903, over 970643.70 frames.], batch size: 32, lr: 5.83e-04 +2022-05-04 11:16:31,655 INFO [train.py:715] (7/8) Epoch 3, batch 1250, loss[loss=0.2436, simple_loss=0.3146, pruned_loss=0.08631, over 4915.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2339, pruned_loss=0.04875, over 971488.33 frames.], batch size: 39, lr: 5.83e-04 +2022-05-04 11:17:10,166 INFO [train.py:715] (7/8) Epoch 3, batch 1300, loss[loss=0.1703, simple_loss=0.2366, pruned_loss=0.052, over 4759.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2341, pruned_loss=0.04856, over 972865.77 frames.], batch size: 14, lr: 5.83e-04 +2022-05-04 11:17:49,721 INFO [train.py:715] (7/8) Epoch 3, batch 1350, loss[loss=0.1456, simple_loss=0.2155, pruned_loss=0.03791, over 4878.00 frames.], tot_loss[loss=0.1646, simple_loss=0.233, pruned_loss=0.04815, over 971768.55 frames.], batch size: 22, lr: 5.83e-04 +2022-05-04 11:18:29,001 INFO [train.py:715] (7/8) Epoch 3, batch 1400, loss[loss=0.1651, simple_loss=0.2344, pruned_loss=0.04795, over 4862.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2328, pruned_loss=0.04786, over 972021.11 frames.], batch size: 20, lr: 5.83e-04 +2022-05-04 11:19:07,867 INFO [train.py:715] (7/8) Epoch 3, batch 1450, loss[loss=0.1528, simple_loss=0.2246, pruned_loss=0.04046, over 4783.00 frames.], tot_loss[loss=0.1639, 
simple_loss=0.2323, pruned_loss=0.04769, over 972669.39 frames.], batch size: 18, lr: 5.83e-04 +2022-05-04 11:19:46,424 INFO [train.py:715] (7/8) Epoch 3, batch 1500, loss[loss=0.1901, simple_loss=0.2486, pruned_loss=0.06581, over 4937.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2324, pruned_loss=0.04807, over 973018.20 frames.], batch size: 39, lr: 5.83e-04 +2022-05-04 11:20:26,147 INFO [train.py:715] (7/8) Epoch 3, batch 1550, loss[loss=0.1697, simple_loss=0.2441, pruned_loss=0.04761, over 4862.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2327, pruned_loss=0.04847, over 972966.58 frames.], batch size: 20, lr: 5.83e-04 +2022-05-04 11:21:05,414 INFO [train.py:715] (7/8) Epoch 3, batch 1600, loss[loss=0.1404, simple_loss=0.2158, pruned_loss=0.0325, over 4980.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2326, pruned_loss=0.04808, over 972807.56 frames.], batch size: 14, lr: 5.82e-04 +2022-05-04 11:21:43,533 INFO [train.py:715] (7/8) Epoch 3, batch 1650, loss[loss=0.1614, simple_loss=0.2177, pruned_loss=0.05257, over 4983.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2326, pruned_loss=0.04847, over 972844.27 frames.], batch size: 31, lr: 5.82e-04 +2022-05-04 11:22:22,786 INFO [train.py:715] (7/8) Epoch 3, batch 1700, loss[loss=0.1514, simple_loss=0.2319, pruned_loss=0.03545, over 4856.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2332, pruned_loss=0.04871, over 972486.39 frames.], batch size: 30, lr: 5.82e-04 +2022-05-04 11:23:02,324 INFO [train.py:715] (7/8) Epoch 3, batch 1750, loss[loss=0.1938, simple_loss=0.2505, pruned_loss=0.06853, over 4961.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2335, pruned_loss=0.04877, over 972694.11 frames.], batch size: 35, lr: 5.82e-04 +2022-05-04 11:23:41,623 INFO [train.py:715] (7/8) Epoch 3, batch 1800, loss[loss=0.1542, simple_loss=0.2304, pruned_loss=0.03902, over 4905.00 frames.], tot_loss[loss=0.1649, simple_loss=0.233, pruned_loss=0.04839, over 973137.47 frames.], batch size: 19, lr: 5.82e-04 +2022-05-04 11:24:20,323 INFO [train.py:715] (7/8) Epoch 3, batch 1850, loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03123, over 4848.00 frames.], tot_loss[loss=0.1638, simple_loss=0.232, pruned_loss=0.04781, over 972728.09 frames.], batch size: 34, lr: 5.82e-04 +2022-05-04 11:25:00,293 INFO [train.py:715] (7/8) Epoch 3, batch 1900, loss[loss=0.1365, simple_loss=0.2126, pruned_loss=0.03023, over 4817.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2324, pruned_loss=0.04841, over 972956.86 frames.], batch size: 25, lr: 5.82e-04 +2022-05-04 11:25:39,889 INFO [train.py:715] (7/8) Epoch 3, batch 1950, loss[loss=0.1449, simple_loss=0.2198, pruned_loss=0.03507, over 4980.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2333, pruned_loss=0.04893, over 973565.84 frames.], batch size: 28, lr: 5.81e-04 +2022-05-04 11:26:18,807 INFO [train.py:715] (7/8) Epoch 3, batch 2000, loss[loss=0.1633, simple_loss=0.2236, pruned_loss=0.05147, over 4781.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2337, pruned_loss=0.04911, over 973923.59 frames.], batch size: 14, lr: 5.81e-04 +2022-05-04 11:26:58,014 INFO [train.py:715] (7/8) Epoch 3, batch 2050, loss[loss=0.2033, simple_loss=0.2472, pruned_loss=0.07968, over 4840.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2336, pruned_loss=0.04965, over 973639.22 frames.], batch size: 15, lr: 5.81e-04 +2022-05-04 11:27:37,800 INFO [train.py:715] (7/8) Epoch 3, batch 2100, loss[loss=0.1765, simple_loss=0.2443, pruned_loss=0.05434, over 4849.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2338, 
pruned_loss=0.04958, over 973183.97 frames.], batch size: 38, lr: 5.81e-04 +2022-05-04 11:28:17,053 INFO [train.py:715] (7/8) Epoch 3, batch 2150, loss[loss=0.1927, simple_loss=0.2506, pruned_loss=0.06737, over 4705.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2342, pruned_loss=0.05004, over 973991.18 frames.], batch size: 15, lr: 5.81e-04 +2022-05-04 11:28:55,724 INFO [train.py:715] (7/8) Epoch 3, batch 2200, loss[loss=0.2023, simple_loss=0.2578, pruned_loss=0.0734, over 4861.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2345, pruned_loss=0.04998, over 973707.83 frames.], batch size: 38, lr: 5.81e-04 +2022-05-04 11:29:35,105 INFO [train.py:715] (7/8) Epoch 3, batch 2250, loss[loss=0.1329, simple_loss=0.2065, pruned_loss=0.02971, over 4906.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2351, pruned_loss=0.05011, over 974379.17 frames.], batch size: 19, lr: 5.81e-04 +2022-05-04 11:30:14,524 INFO [train.py:715] (7/8) Epoch 3, batch 2300, loss[loss=0.1735, simple_loss=0.247, pruned_loss=0.04995, over 4690.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2346, pruned_loss=0.04958, over 974205.04 frames.], batch size: 15, lr: 5.80e-04 +2022-05-04 11:30:53,582 INFO [train.py:715] (7/8) Epoch 3, batch 2350, loss[loss=0.1095, simple_loss=0.1898, pruned_loss=0.01466, over 4892.00 frames.], tot_loss[loss=0.165, simple_loss=0.233, pruned_loss=0.04851, over 974822.65 frames.], batch size: 19, lr: 5.80e-04 +2022-05-04 11:31:32,375 INFO [train.py:715] (7/8) Epoch 3, batch 2400, loss[loss=0.1398, simple_loss=0.2047, pruned_loss=0.03749, over 4908.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2337, pruned_loss=0.0493, over 973493.16 frames.], batch size: 18, lr: 5.80e-04 +2022-05-04 11:32:12,610 INFO [train.py:715] (7/8) Epoch 3, batch 2450, loss[loss=0.14, simple_loss=0.2072, pruned_loss=0.03645, over 4688.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2335, pruned_loss=0.0489, over 974461.09 frames.], batch size: 15, lr: 5.80e-04 +2022-05-04 11:32:51,968 INFO [train.py:715] (7/8) Epoch 3, batch 2500, loss[loss=0.1464, simple_loss=0.224, pruned_loss=0.03438, over 4874.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2334, pruned_loss=0.04893, over 974421.58 frames.], batch size: 22, lr: 5.80e-04 +2022-05-04 11:33:30,791 INFO [train.py:715] (7/8) Epoch 3, batch 2550, loss[loss=0.1629, simple_loss=0.2312, pruned_loss=0.04734, over 4825.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2335, pruned_loss=0.04888, over 974183.35 frames.], batch size: 12, lr: 5.80e-04 +2022-05-04 11:34:11,449 INFO [train.py:715] (7/8) Epoch 3, batch 2600, loss[loss=0.1467, simple_loss=0.2166, pruned_loss=0.03843, over 4980.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2339, pruned_loss=0.04937, over 973904.62 frames.], batch size: 40, lr: 5.80e-04 +2022-05-04 11:34:51,565 INFO [train.py:715] (7/8) Epoch 3, batch 2650, loss[loss=0.1504, simple_loss=0.2212, pruned_loss=0.03982, over 4775.00 frames.], tot_loss[loss=0.167, simple_loss=0.2344, pruned_loss=0.04978, over 973618.82 frames.], batch size: 18, lr: 5.80e-04 +2022-05-04 11:35:30,758 INFO [train.py:715] (7/8) Epoch 3, batch 2700, loss[loss=0.1574, simple_loss=0.2392, pruned_loss=0.03784, over 4918.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2363, pruned_loss=0.05047, over 973803.15 frames.], batch size: 18, lr: 5.79e-04 +2022-05-04 11:36:10,255 INFO [train.py:715] (7/8) Epoch 3, batch 2750, loss[loss=0.1328, simple_loss=0.2129, pruned_loss=0.02637, over 4903.00 frames.], tot_loss[loss=0.1678, simple_loss=0.236, pruned_loss=0.04983, over 973675.42 
frames.], batch size: 17, lr: 5.79e-04 +2022-05-04 11:36:50,510 INFO [train.py:715] (7/8) Epoch 3, batch 2800, loss[loss=0.1831, simple_loss=0.2358, pruned_loss=0.06517, over 4699.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2352, pruned_loss=0.05001, over 973226.40 frames.], batch size: 15, lr: 5.79e-04 +2022-05-04 11:37:29,797 INFO [train.py:715] (7/8) Epoch 3, batch 2850, loss[loss=0.146, simple_loss=0.2151, pruned_loss=0.03847, over 4835.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2349, pruned_loss=0.04935, over 973877.11 frames.], batch size: 30, lr: 5.79e-04 +2022-05-04 11:38:08,474 INFO [train.py:715] (7/8) Epoch 3, batch 2900, loss[loss=0.1669, simple_loss=0.2254, pruned_loss=0.0542, over 4788.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2352, pruned_loss=0.04911, over 973782.56 frames.], batch size: 18, lr: 5.79e-04 +2022-05-04 11:38:48,427 INFO [train.py:715] (7/8) Epoch 3, batch 2950, loss[loss=0.1459, simple_loss=0.2168, pruned_loss=0.03748, over 4870.00 frames.], tot_loss[loss=0.1668, simple_loss=0.235, pruned_loss=0.04928, over 974083.80 frames.], batch size: 16, lr: 5.79e-04 +2022-05-04 11:39:28,054 INFO [train.py:715] (7/8) Epoch 3, batch 3000, loss[loss=0.1598, simple_loss=0.2307, pruned_loss=0.0445, over 4954.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2338, pruned_loss=0.04853, over 973654.17 frames.], batch size: 15, lr: 5.79e-04 +2022-05-04 11:39:28,054 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 11:39:36,790 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1153, simple_loss=0.2015, pruned_loss=0.0146, over 914524.00 frames. +2022-05-04 11:40:16,888 INFO [train.py:715] (7/8) Epoch 3, batch 3050, loss[loss=0.1682, simple_loss=0.2373, pruned_loss=0.04959, over 4841.00 frames.], tot_loss[loss=0.1649, simple_loss=0.233, pruned_loss=0.0484, over 973377.37 frames.], batch size: 15, lr: 5.78e-04 +2022-05-04 11:40:55,671 INFO [train.py:715] (7/8) Epoch 3, batch 3100, loss[loss=0.1843, simple_loss=0.2553, pruned_loss=0.05662, over 4893.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2337, pruned_loss=0.04907, over 972066.57 frames.], batch size: 19, lr: 5.78e-04 +2022-05-04 11:41:35,057 INFO [train.py:715] (7/8) Epoch 3, batch 3150, loss[loss=0.1953, simple_loss=0.2529, pruned_loss=0.06887, over 4779.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2339, pruned_loss=0.04925, over 972404.09 frames.], batch size: 17, lr: 5.78e-04 +2022-05-04 11:42:14,858 INFO [train.py:715] (7/8) Epoch 3, batch 3200, loss[loss=0.1683, simple_loss=0.2541, pruned_loss=0.0412, over 4776.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2333, pruned_loss=0.049, over 971829.95 frames.], batch size: 17, lr: 5.78e-04 +2022-05-04 11:42:54,659 INFO [train.py:715] (7/8) Epoch 3, batch 3250, loss[loss=0.1961, simple_loss=0.254, pruned_loss=0.06913, over 4764.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2338, pruned_loss=0.04931, over 972248.57 frames.], batch size: 19, lr: 5.78e-04 +2022-05-04 11:43:33,197 INFO [train.py:715] (7/8) Epoch 3, batch 3300, loss[loss=0.1795, simple_loss=0.251, pruned_loss=0.05396, over 4952.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2333, pruned_loss=0.04893, over 971903.00 frames.], batch size: 23, lr: 5.78e-04 +2022-05-04 11:44:13,011 INFO [train.py:715] (7/8) Epoch 3, batch 3350, loss[loss=0.1769, simple_loss=0.2478, pruned_loss=0.05298, over 4849.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2334, pruned_loss=0.04877, over 971573.51 frames.], batch size: 30, lr: 5.78e-04 +2022-05-04 11:44:52,486 INFO [train.py:715] 
(7/8) Epoch 3, batch 3400, loss[loss=0.2082, simple_loss=0.2731, pruned_loss=0.07169, over 4802.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2334, pruned_loss=0.04874, over 971976.77 frames.], batch size: 21, lr: 5.77e-04 +2022-05-04 11:45:31,173 INFO [train.py:715] (7/8) Epoch 3, batch 3450, loss[loss=0.1559, simple_loss=0.2087, pruned_loss=0.05159, over 4784.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2339, pruned_loss=0.0492, over 971237.17 frames.], batch size: 14, lr: 5.77e-04 +2022-05-04 11:46:10,505 INFO [train.py:715] (7/8) Epoch 3, batch 3500, loss[loss=0.1634, simple_loss=0.2321, pruned_loss=0.04732, over 4879.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2338, pruned_loss=0.04918, over 972084.84 frames.], batch size: 22, lr: 5.77e-04 +2022-05-04 11:46:50,811 INFO [train.py:715] (7/8) Epoch 3, batch 3550, loss[loss=0.1762, simple_loss=0.2415, pruned_loss=0.05542, over 4782.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2328, pruned_loss=0.04851, over 971960.55 frames.], batch size: 12, lr: 5.77e-04 +2022-05-04 11:47:30,665 INFO [train.py:715] (7/8) Epoch 3, batch 3600, loss[loss=0.1521, simple_loss=0.213, pruned_loss=0.04555, over 4818.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2323, pruned_loss=0.04818, over 972401.81 frames.], batch size: 13, lr: 5.77e-04 +2022-05-04 11:48:09,903 INFO [train.py:715] (7/8) Epoch 3, batch 3650, loss[loss=0.1552, simple_loss=0.2331, pruned_loss=0.03868, over 4944.00 frames.], tot_loss[loss=0.165, simple_loss=0.2327, pruned_loss=0.0486, over 972095.86 frames.], batch size: 29, lr: 5.77e-04 +2022-05-04 11:48:49,626 INFO [train.py:715] (7/8) Epoch 3, batch 3700, loss[loss=0.1842, simple_loss=0.26, pruned_loss=0.05422, over 4905.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2324, pruned_loss=0.04844, over 971833.06 frames.], batch size: 39, lr: 5.77e-04 +2022-05-04 11:49:29,646 INFO [train.py:715] (7/8) Epoch 3, batch 3750, loss[loss=0.1835, simple_loss=0.2465, pruned_loss=0.06022, over 4791.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2325, pruned_loss=0.04848, over 971149.83 frames.], batch size: 18, lr: 5.77e-04 +2022-05-04 11:50:09,334 INFO [train.py:715] (7/8) Epoch 3, batch 3800, loss[loss=0.1419, simple_loss=0.2151, pruned_loss=0.03439, over 4928.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2329, pruned_loss=0.04846, over 970915.99 frames.], batch size: 29, lr: 5.76e-04 +2022-05-04 11:50:48,719 INFO [train.py:715] (7/8) Epoch 3, batch 3850, loss[loss=0.2102, simple_loss=0.264, pruned_loss=0.07822, over 4965.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2329, pruned_loss=0.0488, over 971090.57 frames.], batch size: 15, lr: 5.76e-04 +2022-05-04 11:51:28,565 INFO [train.py:715] (7/8) Epoch 3, batch 3900, loss[loss=0.1958, simple_loss=0.2469, pruned_loss=0.07236, over 4791.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2331, pruned_loss=0.04914, over 971904.75 frames.], batch size: 17, lr: 5.76e-04 +2022-05-04 11:52:08,064 INFO [train.py:715] (7/8) Epoch 3, batch 3950, loss[loss=0.1665, simple_loss=0.2306, pruned_loss=0.05117, over 4887.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04808, over 971905.42 frames.], batch size: 19, lr: 5.76e-04 +2022-05-04 11:52:47,083 INFO [train.py:715] (7/8) Epoch 3, batch 4000, loss[loss=0.194, simple_loss=0.2523, pruned_loss=0.06783, over 4770.00 frames.], tot_loss[loss=0.165, simple_loss=0.2331, pruned_loss=0.04844, over 972947.76 frames.], batch size: 17, lr: 5.76e-04 +2022-05-04 11:53:26,530 INFO [train.py:715] (7/8) Epoch 3, batch 4050, 
loss[loss=0.1779, simple_loss=0.2363, pruned_loss=0.05979, over 4851.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2335, pruned_loss=0.04873, over 973006.34 frames.], batch size: 13, lr: 5.76e-04 +2022-05-04 11:54:06,708 INFO [train.py:715] (7/8) Epoch 3, batch 4100, loss[loss=0.1506, simple_loss=0.2213, pruned_loss=0.03994, over 4742.00 frames.], tot_loss[loss=0.1648, simple_loss=0.233, pruned_loss=0.04828, over 973146.15 frames.], batch size: 16, lr: 5.76e-04 +2022-05-04 11:54:45,658 INFO [train.py:715] (7/8) Epoch 3, batch 4150, loss[loss=0.1971, simple_loss=0.258, pruned_loss=0.0681, over 4883.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2337, pruned_loss=0.04861, over 973195.19 frames.], batch size: 16, lr: 5.76e-04 +2022-05-04 11:55:24,492 INFO [train.py:715] (7/8) Epoch 3, batch 4200, loss[loss=0.1695, simple_loss=0.2366, pruned_loss=0.05114, over 4813.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2336, pruned_loss=0.04813, over 972921.68 frames.], batch size: 26, lr: 5.75e-04 +2022-05-04 11:56:04,945 INFO [train.py:715] (7/8) Epoch 3, batch 4250, loss[loss=0.1789, simple_loss=0.2439, pruned_loss=0.05689, over 4793.00 frames.], tot_loss[loss=0.1635, simple_loss=0.232, pruned_loss=0.04753, over 972332.08 frames.], batch size: 21, lr: 5.75e-04 +2022-05-04 11:56:44,320 INFO [train.py:715] (7/8) Epoch 3, batch 4300, loss[loss=0.128, simple_loss=0.1982, pruned_loss=0.02895, over 4876.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2319, pruned_loss=0.04745, over 972796.78 frames.], batch size: 13, lr: 5.75e-04 +2022-05-04 11:57:23,797 INFO [train.py:715] (7/8) Epoch 3, batch 4350, loss[loss=0.1586, simple_loss=0.2232, pruned_loss=0.04694, over 4953.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2321, pruned_loss=0.04786, over 972560.28 frames.], batch size: 21, lr: 5.75e-04 +2022-05-04 11:58:03,482 INFO [train.py:715] (7/8) Epoch 3, batch 4400, loss[loss=0.161, simple_loss=0.2284, pruned_loss=0.0468, over 4778.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2317, pruned_loss=0.04794, over 972424.23 frames.], batch size: 14, lr: 5.75e-04 +2022-05-04 11:58:43,523 INFO [train.py:715] (7/8) Epoch 3, batch 4450, loss[loss=0.1444, simple_loss=0.2143, pruned_loss=0.03729, over 4853.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2326, pruned_loss=0.04853, over 971520.01 frames.], batch size: 20, lr: 5.75e-04 +2022-05-04 11:59:22,569 INFO [train.py:715] (7/8) Epoch 3, batch 4500, loss[loss=0.1653, simple_loss=0.2443, pruned_loss=0.04316, over 4868.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04806, over 971610.97 frames.], batch size: 16, lr: 5.75e-04 +2022-05-04 12:00:01,995 INFO [train.py:715] (7/8) Epoch 3, batch 4550, loss[loss=0.1605, simple_loss=0.2292, pruned_loss=0.04588, over 4827.00 frames.], tot_loss[loss=0.1639, simple_loss=0.232, pruned_loss=0.04788, over 972663.38 frames.], batch size: 15, lr: 5.74e-04 +2022-05-04 12:00:41,748 INFO [train.py:715] (7/8) Epoch 3, batch 4600, loss[loss=0.1542, simple_loss=0.2243, pruned_loss=0.04199, over 4786.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2321, pruned_loss=0.04773, over 971379.73 frames.], batch size: 14, lr: 5.74e-04 +2022-05-04 12:01:21,004 INFO [train.py:715] (7/8) Epoch 3, batch 4650, loss[loss=0.1372, simple_loss=0.2078, pruned_loss=0.03333, over 4808.00 frames.], tot_loss[loss=0.1626, simple_loss=0.231, pruned_loss=0.04712, over 971090.91 frames.], batch size: 12, lr: 5.74e-04 +2022-05-04 12:01:59,935 INFO [train.py:715] (7/8) Epoch 3, batch 4700, loss[loss=0.1655, simple_loss=0.2266, 
pruned_loss=0.05223, over 4804.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2305, pruned_loss=0.04709, over 971691.95 frames.], batch size: 25, lr: 5.74e-04 +2022-05-04 12:02:39,138 INFO [train.py:715] (7/8) Epoch 3, batch 4750, loss[loss=0.1441, simple_loss=0.223, pruned_loss=0.03264, over 4888.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04712, over 972794.76 frames.], batch size: 19, lr: 5.74e-04 +2022-05-04 12:03:18,743 INFO [train.py:715] (7/8) Epoch 3, batch 4800, loss[loss=0.1452, simple_loss=0.2301, pruned_loss=0.03015, over 4901.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2314, pruned_loss=0.04757, over 973508.02 frames.], batch size: 19, lr: 5.74e-04 +2022-05-04 12:03:58,126 INFO [train.py:715] (7/8) Epoch 3, batch 4850, loss[loss=0.1853, simple_loss=0.2512, pruned_loss=0.05968, over 4985.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2321, pruned_loss=0.04824, over 974194.23 frames.], batch size: 28, lr: 5.74e-04 +2022-05-04 12:04:36,948 INFO [train.py:715] (7/8) Epoch 3, batch 4900, loss[loss=0.1644, simple_loss=0.2368, pruned_loss=0.04603, over 4878.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2326, pruned_loss=0.04857, over 973904.11 frames.], batch size: 22, lr: 5.74e-04 +2022-05-04 12:05:16,865 INFO [train.py:715] (7/8) Epoch 3, batch 4950, loss[loss=0.17, simple_loss=0.2376, pruned_loss=0.0512, over 4803.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2319, pruned_loss=0.04825, over 973317.34 frames.], batch size: 21, lr: 5.73e-04 +2022-05-04 12:05:56,324 INFO [train.py:715] (7/8) Epoch 3, batch 5000, loss[loss=0.1675, simple_loss=0.2465, pruned_loss=0.04422, over 4813.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2319, pruned_loss=0.04793, over 973250.57 frames.], batch size: 26, lr: 5.73e-04 +2022-05-04 12:06:35,121 INFO [train.py:715] (7/8) Epoch 3, batch 5050, loss[loss=0.1568, simple_loss=0.2304, pruned_loss=0.04158, over 4989.00 frames.], tot_loss[loss=0.1641, simple_loss=0.232, pruned_loss=0.0481, over 973789.20 frames.], batch size: 28, lr: 5.73e-04 +2022-05-04 12:07:14,490 INFO [train.py:715] (7/8) Epoch 3, batch 5100, loss[loss=0.1686, simple_loss=0.2375, pruned_loss=0.04986, over 4862.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2325, pruned_loss=0.0484, over 973067.10 frames.], batch size: 30, lr: 5.73e-04 +2022-05-04 12:07:54,248 INFO [train.py:715] (7/8) Epoch 3, batch 5150, loss[loss=0.1451, simple_loss=0.2323, pruned_loss=0.02893, over 4897.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2314, pruned_loss=0.04723, over 972470.22 frames.], batch size: 19, lr: 5.73e-04 +2022-05-04 12:08:32,993 INFO [train.py:715] (7/8) Epoch 3, batch 5200, loss[loss=0.1751, simple_loss=0.2288, pruned_loss=0.06065, over 4807.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2319, pruned_loss=0.04768, over 971902.31 frames.], batch size: 13, lr: 5.73e-04 +2022-05-04 12:09:12,112 INFO [train.py:715] (7/8) Epoch 3, batch 5250, loss[loss=0.164, simple_loss=0.2406, pruned_loss=0.0437, over 4866.00 frames.], tot_loss[loss=0.165, simple_loss=0.2331, pruned_loss=0.04843, over 973662.75 frames.], batch size: 22, lr: 5.73e-04 +2022-05-04 12:09:52,199 INFO [train.py:715] (7/8) Epoch 3, batch 5300, loss[loss=0.165, simple_loss=0.2298, pruned_loss=0.05014, over 4914.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2315, pruned_loss=0.04756, over 973294.80 frames.], batch size: 19, lr: 5.72e-04 +2022-05-04 12:10:31,374 INFO [train.py:715] (7/8) Epoch 3, batch 5350, loss[loss=0.1647, simple_loss=0.2343, pruned_loss=0.04753, over 4877.00 
frames.], tot_loss[loss=0.1642, simple_loss=0.2324, pruned_loss=0.04797, over 972725.09 frames.], batch size: 39, lr: 5.72e-04 +2022-05-04 12:11:10,308 INFO [train.py:715] (7/8) Epoch 3, batch 5400, loss[loss=0.1624, simple_loss=0.2398, pruned_loss=0.04254, over 4890.00 frames.], tot_loss[loss=0.165, simple_loss=0.2332, pruned_loss=0.04841, over 972268.14 frames.], batch size: 22, lr: 5.72e-04 +2022-05-04 12:11:49,949 INFO [train.py:715] (7/8) Epoch 3, batch 5450, loss[loss=0.1632, simple_loss=0.2319, pruned_loss=0.04722, over 4920.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2342, pruned_loss=0.0492, over 973020.04 frames.], batch size: 17, lr: 5.72e-04 +2022-05-04 12:12:30,204 INFO [train.py:715] (7/8) Epoch 3, batch 5500, loss[loss=0.1762, simple_loss=0.2537, pruned_loss=0.04937, over 4805.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2343, pruned_loss=0.04941, over 973224.63 frames.], batch size: 21, lr: 5.72e-04 +2022-05-04 12:13:09,481 INFO [train.py:715] (7/8) Epoch 3, batch 5550, loss[loss=0.1913, simple_loss=0.2563, pruned_loss=0.06311, over 4837.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2335, pruned_loss=0.04907, over 974378.64 frames.], batch size: 30, lr: 5.72e-04 +2022-05-04 12:13:49,880 INFO [train.py:715] (7/8) Epoch 3, batch 5600, loss[loss=0.1723, simple_loss=0.2401, pruned_loss=0.0523, over 4906.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2332, pruned_loss=0.04863, over 974534.97 frames.], batch size: 17, lr: 5.72e-04 +2022-05-04 12:14:29,650 INFO [train.py:715] (7/8) Epoch 3, batch 5650, loss[loss=0.1559, simple_loss=0.224, pruned_loss=0.04388, over 4774.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2319, pruned_loss=0.04766, over 973807.01 frames.], batch size: 19, lr: 5.72e-04 +2022-05-04 12:15:08,737 INFO [train.py:715] (7/8) Epoch 3, batch 5700, loss[loss=0.1709, simple_loss=0.2455, pruned_loss=0.04816, over 4987.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2314, pruned_loss=0.04717, over 973405.77 frames.], batch size: 15, lr: 5.71e-04 +2022-05-04 12:15:48,074 INFO [train.py:715] (7/8) Epoch 3, batch 5750, loss[loss=0.1388, simple_loss=0.222, pruned_loss=0.02779, over 4974.00 frames.], tot_loss[loss=0.163, simple_loss=0.2315, pruned_loss=0.04729, over 973852.26 frames.], batch size: 28, lr: 5.71e-04 +2022-05-04 12:16:27,886 INFO [train.py:715] (7/8) Epoch 3, batch 5800, loss[loss=0.1808, simple_loss=0.2461, pruned_loss=0.05778, over 4837.00 frames.], tot_loss[loss=0.163, simple_loss=0.2316, pruned_loss=0.04718, over 974449.47 frames.], batch size: 15, lr: 5.71e-04 +2022-05-04 12:17:07,630 INFO [train.py:715] (7/8) Epoch 3, batch 5850, loss[loss=0.1537, simple_loss=0.2314, pruned_loss=0.03797, over 4768.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2319, pruned_loss=0.04731, over 973822.72 frames.], batch size: 19, lr: 5.71e-04 +2022-05-04 12:17:46,986 INFO [train.py:715] (7/8) Epoch 3, batch 5900, loss[loss=0.1681, simple_loss=0.2409, pruned_loss=0.04762, over 4874.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2324, pruned_loss=0.04759, over 973400.44 frames.], batch size: 22, lr: 5.71e-04 +2022-05-04 12:18:26,962 INFO [train.py:715] (7/8) Epoch 3, batch 5950, loss[loss=0.1678, simple_loss=0.231, pruned_loss=0.05231, over 4698.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04804, over 972989.22 frames.], batch size: 15, lr: 5.71e-04 +2022-05-04 12:19:06,648 INFO [train.py:715] (7/8) Epoch 3, batch 6000, loss[loss=0.1926, simple_loss=0.2565, pruned_loss=0.06433, over 4872.00 frames.], tot_loss[loss=0.1643, 
simple_loss=0.2322, pruned_loss=0.04819, over 972340.73 frames.], batch size: 22, lr: 5.71e-04 +2022-05-04 12:19:06,649 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 12:19:15,397 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1149, simple_loss=0.2013, pruned_loss=0.01424, over 914524.00 frames. +2022-05-04 12:19:55,213 INFO [train.py:715] (7/8) Epoch 3, batch 6050, loss[loss=0.1445, simple_loss=0.2223, pruned_loss=0.03333, over 4790.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2328, pruned_loss=0.04812, over 971519.76 frames.], batch size: 18, lr: 5.71e-04 +2022-05-04 12:20:34,637 INFO [train.py:715] (7/8) Epoch 3, batch 6100, loss[loss=0.138, simple_loss=0.2023, pruned_loss=0.03687, over 4957.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2326, pruned_loss=0.04819, over 972145.36 frames.], batch size: 35, lr: 5.70e-04 +2022-05-04 12:21:13,565 INFO [train.py:715] (7/8) Epoch 3, batch 6150, loss[loss=0.1337, simple_loss=0.2031, pruned_loss=0.03217, over 4791.00 frames.], tot_loss[loss=0.164, simple_loss=0.232, pruned_loss=0.04798, over 972348.29 frames.], batch size: 24, lr: 5.70e-04 +2022-05-04 12:21:53,162 INFO [train.py:715] (7/8) Epoch 3, batch 6200, loss[loss=0.1625, simple_loss=0.2323, pruned_loss=0.04638, over 4755.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2312, pruned_loss=0.04719, over 971975.31 frames.], batch size: 16, lr: 5.70e-04 +2022-05-04 12:22:33,156 INFO [train.py:715] (7/8) Epoch 3, batch 6250, loss[loss=0.1843, simple_loss=0.254, pruned_loss=0.0573, over 4984.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2314, pruned_loss=0.04752, over 972525.09 frames.], batch size: 33, lr: 5.70e-04 +2022-05-04 12:23:12,507 INFO [train.py:715] (7/8) Epoch 3, batch 6300, loss[loss=0.1448, simple_loss=0.2238, pruned_loss=0.03283, over 4906.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2312, pruned_loss=0.0477, over 972059.95 frames.], batch size: 19, lr: 5.70e-04 +2022-05-04 12:23:51,738 INFO [train.py:715] (7/8) Epoch 3, batch 6350, loss[loss=0.1612, simple_loss=0.2455, pruned_loss=0.03841, over 4931.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2323, pruned_loss=0.0482, over 971740.85 frames.], batch size: 29, lr: 5.70e-04 +2022-05-04 12:24:31,949 INFO [train.py:715] (7/8) Epoch 3, batch 6400, loss[loss=0.1445, simple_loss=0.2235, pruned_loss=0.03271, over 4987.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2326, pruned_loss=0.04805, over 972253.03 frames.], batch size: 24, lr: 5.70e-04 +2022-05-04 12:25:11,504 INFO [train.py:715] (7/8) Epoch 3, batch 6450, loss[loss=0.1419, simple_loss=0.2071, pruned_loss=0.03833, over 4783.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2329, pruned_loss=0.04824, over 972453.67 frames.], batch size: 12, lr: 5.70e-04 +2022-05-04 12:25:50,484 INFO [train.py:715] (7/8) Epoch 3, batch 6500, loss[loss=0.1701, simple_loss=0.238, pruned_loss=0.05107, over 4735.00 frames.], tot_loss[loss=0.165, simple_loss=0.233, pruned_loss=0.04848, over 972570.54 frames.], batch size: 16, lr: 5.69e-04 +2022-05-04 12:26:30,136 INFO [train.py:715] (7/8) Epoch 3, batch 6550, loss[loss=0.1812, simple_loss=0.2401, pruned_loss=0.06113, over 4977.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2333, pruned_loss=0.04863, over 973275.34 frames.], batch size: 14, lr: 5.69e-04 +2022-05-04 12:27:09,932 INFO [train.py:715] (7/8) Epoch 3, batch 6600, loss[loss=0.1923, simple_loss=0.2506, pruned_loss=0.06702, over 4821.00 frames.], tot_loss[loss=0.165, simple_loss=0.2329, pruned_loss=0.04853, over 973346.92 frames.], batch size: 25, lr: 
5.69e-04 +2022-05-04 12:27:49,186 INFO [train.py:715] (7/8) Epoch 3, batch 6650, loss[loss=0.178, simple_loss=0.2412, pruned_loss=0.05736, over 4962.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2321, pruned_loss=0.04808, over 973406.48 frames.], batch size: 14, lr: 5.69e-04 +2022-05-04 12:28:28,360 INFO [train.py:715] (7/8) Epoch 3, batch 6700, loss[loss=0.1766, simple_loss=0.2438, pruned_loss=0.05472, over 4911.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2318, pruned_loss=0.04854, over 974093.85 frames.], batch size: 19, lr: 5.69e-04 +2022-05-04 12:29:08,704 INFO [train.py:715] (7/8) Epoch 3, batch 6750, loss[loss=0.1689, simple_loss=0.2439, pruned_loss=0.04693, over 4924.00 frames.], tot_loss[loss=0.164, simple_loss=0.2316, pruned_loss=0.0482, over 973647.34 frames.], batch size: 17, lr: 5.69e-04 +2022-05-04 12:29:47,745 INFO [train.py:715] (7/8) Epoch 3, batch 6800, loss[loss=0.148, simple_loss=0.2232, pruned_loss=0.03638, over 4904.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2313, pruned_loss=0.04801, over 974065.54 frames.], batch size: 39, lr: 5.69e-04 +2022-05-04 12:30:27,120 INFO [train.py:715] (7/8) Epoch 3, batch 6850, loss[loss=0.1419, simple_loss=0.2176, pruned_loss=0.03304, over 4935.00 frames.], tot_loss[loss=0.163, simple_loss=0.2309, pruned_loss=0.0475, over 973599.66 frames.], batch size: 29, lr: 5.68e-04 +2022-05-04 12:31:06,822 INFO [train.py:715] (7/8) Epoch 3, batch 6900, loss[loss=0.1344, simple_loss=0.2154, pruned_loss=0.02668, over 4947.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2307, pruned_loss=0.04716, over 973267.54 frames.], batch size: 24, lr: 5.68e-04 +2022-05-04 12:31:46,653 INFO [train.py:715] (7/8) Epoch 3, batch 6950, loss[loss=0.1506, simple_loss=0.2244, pruned_loss=0.03841, over 4760.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2312, pruned_loss=0.04728, over 972049.78 frames.], batch size: 19, lr: 5.68e-04 +2022-05-04 12:32:25,811 INFO [train.py:715] (7/8) Epoch 3, batch 7000, loss[loss=0.1755, simple_loss=0.2382, pruned_loss=0.05642, over 4928.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2329, pruned_loss=0.04829, over 971950.96 frames.], batch size: 39, lr: 5.68e-04 +2022-05-04 12:33:05,832 INFO [train.py:715] (7/8) Epoch 3, batch 7050, loss[loss=0.1559, simple_loss=0.2125, pruned_loss=0.0496, over 4795.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2318, pruned_loss=0.04806, over 971371.32 frames.], batch size: 24, lr: 5.68e-04 +2022-05-04 12:33:45,723 INFO [train.py:715] (7/8) Epoch 3, batch 7100, loss[loss=0.1818, simple_loss=0.2585, pruned_loss=0.05253, over 4983.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2326, pruned_loss=0.0485, over 972165.66 frames.], batch size: 28, lr: 5.68e-04 +2022-05-04 12:34:24,808 INFO [train.py:715] (7/8) Epoch 3, batch 7150, loss[loss=0.1547, simple_loss=0.2322, pruned_loss=0.0386, over 4906.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2321, pruned_loss=0.04823, over 972261.89 frames.], batch size: 17, lr: 5.68e-04 +2022-05-04 12:35:04,379 INFO [train.py:715] (7/8) Epoch 3, batch 7200, loss[loss=0.2011, simple_loss=0.2628, pruned_loss=0.06967, over 4944.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2323, pruned_loss=0.04795, over 972520.52 frames.], batch size: 39, lr: 5.68e-04 +2022-05-04 12:35:44,151 INFO [train.py:715] (7/8) Epoch 3, batch 7250, loss[loss=0.1847, simple_loss=0.2406, pruned_loss=0.06441, over 4809.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2319, pruned_loss=0.04799, over 972748.02 frames.], batch size: 12, lr: 5.67e-04 +2022-05-04 12:36:23,544 INFO 
[train.py:715] (7/8) Epoch 3, batch 7300, loss[loss=0.1921, simple_loss=0.2603, pruned_loss=0.0619, over 4878.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2322, pruned_loss=0.04837, over 973464.78 frames.], batch size: 22, lr: 5.67e-04 +2022-05-04 12:37:03,016 INFO [train.py:715] (7/8) Epoch 3, batch 7350, loss[loss=0.1511, simple_loss=0.2171, pruned_loss=0.04252, over 4775.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2324, pruned_loss=0.04825, over 973158.85 frames.], batch size: 18, lr: 5.67e-04 +2022-05-04 12:37:42,377 INFO [train.py:715] (7/8) Epoch 3, batch 7400, loss[loss=0.1677, simple_loss=0.2349, pruned_loss=0.05024, over 4979.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2326, pruned_loss=0.04834, over 973368.83 frames.], batch size: 28, lr: 5.67e-04 +2022-05-04 12:38:22,635 INFO [train.py:715] (7/8) Epoch 3, batch 7450, loss[loss=0.1493, simple_loss=0.219, pruned_loss=0.0398, over 4918.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2319, pruned_loss=0.04772, over 973682.14 frames.], batch size: 18, lr: 5.67e-04 +2022-05-04 12:39:01,780 INFO [train.py:715] (7/8) Epoch 3, batch 7500, loss[loss=0.1219, simple_loss=0.1935, pruned_loss=0.02512, over 4812.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2317, pruned_loss=0.0477, over 973515.84 frames.], batch size: 12, lr: 5.67e-04 +2022-05-04 12:39:41,045 INFO [train.py:715] (7/8) Epoch 3, batch 7550, loss[loss=0.1369, simple_loss=0.217, pruned_loss=0.02837, over 4860.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2318, pruned_loss=0.04822, over 973122.04 frames.], batch size: 20, lr: 5.67e-04 +2022-05-04 12:40:22,801 INFO [train.py:715] (7/8) Epoch 3, batch 7600, loss[loss=0.1903, simple_loss=0.2449, pruned_loss=0.06788, over 4819.00 frames.], tot_loss[loss=0.164, simple_loss=0.2318, pruned_loss=0.04807, over 971311.95 frames.], batch size: 15, lr: 5.67e-04 +2022-05-04 12:41:02,152 INFO [train.py:715] (7/8) Epoch 3, batch 7650, loss[loss=0.1795, simple_loss=0.2478, pruned_loss=0.05554, over 4690.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2321, pruned_loss=0.0479, over 970967.54 frames.], batch size: 15, lr: 5.66e-04 +2022-05-04 12:41:41,418 INFO [train.py:715] (7/8) Epoch 3, batch 7700, loss[loss=0.1498, simple_loss=0.2217, pruned_loss=0.03893, over 4936.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2323, pruned_loss=0.04761, over 971724.03 frames.], batch size: 23, lr: 5.66e-04 +2022-05-04 12:42:20,887 INFO [train.py:715] (7/8) Epoch 3, batch 7750, loss[loss=0.1678, simple_loss=0.2354, pruned_loss=0.05012, over 4899.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2337, pruned_loss=0.04861, over 971539.66 frames.], batch size: 17, lr: 5.66e-04 +2022-05-04 12:43:00,216 INFO [train.py:715] (7/8) Epoch 3, batch 7800, loss[loss=0.1776, simple_loss=0.2471, pruned_loss=0.05407, over 4796.00 frames.], tot_loss[loss=0.165, simple_loss=0.2333, pruned_loss=0.0484, over 970908.72 frames.], batch size: 21, lr: 5.66e-04 +2022-05-04 12:43:38,790 INFO [train.py:715] (7/8) Epoch 3, batch 7850, loss[loss=0.1661, simple_loss=0.2338, pruned_loss=0.04924, over 4797.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2329, pruned_loss=0.04827, over 970595.68 frames.], batch size: 24, lr: 5.66e-04 +2022-05-04 12:44:18,372 INFO [train.py:715] (7/8) Epoch 3, batch 7900, loss[loss=0.1711, simple_loss=0.2322, pruned_loss=0.05504, over 4972.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2323, pruned_loss=0.04835, over 971485.34 frames.], batch size: 35, lr: 5.66e-04 +2022-05-04 12:44:58,145 INFO [train.py:715] (7/8) Epoch 3, batch 
7950, loss[loss=0.1487, simple_loss=0.2144, pruned_loss=0.04147, over 4885.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2332, pruned_loss=0.049, over 971375.56 frames.], batch size: 22, lr: 5.66e-04 +2022-05-04 12:45:36,728 INFO [train.py:715] (7/8) Epoch 3, batch 8000, loss[loss=0.1932, simple_loss=0.2616, pruned_loss=0.06239, over 4921.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2342, pruned_loss=0.04946, over 971314.50 frames.], batch size: 18, lr: 5.66e-04 +2022-05-04 12:46:14,905 INFO [train.py:715] (7/8) Epoch 3, batch 8050, loss[loss=0.1554, simple_loss=0.2283, pruned_loss=0.04124, over 4895.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2335, pruned_loss=0.04881, over 971920.62 frames.], batch size: 19, lr: 5.65e-04 +2022-05-04 12:46:53,634 INFO [train.py:715] (7/8) Epoch 3, batch 8100, loss[loss=0.1661, simple_loss=0.2184, pruned_loss=0.05693, over 4773.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2325, pruned_loss=0.04857, over 971500.61 frames.], batch size: 17, lr: 5.65e-04 +2022-05-04 12:47:31,941 INFO [train.py:715] (7/8) Epoch 3, batch 8150, loss[loss=0.1695, simple_loss=0.2336, pruned_loss=0.05268, over 4777.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2323, pruned_loss=0.04852, over 971267.37 frames.], batch size: 19, lr: 5.65e-04 +2022-05-04 12:48:10,091 INFO [train.py:715] (7/8) Epoch 3, batch 8200, loss[loss=0.1574, simple_loss=0.2176, pruned_loss=0.04856, over 4950.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2322, pruned_loss=0.04817, over 970991.02 frames.], batch size: 21, lr: 5.65e-04 +2022-05-04 12:48:49,891 INFO [train.py:715] (7/8) Epoch 3, batch 8250, loss[loss=0.1424, simple_loss=0.2063, pruned_loss=0.03923, over 4787.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2323, pruned_loss=0.04834, over 971296.89 frames.], batch size: 14, lr: 5.65e-04 +2022-05-04 12:49:30,614 INFO [train.py:715] (7/8) Epoch 3, batch 8300, loss[loss=0.1949, simple_loss=0.2548, pruned_loss=0.0675, over 4841.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2314, pruned_loss=0.04776, over 971996.19 frames.], batch size: 13, lr: 5.65e-04 +2022-05-04 12:50:10,665 INFO [train.py:715] (7/8) Epoch 3, batch 8350, loss[loss=0.1501, simple_loss=0.2174, pruned_loss=0.04135, over 4917.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2311, pruned_loss=0.04781, over 972046.50 frames.], batch size: 18, lr: 5.65e-04 +2022-05-04 12:50:50,665 INFO [train.py:715] (7/8) Epoch 3, batch 8400, loss[loss=0.1631, simple_loss=0.2371, pruned_loss=0.04453, over 4902.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2317, pruned_loss=0.04809, over 972094.09 frames.], batch size: 17, lr: 5.65e-04 +2022-05-04 12:51:30,640 INFO [train.py:715] (7/8) Epoch 3, batch 8450, loss[loss=0.164, simple_loss=0.2285, pruned_loss=0.04979, over 4960.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2324, pruned_loss=0.04809, over 972461.35 frames.], batch size: 24, lr: 5.64e-04 +2022-05-04 12:52:10,871 INFO [train.py:715] (7/8) Epoch 3, batch 8500, loss[loss=0.1691, simple_loss=0.2254, pruned_loss=0.05638, over 4933.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2321, pruned_loss=0.04809, over 972384.76 frames.], batch size: 35, lr: 5.64e-04 +2022-05-04 12:52:49,924 INFO [train.py:715] (7/8) Epoch 3, batch 8550, loss[loss=0.1702, simple_loss=0.2361, pruned_loss=0.05215, over 4928.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2332, pruned_loss=0.04872, over 971758.88 frames.], batch size: 18, lr: 5.64e-04 +2022-05-04 12:53:31,543 INFO [train.py:715] (7/8) Epoch 3, batch 8600, loss[loss=0.1629, 
simple_loss=0.2239, pruned_loss=0.05094, over 4726.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2337, pruned_loss=0.04892, over 971693.63 frames.], batch size: 12, lr: 5.64e-04 +2022-05-04 12:54:13,124 INFO [train.py:715] (7/8) Epoch 3, batch 8650, loss[loss=0.1526, simple_loss=0.209, pruned_loss=0.04816, over 4937.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2325, pruned_loss=0.04813, over 971883.14 frames.], batch size: 23, lr: 5.64e-04 +2022-05-04 12:54:53,251 INFO [train.py:715] (7/8) Epoch 3, batch 8700, loss[loss=0.1877, simple_loss=0.2393, pruned_loss=0.06807, over 4872.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2328, pruned_loss=0.04841, over 971677.67 frames.], batch size: 16, lr: 5.64e-04 +2022-05-04 12:55:34,488 INFO [train.py:715] (7/8) Epoch 3, batch 8750, loss[loss=0.1675, simple_loss=0.2307, pruned_loss=0.05218, over 4968.00 frames.], tot_loss[loss=0.1646, simple_loss=0.233, pruned_loss=0.04814, over 971661.22 frames.], batch size: 35, lr: 5.64e-04 +2022-05-04 12:56:14,900 INFO [train.py:715] (7/8) Epoch 3, batch 8800, loss[loss=0.155, simple_loss=0.2285, pruned_loss=0.04077, over 4810.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2316, pruned_loss=0.04751, over 970993.34 frames.], batch size: 26, lr: 5.64e-04 +2022-05-04 12:56:55,633 INFO [train.py:715] (7/8) Epoch 3, batch 8850, loss[loss=0.1732, simple_loss=0.2467, pruned_loss=0.0498, over 4946.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2323, pruned_loss=0.048, over 971748.43 frames.], batch size: 23, lr: 5.63e-04 +2022-05-04 12:57:35,631 INFO [train.py:715] (7/8) Epoch 3, batch 8900, loss[loss=0.1738, simple_loss=0.2432, pruned_loss=0.05215, over 4907.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2326, pruned_loss=0.04814, over 972435.99 frames.], batch size: 19, lr: 5.63e-04 +2022-05-04 12:58:17,382 INFO [train.py:715] (7/8) Epoch 3, batch 8950, loss[loss=0.1548, simple_loss=0.2161, pruned_loss=0.04675, over 4766.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2317, pruned_loss=0.04821, over 972559.96 frames.], batch size: 14, lr: 5.63e-04 +2022-05-04 12:58:59,334 INFO [train.py:715] (7/8) Epoch 3, batch 9000, loss[loss=0.1952, simple_loss=0.2564, pruned_loss=0.06699, over 4824.00 frames.], tot_loss[loss=0.164, simple_loss=0.2312, pruned_loss=0.0484, over 972454.12 frames.], batch size: 15, lr: 5.63e-04 +2022-05-04 12:58:59,335 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 12:59:08,110 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1147, simple_loss=0.2006, pruned_loss=0.01442, over 914524.00 frames. 
+2022-05-04 12:59:49,675 INFO [train.py:715] (7/8) Epoch 3, batch 9050, loss[loss=0.1485, simple_loss=0.2314, pruned_loss=0.03283, over 4973.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2323, pruned_loss=0.04905, over 972298.41 frames.], batch size: 24, lr: 5.63e-04 +2022-05-04 13:00:30,623 INFO [train.py:715] (7/8) Epoch 3, batch 9100, loss[loss=0.2118, simple_loss=0.2582, pruned_loss=0.08271, over 4786.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2328, pruned_loss=0.04909, over 972093.46 frames.], batch size: 14, lr: 5.63e-04 +2022-05-04 13:01:11,922 INFO [train.py:715] (7/8) Epoch 3, batch 9150, loss[loss=0.2254, simple_loss=0.2712, pruned_loss=0.08982, over 4987.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2328, pruned_loss=0.04844, over 972604.11 frames.], batch size: 14, lr: 5.63e-04 +2022-05-04 13:01:53,286 INFO [train.py:715] (7/8) Epoch 3, batch 9200, loss[loss=0.1232, simple_loss=0.1888, pruned_loss=0.02881, over 4793.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2336, pruned_loss=0.04874, over 971945.20 frames.], batch size: 12, lr: 5.63e-04 +2022-05-04 13:02:34,664 INFO [train.py:715] (7/8) Epoch 3, batch 9250, loss[loss=0.1438, simple_loss=0.228, pruned_loss=0.02979, over 4957.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2335, pruned_loss=0.04884, over 971991.14 frames.], batch size: 24, lr: 5.62e-04 +2022-05-04 13:03:15,405 INFO [train.py:715] (7/8) Epoch 3, batch 9300, loss[loss=0.1299, simple_loss=0.2076, pruned_loss=0.0261, over 4980.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2324, pruned_loss=0.04845, over 971608.60 frames.], batch size: 28, lr: 5.62e-04 +2022-05-04 13:03:56,628 INFO [train.py:715] (7/8) Epoch 3, batch 9350, loss[loss=0.16, simple_loss=0.2286, pruned_loss=0.04574, over 4925.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2334, pruned_loss=0.04922, over 972046.42 frames.], batch size: 18, lr: 5.62e-04 +2022-05-04 13:04:38,913 INFO [train.py:715] (7/8) Epoch 3, batch 9400, loss[loss=0.1237, simple_loss=0.1951, pruned_loss=0.02617, over 4637.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2328, pruned_loss=0.04876, over 971851.91 frames.], batch size: 13, lr: 5.62e-04 +2022-05-04 13:05:19,293 INFO [train.py:715] (7/8) Epoch 3, batch 9450, loss[loss=0.1601, simple_loss=0.2275, pruned_loss=0.04637, over 4805.00 frames.], tot_loss[loss=0.1653, simple_loss=0.233, pruned_loss=0.04877, over 972924.91 frames.], batch size: 24, lr: 5.62e-04 +2022-05-04 13:06:00,820 INFO [train.py:715] (7/8) Epoch 3, batch 9500, loss[loss=0.163, simple_loss=0.2327, pruned_loss=0.04662, over 4746.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2327, pruned_loss=0.04836, over 972567.23 frames.], batch size: 16, lr: 5.62e-04 +2022-05-04 13:06:42,711 INFO [train.py:715] (7/8) Epoch 3, batch 9550, loss[loss=0.1324, simple_loss=0.206, pruned_loss=0.02942, over 4883.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2333, pruned_loss=0.04903, over 973056.97 frames.], batch size: 16, lr: 5.62e-04 +2022-05-04 13:07:24,304 INFO [train.py:715] (7/8) Epoch 3, batch 9600, loss[loss=0.1583, simple_loss=0.2186, pruned_loss=0.04901, over 4788.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2325, pruned_loss=0.04883, over 971612.62 frames.], batch size: 17, lr: 5.62e-04 +2022-05-04 13:08:05,435 INFO [train.py:715] (7/8) Epoch 3, batch 9650, loss[loss=0.1461, simple_loss=0.2217, pruned_loss=0.03522, over 4979.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2312, pruned_loss=0.04813, over 971329.48 frames.], batch size: 25, lr: 5.61e-04 +2022-05-04 13:08:46,927 INFO 
[train.py:715] (7/8) Epoch 3, batch 9700, loss[loss=0.1427, simple_loss=0.208, pruned_loss=0.03868, over 4980.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2321, pruned_loss=0.04824, over 971713.19 frames.], batch size: 15, lr: 5.61e-04 +2022-05-04 13:09:27,950 INFO [train.py:715] (7/8) Epoch 3, batch 9750, loss[loss=0.1551, simple_loss=0.2151, pruned_loss=0.04754, over 4978.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2322, pruned_loss=0.04828, over 971381.39 frames.], batch size: 15, lr: 5.61e-04 +2022-05-04 13:10:08,812 INFO [train.py:715] (7/8) Epoch 3, batch 9800, loss[loss=0.2096, simple_loss=0.2641, pruned_loss=0.07758, over 4946.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2326, pruned_loss=0.04839, over 972293.75 frames.], batch size: 21, lr: 5.61e-04 +2022-05-04 13:10:50,546 INFO [train.py:715] (7/8) Epoch 3, batch 9850, loss[loss=0.2149, simple_loss=0.2814, pruned_loss=0.07421, over 4987.00 frames.], tot_loss[loss=0.1647, simple_loss=0.233, pruned_loss=0.04818, over 972325.85 frames.], batch size: 31, lr: 5.61e-04 +2022-05-04 13:11:32,494 INFO [train.py:715] (7/8) Epoch 3, batch 9900, loss[loss=0.1809, simple_loss=0.2459, pruned_loss=0.05794, over 4979.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2333, pruned_loss=0.04843, over 971830.77 frames.], batch size: 35, lr: 5.61e-04 +2022-05-04 13:12:12,995 INFO [train.py:715] (7/8) Epoch 3, batch 9950, loss[loss=0.1428, simple_loss=0.2243, pruned_loss=0.03068, over 4814.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2324, pruned_loss=0.04787, over 971823.75 frames.], batch size: 27, lr: 5.61e-04 +2022-05-04 13:12:54,748 INFO [train.py:715] (7/8) Epoch 3, batch 10000, loss[loss=0.141, simple_loss=0.2073, pruned_loss=0.03731, over 4961.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.0481, over 972201.13 frames.], batch size: 14, lr: 5.61e-04 +2022-05-04 13:13:36,176 INFO [train.py:715] (7/8) Epoch 3, batch 10050, loss[loss=0.1496, simple_loss=0.2204, pruned_loss=0.03937, over 4782.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2331, pruned_loss=0.04851, over 972713.98 frames.], batch size: 18, lr: 5.61e-04 +2022-05-04 13:14:17,623 INFO [train.py:715] (7/8) Epoch 3, batch 10100, loss[loss=0.1736, simple_loss=0.2394, pruned_loss=0.05386, over 4818.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2322, pruned_loss=0.0485, over 972929.52 frames.], batch size: 27, lr: 5.60e-04 +2022-05-04 13:14:58,620 INFO [train.py:715] (7/8) Epoch 3, batch 10150, loss[loss=0.1375, simple_loss=0.1893, pruned_loss=0.0428, over 4789.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2332, pruned_loss=0.04913, over 973089.84 frames.], batch size: 12, lr: 5.60e-04 +2022-05-04 13:15:40,203 INFO [train.py:715] (7/8) Epoch 3, batch 10200, loss[loss=0.1893, simple_loss=0.2502, pruned_loss=0.06417, over 4947.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2319, pruned_loss=0.04818, over 973788.69 frames.], batch size: 21, lr: 5.60e-04 +2022-05-04 13:16:21,953 INFO [train.py:715] (7/8) Epoch 3, batch 10250, loss[loss=0.1741, simple_loss=0.2468, pruned_loss=0.0507, over 4989.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2317, pruned_loss=0.04754, over 973817.80 frames.], batch size: 25, lr: 5.60e-04 +2022-05-04 13:17:01,804 INFO [train.py:715] (7/8) Epoch 3, batch 10300, loss[loss=0.1386, simple_loss=0.2072, pruned_loss=0.03504, over 4937.00 frames.], tot_loss[loss=0.1641, simple_loss=0.232, pruned_loss=0.04806, over 972884.59 frames.], batch size: 18, lr: 5.60e-04 +2022-05-04 13:17:42,035 INFO [train.py:715] (7/8) Epoch 3, 
batch 10350, loss[loss=0.1931, simple_loss=0.2552, pruned_loss=0.06546, over 4950.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2319, pruned_loss=0.04816, over 972961.51 frames.], batch size: 39, lr: 5.60e-04 +2022-05-04 13:18:22,569 INFO [train.py:715] (7/8) Epoch 3, batch 10400, loss[loss=0.1354, simple_loss=0.2026, pruned_loss=0.03407, over 4970.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2323, pruned_loss=0.04895, over 972345.19 frames.], batch size: 35, lr: 5.60e-04 +2022-05-04 13:19:03,195 INFO [train.py:715] (7/8) Epoch 3, batch 10450, loss[loss=0.1233, simple_loss=0.1919, pruned_loss=0.02739, over 4839.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2318, pruned_loss=0.04834, over 971937.72 frames.], batch size: 15, lr: 5.60e-04 +2022-05-04 13:19:43,609 INFO [train.py:715] (7/8) Epoch 3, batch 10500, loss[loss=0.1693, simple_loss=0.2312, pruned_loss=0.05366, over 4977.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2317, pruned_loss=0.04831, over 971424.85 frames.], batch size: 15, lr: 5.59e-04 +2022-05-04 13:20:24,622 INFO [train.py:715] (7/8) Epoch 3, batch 10550, loss[loss=0.1389, simple_loss=0.2123, pruned_loss=0.03278, over 4971.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2318, pruned_loss=0.04844, over 971651.92 frames.], batch size: 15, lr: 5.59e-04 +2022-05-04 13:21:07,139 INFO [train.py:715] (7/8) Epoch 3, batch 10600, loss[loss=0.1693, simple_loss=0.2314, pruned_loss=0.05354, over 4826.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2312, pruned_loss=0.048, over 971961.50 frames.], batch size: 30, lr: 5.59e-04 +2022-05-04 13:21:48,621 INFO [train.py:715] (7/8) Epoch 3, batch 10650, loss[loss=0.1795, simple_loss=0.241, pruned_loss=0.05894, over 4885.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2306, pruned_loss=0.04755, over 972080.13 frames.], batch size: 16, lr: 5.59e-04 +2022-05-04 13:22:30,747 INFO [train.py:715] (7/8) Epoch 3, batch 10700, loss[loss=0.166, simple_loss=0.2387, pruned_loss=0.04663, over 4979.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2314, pruned_loss=0.04802, over 971117.61 frames.], batch size: 15, lr: 5.59e-04 +2022-05-04 13:23:13,521 INFO [train.py:715] (7/8) Epoch 3, batch 10750, loss[loss=0.1517, simple_loss=0.2044, pruned_loss=0.04945, over 4799.00 frames.], tot_loss[loss=0.163, simple_loss=0.2307, pruned_loss=0.04768, over 971832.97 frames.], batch size: 12, lr: 5.59e-04 +2022-05-04 13:23:56,758 INFO [train.py:715] (7/8) Epoch 3, batch 10800, loss[loss=0.1465, simple_loss=0.2167, pruned_loss=0.03814, over 4846.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2308, pruned_loss=0.04737, over 972641.99 frames.], batch size: 32, lr: 5.59e-04 +2022-05-04 13:24:38,552 INFO [train.py:715] (7/8) Epoch 3, batch 10850, loss[loss=0.18, simple_loss=0.2465, pruned_loss=0.0568, over 4858.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2329, pruned_loss=0.04845, over 972044.61 frames.], batch size: 38, lr: 5.59e-04 +2022-05-04 13:25:21,318 INFO [train.py:715] (7/8) Epoch 3, batch 10900, loss[loss=0.1415, simple_loss=0.2174, pruned_loss=0.03281, over 4808.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2339, pruned_loss=0.04939, over 971824.69 frames.], batch size: 25, lr: 5.58e-04 +2022-05-04 13:26:04,562 INFO [train.py:715] (7/8) Epoch 3, batch 10950, loss[loss=0.1764, simple_loss=0.2447, pruned_loss=0.05408, over 4832.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2335, pruned_loss=0.04909, over 971568.32 frames.], batch size: 15, lr: 5.58e-04 +2022-05-04 13:26:46,514 INFO [train.py:715] (7/8) Epoch 3, batch 11000, 
loss[loss=0.2196, simple_loss=0.2903, pruned_loss=0.07446, over 4768.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2338, pruned_loss=0.04894, over 972136.56 frames.], batch size: 19, lr: 5.58e-04 +2022-05-04 13:27:28,084 INFO [train.py:715] (7/8) Epoch 3, batch 11050, loss[loss=0.1486, simple_loss=0.2194, pruned_loss=0.0389, over 4885.00 frames.], tot_loss[loss=0.1649, simple_loss=0.233, pruned_loss=0.04842, over 972858.09 frames.], batch size: 22, lr: 5.58e-04 +2022-05-04 13:28:11,597 INFO [train.py:715] (7/8) Epoch 3, batch 11100, loss[loss=0.1425, simple_loss=0.2116, pruned_loss=0.03668, over 4916.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2327, pruned_loss=0.0481, over 972716.90 frames.], batch size: 18, lr: 5.58e-04 +2022-05-04 13:28:53,678 INFO [train.py:715] (7/8) Epoch 3, batch 11150, loss[loss=0.1422, simple_loss=0.2257, pruned_loss=0.02928, over 4910.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2324, pruned_loss=0.04774, over 973057.42 frames.], batch size: 19, lr: 5.58e-04 +2022-05-04 13:29:35,739 INFO [train.py:715] (7/8) Epoch 3, batch 11200, loss[loss=0.1905, simple_loss=0.2574, pruned_loss=0.0618, over 4927.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2324, pruned_loss=0.04788, over 973951.13 frames.], batch size: 18, lr: 5.58e-04 +2022-05-04 13:30:18,278 INFO [train.py:715] (7/8) Epoch 3, batch 11250, loss[loss=0.2037, simple_loss=0.2755, pruned_loss=0.06589, over 4754.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2326, pruned_loss=0.04775, over 973892.77 frames.], batch size: 19, lr: 5.58e-04 +2022-05-04 13:31:01,501 INFO [train.py:715] (7/8) Epoch 3, batch 11300, loss[loss=0.1851, simple_loss=0.2442, pruned_loss=0.06297, over 4957.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2328, pruned_loss=0.04791, over 973083.01 frames.], batch size: 35, lr: 5.57e-04 +2022-05-04 13:31:42,778 INFO [train.py:715] (7/8) Epoch 3, batch 11350, loss[loss=0.1575, simple_loss=0.2199, pruned_loss=0.0475, over 4910.00 frames.], tot_loss[loss=0.164, simple_loss=0.2327, pruned_loss=0.04767, over 972979.34 frames.], batch size: 19, lr: 5.57e-04 +2022-05-04 13:32:25,113 INFO [train.py:715] (7/8) Epoch 3, batch 11400, loss[loss=0.1541, simple_loss=0.236, pruned_loss=0.03605, over 4747.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2319, pruned_loss=0.0474, over 972683.08 frames.], batch size: 16, lr: 5.57e-04 +2022-05-04 13:33:08,055 INFO [train.py:715] (7/8) Epoch 3, batch 11450, loss[loss=0.1487, simple_loss=0.2268, pruned_loss=0.03531, over 4858.00 frames.], tot_loss[loss=0.164, simple_loss=0.2328, pruned_loss=0.04756, over 972407.25 frames.], batch size: 20, lr: 5.57e-04 +2022-05-04 13:33:50,182 INFO [train.py:715] (7/8) Epoch 3, batch 11500, loss[loss=0.1505, simple_loss=0.2233, pruned_loss=0.03884, over 4907.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2314, pruned_loss=0.04644, over 972503.03 frames.], batch size: 18, lr: 5.57e-04 +2022-05-04 13:34:32,227 INFO [train.py:715] (7/8) Epoch 3, batch 11550, loss[loss=0.1641, simple_loss=0.2302, pruned_loss=0.04902, over 4773.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2313, pruned_loss=0.04657, over 972665.05 frames.], batch size: 18, lr: 5.57e-04 +2022-05-04 13:35:14,413 INFO [train.py:715] (7/8) Epoch 3, batch 11600, loss[loss=0.1424, simple_loss=0.2139, pruned_loss=0.03542, over 4784.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2303, pruned_loss=0.04605, over 972092.33 frames.], batch size: 18, lr: 5.57e-04 +2022-05-04 13:35:57,174 INFO [train.py:715] (7/8) Epoch 3, batch 11650, loss[loss=0.1403, 
simple_loss=0.2143, pruned_loss=0.03318, over 4907.00 frames.], tot_loss[loss=0.162, simple_loss=0.2306, pruned_loss=0.04665, over 971921.58 frames.], batch size: 17, lr: 5.57e-04 +2022-05-04 13:36:39,265 INFO [train.py:715] (7/8) Epoch 3, batch 11700, loss[loss=0.1461, simple_loss=0.2161, pruned_loss=0.03809, over 4953.00 frames.], tot_loss[loss=0.1613, simple_loss=0.23, pruned_loss=0.04629, over 972765.48 frames.], batch size: 24, lr: 5.57e-04 +2022-05-04 13:37:21,480 INFO [train.py:715] (7/8) Epoch 3, batch 11750, loss[loss=0.1528, simple_loss=0.2183, pruned_loss=0.04361, over 4815.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2297, pruned_loss=0.04643, over 972416.21 frames.], batch size: 13, lr: 5.56e-04 +2022-05-04 13:38:05,292 INFO [train.py:715] (7/8) Epoch 3, batch 11800, loss[loss=0.1477, simple_loss=0.2188, pruned_loss=0.03829, over 4910.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2306, pruned_loss=0.04714, over 971748.31 frames.], batch size: 17, lr: 5.56e-04 +2022-05-04 13:38:47,458 INFO [train.py:715] (7/8) Epoch 3, batch 11850, loss[loss=0.1524, simple_loss=0.2207, pruned_loss=0.04203, over 4972.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2308, pruned_loss=0.04687, over 972078.40 frames.], batch size: 15, lr: 5.56e-04 +2022-05-04 13:39:29,614 INFO [train.py:715] (7/8) Epoch 3, batch 11900, loss[loss=0.2053, simple_loss=0.2547, pruned_loss=0.07793, over 4968.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2313, pruned_loss=0.04756, over 971400.10 frames.], batch size: 35, lr: 5.56e-04 +2022-05-04 13:40:11,708 INFO [train.py:715] (7/8) Epoch 3, batch 11950, loss[loss=0.1661, simple_loss=0.235, pruned_loss=0.0486, over 4814.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2307, pruned_loss=0.04775, over 970580.80 frames.], batch size: 27, lr: 5.56e-04 +2022-05-04 13:40:54,205 INFO [train.py:715] (7/8) Epoch 3, batch 12000, loss[loss=0.1831, simple_loss=0.2458, pruned_loss=0.06021, over 4784.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2317, pruned_loss=0.04849, over 970781.55 frames.], batch size: 14, lr: 5.56e-04 +2022-05-04 13:40:54,206 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 13:41:02,572 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1142, simple_loss=0.2003, pruned_loss=0.01401, over 914524.00 frames. 
+2022-05-04 13:41:44,682 INFO [train.py:715] (7/8) Epoch 3, batch 12050, loss[loss=0.1166, simple_loss=0.1801, pruned_loss=0.02661, over 4793.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2321, pruned_loss=0.04876, over 971588.67 frames.], batch size: 14, lr: 5.56e-04 +2022-05-04 13:42:26,376 INFO [train.py:715] (7/8) Epoch 3, batch 12100, loss[loss=0.1623, simple_loss=0.2354, pruned_loss=0.0446, over 4898.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2323, pruned_loss=0.04867, over 971472.83 frames.], batch size: 19, lr: 5.56e-04 +2022-05-04 13:43:08,779 INFO [train.py:715] (7/8) Epoch 3, batch 12150, loss[loss=0.1616, simple_loss=0.2263, pruned_loss=0.04849, over 4946.00 frames.], tot_loss[loss=0.164, simple_loss=0.2317, pruned_loss=0.04817, over 971891.22 frames.], batch size: 21, lr: 5.55e-04 +2022-05-04 13:43:52,024 INFO [train.py:715] (7/8) Epoch 3, batch 12200, loss[loss=0.1755, simple_loss=0.238, pruned_loss=0.05646, over 4945.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2314, pruned_loss=0.04799, over 972057.61 frames.], batch size: 39, lr: 5.55e-04 +2022-05-04 13:44:33,690 INFO [train.py:715] (7/8) Epoch 3, batch 12250, loss[loss=0.1833, simple_loss=0.2407, pruned_loss=0.06296, over 4829.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2315, pruned_loss=0.0479, over 971951.88 frames.], batch size: 26, lr: 5.55e-04 +2022-05-04 13:45:15,597 INFO [train.py:715] (7/8) Epoch 3, batch 12300, loss[loss=0.1695, simple_loss=0.2539, pruned_loss=0.04254, over 4984.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04804, over 972612.21 frames.], batch size: 28, lr: 5.55e-04 +2022-05-04 13:45:58,056 INFO [train.py:715] (7/8) Epoch 3, batch 12350, loss[loss=0.1408, simple_loss=0.2105, pruned_loss=0.03557, over 4898.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2326, pruned_loss=0.04788, over 972099.97 frames.], batch size: 22, lr: 5.55e-04 +2022-05-04 13:46:41,404 INFO [train.py:715] (7/8) Epoch 3, batch 12400, loss[loss=0.1755, simple_loss=0.2472, pruned_loss=0.05188, over 4983.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2331, pruned_loss=0.04783, over 972364.11 frames.], batch size: 28, lr: 5.55e-04 +2022-05-04 13:47:23,076 INFO [train.py:715] (7/8) Epoch 3, batch 12450, loss[loss=0.2846, simple_loss=0.3352, pruned_loss=0.1171, over 4858.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2331, pruned_loss=0.0479, over 972206.95 frames.], batch size: 32, lr: 5.55e-04 +2022-05-04 13:48:04,573 INFO [train.py:715] (7/8) Epoch 3, batch 12500, loss[loss=0.1587, simple_loss=0.2234, pruned_loss=0.04697, over 4697.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2329, pruned_loss=0.04796, over 972675.00 frames.], batch size: 15, lr: 5.55e-04 +2022-05-04 13:48:47,324 INFO [train.py:715] (7/8) Epoch 3, batch 12550, loss[loss=0.154, simple_loss=0.2179, pruned_loss=0.04501, over 4887.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2322, pruned_loss=0.04764, over 972319.06 frames.], batch size: 16, lr: 5.54e-04 +2022-05-04 13:49:29,595 INFO [train.py:715] (7/8) Epoch 3, batch 12600, loss[loss=0.1576, simple_loss=0.2234, pruned_loss=0.04588, over 4928.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2314, pruned_loss=0.04701, over 972573.60 frames.], batch size: 18, lr: 5.54e-04 +2022-05-04 13:50:11,364 INFO [train.py:715] (7/8) Epoch 3, batch 12650, loss[loss=0.1563, simple_loss=0.2368, pruned_loss=0.0379, over 4872.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2313, pruned_loss=0.04677, over 971813.93 frames.], batch size: 16, lr: 5.54e-04 +2022-05-04 13:50:53,068 
INFO [train.py:715] (7/8) Epoch 3, batch 12700, loss[loss=0.154, simple_loss=0.2237, pruned_loss=0.0422, over 4987.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2314, pruned_loss=0.04696, over 971310.15 frames.], batch size: 25, lr: 5.54e-04 +2022-05-04 13:51:35,166 INFO [train.py:715] (7/8) Epoch 3, batch 12750, loss[loss=0.1185, simple_loss=0.1926, pruned_loss=0.02219, over 4793.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2308, pruned_loss=0.04649, over 971839.87 frames.], batch size: 12, lr: 5.54e-04 +2022-05-04 13:52:17,429 INFO [train.py:715] (7/8) Epoch 3, batch 12800, loss[loss=0.1595, simple_loss=0.2349, pruned_loss=0.042, over 4822.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2314, pruned_loss=0.04707, over 971350.42 frames.], batch size: 25, lr: 5.54e-04 +2022-05-04 13:52:58,260 INFO [train.py:715] (7/8) Epoch 3, batch 12850, loss[loss=0.1595, simple_loss=0.2375, pruned_loss=0.04077, over 4906.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2312, pruned_loss=0.0472, over 972424.01 frames.], batch size: 19, lr: 5.54e-04 +2022-05-04 13:53:40,986 INFO [train.py:715] (7/8) Epoch 3, batch 12900, loss[loss=0.167, simple_loss=0.2385, pruned_loss=0.04778, over 4873.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2305, pruned_loss=0.04667, over 972221.44 frames.], batch size: 20, lr: 5.54e-04 +2022-05-04 13:54:23,562 INFO [train.py:715] (7/8) Epoch 3, batch 12950, loss[loss=0.1603, simple_loss=0.2317, pruned_loss=0.04443, over 4957.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2309, pruned_loss=0.047, over 972065.61 frames.], batch size: 21, lr: 5.54e-04 +2022-05-04 13:55:04,930 INFO [train.py:715] (7/8) Epoch 3, batch 13000, loss[loss=0.1405, simple_loss=0.2098, pruned_loss=0.0356, over 4953.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2311, pruned_loss=0.04691, over 972625.25 frames.], batch size: 14, lr: 5.53e-04 +2022-05-04 13:55:46,797 INFO [train.py:715] (7/8) Epoch 3, batch 13050, loss[loss=0.173, simple_loss=0.2316, pruned_loss=0.05717, over 4832.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2314, pruned_loss=0.04748, over 972730.24 frames.], batch size: 15, lr: 5.53e-04 +2022-05-04 13:56:28,794 INFO [train.py:715] (7/8) Epoch 3, batch 13100, loss[loss=0.223, simple_loss=0.2688, pruned_loss=0.0886, over 4752.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2324, pruned_loss=0.04841, over 972818.29 frames.], batch size: 16, lr: 5.53e-04 +2022-05-04 13:57:10,558 INFO [train.py:715] (7/8) Epoch 3, batch 13150, loss[loss=0.1856, simple_loss=0.257, pruned_loss=0.05717, over 4882.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2323, pruned_loss=0.04843, over 972252.60 frames.], batch size: 19, lr: 5.53e-04 +2022-05-04 13:57:52,121 INFO [train.py:715] (7/8) Epoch 3, batch 13200, loss[loss=0.1498, simple_loss=0.2204, pruned_loss=0.0396, over 4787.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2318, pruned_loss=0.04796, over 972368.99 frames.], batch size: 21, lr: 5.53e-04 +2022-05-04 13:58:34,749 INFO [train.py:715] (7/8) Epoch 3, batch 13250, loss[loss=0.1922, simple_loss=0.2414, pruned_loss=0.07151, over 4945.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2316, pruned_loss=0.04814, over 972191.84 frames.], batch size: 21, lr: 5.53e-04 +2022-05-04 13:59:17,150 INFO [train.py:715] (7/8) Epoch 3, batch 13300, loss[loss=0.1616, simple_loss=0.2199, pruned_loss=0.05169, over 4984.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2321, pruned_loss=0.04844, over 971637.31 frames.], batch size: 14, lr: 5.53e-04 +2022-05-04 13:59:58,640 INFO [train.py:715] (7/8) Epoch 
3, batch 13350, loss[loss=0.1676, simple_loss=0.236, pruned_loss=0.04958, over 4786.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2318, pruned_loss=0.04841, over 971618.07 frames.], batch size: 18, lr: 5.53e-04 +2022-05-04 14:00:40,470 INFO [train.py:715] (7/8) Epoch 3, batch 13400, loss[loss=0.1904, simple_loss=0.256, pruned_loss=0.06234, over 4839.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2323, pruned_loss=0.04859, over 972549.74 frames.], batch size: 30, lr: 5.52e-04 +2022-05-04 14:01:23,055 INFO [train.py:715] (7/8) Epoch 3, batch 13450, loss[loss=0.176, simple_loss=0.2384, pruned_loss=0.05678, over 4814.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2309, pruned_loss=0.04799, over 972819.83 frames.], batch size: 27, lr: 5.52e-04 +2022-05-04 14:02:04,526 INFO [train.py:715] (7/8) Epoch 3, batch 13500, loss[loss=0.1894, simple_loss=0.2675, pruned_loss=0.05559, over 4735.00 frames.], tot_loss[loss=0.163, simple_loss=0.2311, pruned_loss=0.04747, over 972620.58 frames.], batch size: 16, lr: 5.52e-04 +2022-05-04 14:02:46,056 INFO [train.py:715] (7/8) Epoch 3, batch 13550, loss[loss=0.1385, simple_loss=0.2092, pruned_loss=0.03394, over 4982.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2313, pruned_loss=0.04741, over 972236.40 frames.], batch size: 15, lr: 5.52e-04 +2022-05-04 14:03:28,381 INFO [train.py:715] (7/8) Epoch 3, batch 13600, loss[loss=0.1535, simple_loss=0.223, pruned_loss=0.04196, over 4861.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2313, pruned_loss=0.04706, over 972383.42 frames.], batch size: 20, lr: 5.52e-04 +2022-05-04 14:04:10,283 INFO [train.py:715] (7/8) Epoch 3, batch 13650, loss[loss=0.1427, simple_loss=0.2175, pruned_loss=0.03394, over 4884.00 frames.], tot_loss[loss=0.1626, simple_loss=0.231, pruned_loss=0.04712, over 971881.81 frames.], batch size: 22, lr: 5.52e-04 +2022-05-04 14:04:51,703 INFO [train.py:715] (7/8) Epoch 3, batch 13700, loss[loss=0.1619, simple_loss=0.2349, pruned_loss=0.0445, over 4800.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2313, pruned_loss=0.04767, over 971586.22 frames.], batch size: 24, lr: 5.52e-04 +2022-05-04 14:05:34,467 INFO [train.py:715] (7/8) Epoch 3, batch 13750, loss[loss=0.1929, simple_loss=0.2708, pruned_loss=0.05752, over 4980.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2316, pruned_loss=0.04791, over 971580.65 frames.], batch size: 15, lr: 5.52e-04 +2022-05-04 14:06:16,552 INFO [train.py:715] (7/8) Epoch 3, batch 13800, loss[loss=0.145, simple_loss=0.2041, pruned_loss=0.04297, over 4705.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2317, pruned_loss=0.04799, over 971601.72 frames.], batch size: 15, lr: 5.52e-04 +2022-05-04 14:06:58,030 INFO [train.py:715] (7/8) Epoch 3, batch 13850, loss[loss=0.1812, simple_loss=0.26, pruned_loss=0.05122, over 4838.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2312, pruned_loss=0.04719, over 972465.97 frames.], batch size: 15, lr: 5.51e-04 +2022-05-04 14:07:39,272 INFO [train.py:715] (7/8) Epoch 3, batch 13900, loss[loss=0.1552, simple_loss=0.22, pruned_loss=0.04521, over 4765.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2306, pruned_loss=0.0472, over 971407.55 frames.], batch size: 16, lr: 5.51e-04 +2022-05-04 14:08:21,708 INFO [train.py:715] (7/8) Epoch 3, batch 13950, loss[loss=0.141, simple_loss=0.2097, pruned_loss=0.03611, over 4849.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2308, pruned_loss=0.04739, over 971667.14 frames.], batch size: 13, lr: 5.51e-04 +2022-05-04 14:09:04,163 INFO [train.py:715] (7/8) Epoch 3, batch 14000, 
loss[loss=0.1694, simple_loss=0.2275, pruned_loss=0.05564, over 4950.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2323, pruned_loss=0.04804, over 971791.85 frames.], batch size: 21, lr: 5.51e-04 +2022-05-04 14:09:45,591 INFO [train.py:715] (7/8) Epoch 3, batch 14050, loss[loss=0.159, simple_loss=0.2192, pruned_loss=0.04938, over 4841.00 frames.], tot_loss[loss=0.164, simple_loss=0.2322, pruned_loss=0.04791, over 972256.96 frames.], batch size: 15, lr: 5.51e-04 +2022-05-04 14:10:28,390 INFO [train.py:715] (7/8) Epoch 3, batch 14100, loss[loss=0.1217, simple_loss=0.1875, pruned_loss=0.02792, over 4708.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2316, pruned_loss=0.04748, over 972725.95 frames.], batch size: 15, lr: 5.51e-04 +2022-05-04 14:11:10,226 INFO [train.py:715] (7/8) Epoch 3, batch 14150, loss[loss=0.1654, simple_loss=0.2431, pruned_loss=0.04384, over 4818.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04715, over 973701.67 frames.], batch size: 27, lr: 5.51e-04 +2022-05-04 14:11:51,376 INFO [train.py:715] (7/8) Epoch 3, batch 14200, loss[loss=0.1681, simple_loss=0.243, pruned_loss=0.0466, over 4794.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2306, pruned_loss=0.04713, over 973106.14 frames.], batch size: 18, lr: 5.51e-04 +2022-05-04 14:12:33,508 INFO [train.py:715] (7/8) Epoch 3, batch 14250, loss[loss=0.151, simple_loss=0.2316, pruned_loss=0.03514, over 4915.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2313, pruned_loss=0.04768, over 971661.01 frames.], batch size: 17, lr: 5.51e-04 +2022-05-04 14:13:15,875 INFO [train.py:715] (7/8) Epoch 3, batch 14300, loss[loss=0.1461, simple_loss=0.2187, pruned_loss=0.03677, over 4787.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2312, pruned_loss=0.04762, over 972321.89 frames.], batch size: 17, lr: 5.50e-04 +2022-05-04 14:13:58,176 INFO [train.py:715] (7/8) Epoch 3, batch 14350, loss[loss=0.1361, simple_loss=0.2035, pruned_loss=0.03439, over 4807.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2323, pruned_loss=0.04776, over 972951.65 frames.], batch size: 15, lr: 5.50e-04 +2022-05-04 14:14:38,956 INFO [train.py:715] (7/8) Epoch 3, batch 14400, loss[loss=0.1661, simple_loss=0.255, pruned_loss=0.0386, over 4811.00 frames.], tot_loss[loss=0.163, simple_loss=0.2317, pruned_loss=0.0472, over 972337.11 frames.], batch size: 21, lr: 5.50e-04 +2022-05-04 14:15:21,417 INFO [train.py:715] (7/8) Epoch 3, batch 14450, loss[loss=0.1392, simple_loss=0.2185, pruned_loss=0.02992, over 4887.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2319, pruned_loss=0.04765, over 972565.19 frames.], batch size: 22, lr: 5.50e-04 +2022-05-04 14:16:03,343 INFO [train.py:715] (7/8) Epoch 3, batch 14500, loss[loss=0.1804, simple_loss=0.2411, pruned_loss=0.05986, over 4738.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2308, pruned_loss=0.04708, over 972819.09 frames.], batch size: 19, lr: 5.50e-04 +2022-05-04 14:16:44,531 INFO [train.py:715] (7/8) Epoch 3, batch 14550, loss[loss=0.1593, simple_loss=0.2324, pruned_loss=0.04311, over 4879.00 frames.], tot_loss[loss=0.162, simple_loss=0.2303, pruned_loss=0.04687, over 972147.80 frames.], batch size: 22, lr: 5.50e-04 +2022-05-04 14:17:26,984 INFO [train.py:715] (7/8) Epoch 3, batch 14600, loss[loss=0.1438, simple_loss=0.2084, pruned_loss=0.03953, over 4965.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2302, pruned_loss=0.04738, over 972181.09 frames.], batch size: 28, lr: 5.50e-04 +2022-05-04 14:18:08,873 INFO [train.py:715] (7/8) Epoch 3, batch 14650, loss[loss=0.1398, 
simple_loss=0.2186, pruned_loss=0.03049, over 4800.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2312, pruned_loss=0.04762, over 972154.91 frames.], batch size: 24, lr: 5.50e-04 +2022-05-04 14:18:50,916 INFO [train.py:715] (7/8) Epoch 3, batch 14700, loss[loss=0.1305, simple_loss=0.2119, pruned_loss=0.02452, over 4799.00 frames.], tot_loss[loss=0.1631, simple_loss=0.231, pruned_loss=0.04756, over 971948.44 frames.], batch size: 25, lr: 5.49e-04 +2022-05-04 14:19:32,216 INFO [train.py:715] (7/8) Epoch 3, batch 14750, loss[loss=0.1745, simple_loss=0.2418, pruned_loss=0.05362, over 4920.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2312, pruned_loss=0.04749, over 971963.59 frames.], batch size: 29, lr: 5.49e-04 +2022-05-04 14:20:14,642 INFO [train.py:715] (7/8) Epoch 3, batch 14800, loss[loss=0.1743, simple_loss=0.242, pruned_loss=0.05327, over 4898.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2311, pruned_loss=0.04764, over 971675.83 frames.], batch size: 22, lr: 5.49e-04 +2022-05-04 14:20:56,941 INFO [train.py:715] (7/8) Epoch 3, batch 14850, loss[loss=0.1746, simple_loss=0.2469, pruned_loss=0.05111, over 4949.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2313, pruned_loss=0.04785, over 971552.53 frames.], batch size: 23, lr: 5.49e-04 +2022-05-04 14:21:37,857 INFO [train.py:715] (7/8) Epoch 3, batch 14900, loss[loss=0.185, simple_loss=0.2486, pruned_loss=0.06071, over 4918.00 frames.], tot_loss[loss=0.164, simple_loss=0.2321, pruned_loss=0.04797, over 972086.29 frames.], batch size: 23, lr: 5.49e-04 +2022-05-04 14:22:20,813 INFO [train.py:715] (7/8) Epoch 3, batch 14950, loss[loss=0.1932, simple_loss=0.2402, pruned_loss=0.07314, over 4962.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2319, pruned_loss=0.04824, over 971320.72 frames.], batch size: 15, lr: 5.49e-04 +2022-05-04 14:23:02,211 INFO [train.py:715] (7/8) Epoch 3, batch 15000, loss[loss=0.1341, simple_loss=0.2117, pruned_loss=0.02829, over 4984.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2316, pruned_loss=0.04798, over 971502.49 frames.], batch size: 28, lr: 5.49e-04 +2022-05-04 14:23:02,212 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 14:23:10,876 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1142, simple_loss=0.2003, pruned_loss=0.01402, over 914524.00 frames.
+2022-05-04 14:23:52,717 INFO [train.py:715] (7/8) Epoch 3, batch 15050, loss[loss=0.1835, simple_loss=0.2394, pruned_loss=0.06379, over 4873.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2313, pruned_loss=0.04789, over 971667.97 frames.], batch size: 30, lr: 5.49e-04 +2022-05-04 14:24:34,028 INFO [train.py:715] (7/8) Epoch 3, batch 15100, loss[loss=0.1441, simple_loss=0.2261, pruned_loss=0.0311, over 4745.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2316, pruned_loss=0.04764, over 972548.43 frames.], batch size: 16, lr: 5.49e-04 +2022-05-04 14:25:16,192 INFO [train.py:715] (7/8) Epoch 3, batch 15150, loss[loss=0.1684, simple_loss=0.2405, pruned_loss=0.04809, over 4929.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2313, pruned_loss=0.04759, over 972727.69 frames.], batch size: 35, lr: 5.48e-04 +2022-05-04 14:25:57,817 INFO [train.py:715] (7/8) Epoch 3, batch 15200, loss[loss=0.1817, simple_loss=0.2469, pruned_loss=0.05822, over 4969.00 frames.], tot_loss[loss=0.163, simple_loss=0.2311, pruned_loss=0.04745, over 973106.92 frames.], batch size: 40, lr: 5.48e-04 +2022-05-04 14:26:39,371 INFO [train.py:715] (7/8) Epoch 3, batch 15250, loss[loss=0.1697, simple_loss=0.2336, pruned_loss=0.0529, over 4776.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2315, pruned_loss=0.04768, over 973421.23 frames.], batch size: 17, lr: 5.48e-04 +2022-05-04 14:27:20,707 INFO [train.py:715] (7/8) Epoch 3, batch 15300, loss[loss=0.1712, simple_loss=0.2416, pruned_loss=0.05037, over 4790.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2319, pruned_loss=0.04768, over 972061.46 frames.], batch size: 24, lr: 5.48e-04 +2022-05-04 14:28:02,531 INFO [train.py:715] (7/8) Epoch 3, batch 15350, loss[loss=0.1843, simple_loss=0.2492, pruned_loss=0.05976, over 4986.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2316, pruned_loss=0.04735, over 972207.40 frames.], batch size: 31, lr: 5.48e-04 +2022-05-04 14:28:44,648 INFO [train.py:715] (7/8) Epoch 3, batch 15400, loss[loss=0.155, simple_loss=0.2355, pruned_loss=0.03722, over 4833.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2321, pruned_loss=0.04764, over 972770.94 frames.], batch size: 26, lr: 5.48e-04 +2022-05-04 14:29:25,739 INFO [train.py:715] (7/8) Epoch 3, batch 15450, loss[loss=0.1904, simple_loss=0.2519, pruned_loss=0.06439, over 4893.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04803, over 972460.46 frames.], batch size: 32, lr: 5.48e-04 +2022-05-04 14:30:08,691 INFO [train.py:715] (7/8) Epoch 3, batch 15500, loss[loss=0.1526, simple_loss=0.2167, pruned_loss=0.04425, over 4762.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2321, pruned_loss=0.04768, over 972102.51 frames.], batch size: 17, lr: 5.48e-04 +2022-05-04 14:30:50,508 INFO [train.py:715] (7/8) Epoch 3, batch 15550, loss[loss=0.193, simple_loss=0.2586, pruned_loss=0.06377, over 4902.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2326, pruned_loss=0.048, over 972221.68 frames.], batch size: 19, lr: 5.48e-04 +2022-05-04 14:31:35,090 INFO [train.py:715] (7/8) Epoch 3, batch 15600, loss[loss=0.1362, simple_loss=0.2159, pruned_loss=0.02829, over 4905.00 frames.], tot_loss[loss=0.1638, simple_loss=0.232, pruned_loss=0.04775, over 972643.29 frames.], batch size: 18, lr: 5.47e-04 +2022-05-04 14:32:16,101 INFO [train.py:715] (7/8) Epoch 3, batch 15650, loss[loss=0.1721, simple_loss=0.2482, pruned_loss=0.04799, over 4782.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2321, pruned_loss=0.04767, over 972890.08 frames.], batch size: 17, lr: 5.47e-04 +2022-05-04 14:32:57,691 
INFO [train.py:715] (7/8) Epoch 3, batch 15700, loss[loss=0.1632, simple_loss=0.227, pruned_loss=0.04971, over 4799.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2311, pruned_loss=0.04714, over 973028.11 frames.], batch size: 21, lr: 5.47e-04 +2022-05-04 14:33:40,525 INFO [train.py:715] (7/8) Epoch 3, batch 15750, loss[loss=0.1473, simple_loss=0.219, pruned_loss=0.03783, over 4888.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2313, pruned_loss=0.04754, over 973260.53 frames.], batch size: 22, lr: 5.47e-04 +2022-05-04 14:34:22,341 INFO [train.py:715] (7/8) Epoch 3, batch 15800, loss[loss=0.1433, simple_loss=0.2121, pruned_loss=0.03721, over 4812.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2303, pruned_loss=0.04655, over 972708.51 frames.], batch size: 25, lr: 5.47e-04 +2022-05-04 14:35:03,585 INFO [train.py:715] (7/8) Epoch 3, batch 15850, loss[loss=0.1615, simple_loss=0.2391, pruned_loss=0.04192, over 4879.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2298, pruned_loss=0.04622, over 973152.48 frames.], batch size: 22, lr: 5.47e-04 +2022-05-04 14:35:45,962 INFO [train.py:715] (7/8) Epoch 3, batch 15900, loss[loss=0.1875, simple_loss=0.2578, pruned_loss=0.05857, over 4982.00 frames.], tot_loss[loss=0.1614, simple_loss=0.23, pruned_loss=0.04644, over 973223.41 frames.], batch size: 25, lr: 5.47e-04 +2022-05-04 14:36:28,599 INFO [train.py:715] (7/8) Epoch 3, batch 15950, loss[loss=0.1583, simple_loss=0.2319, pruned_loss=0.04233, over 4991.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2301, pruned_loss=0.04662, over 972537.01 frames.], batch size: 16, lr: 5.47e-04 +2022-05-04 14:37:09,196 INFO [train.py:715] (7/8) Epoch 3, batch 16000, loss[loss=0.1671, simple_loss=0.2205, pruned_loss=0.05689, over 4706.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2302, pruned_loss=0.04673, over 972541.92 frames.], batch size: 15, lr: 5.47e-04 +2022-05-04 14:37:50,847 INFO [train.py:715] (7/8) Epoch 3, batch 16050, loss[loss=0.1829, simple_loss=0.2424, pruned_loss=0.06168, over 4781.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2306, pruned_loss=0.04677, over 972731.10 frames.], batch size: 17, lr: 5.46e-04 +2022-05-04 14:38:33,476 INFO [train.py:715] (7/8) Epoch 3, batch 16100, loss[loss=0.1615, simple_loss=0.2324, pruned_loss=0.04528, over 4941.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2301, pruned_loss=0.04621, over 972444.85 frames.], batch size: 21, lr: 5.46e-04 +2022-05-04 14:39:15,447 INFO [train.py:715] (7/8) Epoch 3, batch 16150, loss[loss=0.1644, simple_loss=0.2206, pruned_loss=0.05409, over 4759.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2301, pruned_loss=0.04649, over 972137.17 frames.], batch size: 16, lr: 5.46e-04 +2022-05-04 14:39:56,182 INFO [train.py:715] (7/8) Epoch 3, batch 16200, loss[loss=0.1515, simple_loss=0.2089, pruned_loss=0.04701, over 4795.00 frames.], tot_loss[loss=0.162, simple_loss=0.2303, pruned_loss=0.04686, over 972099.96 frames.], batch size: 18, lr: 5.46e-04 +2022-05-04 14:40:38,483 INFO [train.py:715] (7/8) Epoch 3, batch 16250, loss[loss=0.1916, simple_loss=0.2669, pruned_loss=0.05818, over 4800.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2311, pruned_loss=0.04702, over 971706.36 frames.], batch size: 14, lr: 5.46e-04 +2022-05-04 14:41:20,556 INFO [train.py:715] (7/8) Epoch 3, batch 16300, loss[loss=0.157, simple_loss=0.2299, pruned_loss=0.04205, over 4681.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2312, pruned_loss=0.04674, over 971919.89 frames.], batch size: 15, lr: 5.46e-04 +2022-05-04 14:42:01,220 INFO [train.py:715] 
(7/8) Epoch 3, batch 16350, loss[loss=0.1801, simple_loss=0.2397, pruned_loss=0.06019, over 4850.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2314, pruned_loss=0.04693, over 972373.73 frames.], batch size: 15, lr: 5.46e-04 +2022-05-04 14:42:43,180 INFO [train.py:715] (7/8) Epoch 3, batch 16400, loss[loss=0.1451, simple_loss=0.2128, pruned_loss=0.03872, over 4748.00 frames.], tot_loss[loss=0.161, simple_loss=0.2299, pruned_loss=0.04604, over 972291.27 frames.], batch size: 12, lr: 5.46e-04 +2022-05-04 14:43:25,723 INFO [train.py:715] (7/8) Epoch 3, batch 16450, loss[loss=0.1339, simple_loss=0.2072, pruned_loss=0.03032, over 4826.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2303, pruned_loss=0.04642, over 972724.46 frames.], batch size: 15, lr: 5.45e-04 +2022-05-04 14:44:08,336 INFO [train.py:715] (7/8) Epoch 3, batch 16500, loss[loss=0.1349, simple_loss=0.2155, pruned_loss=0.02712, over 4696.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2303, pruned_loss=0.04651, over 972552.45 frames.], batch size: 15, lr: 5.45e-04 +2022-05-04 14:44:49,047 INFO [train.py:715] (7/8) Epoch 3, batch 16550, loss[loss=0.2246, simple_loss=0.2932, pruned_loss=0.07801, over 4909.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2299, pruned_loss=0.04643, over 972109.08 frames.], batch size: 22, lr: 5.45e-04 +2022-05-04 14:45:31,916 INFO [train.py:715] (7/8) Epoch 3, batch 16600, loss[loss=0.1802, simple_loss=0.2411, pruned_loss=0.05972, over 4784.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2305, pruned_loss=0.04695, over 972472.98 frames.], batch size: 18, lr: 5.45e-04 +2022-05-04 14:46:14,689 INFO [train.py:715] (7/8) Epoch 3, batch 16650, loss[loss=0.1562, simple_loss=0.2255, pruned_loss=0.04342, over 4905.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2295, pruned_loss=0.0464, over 971951.92 frames.], batch size: 19, lr: 5.45e-04 +2022-05-04 14:46:55,381 INFO [train.py:715] (7/8) Epoch 3, batch 16700, loss[loss=0.1357, simple_loss=0.2106, pruned_loss=0.03045, over 4827.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2301, pruned_loss=0.04746, over 971778.01 frames.], batch size: 26, lr: 5.45e-04 +2022-05-04 14:47:37,403 INFO [train.py:715] (7/8) Epoch 3, batch 16750, loss[loss=0.1632, simple_loss=0.2384, pruned_loss=0.044, over 4839.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2305, pruned_loss=0.04791, over 972496.32 frames.], batch size: 15, lr: 5.45e-04 +2022-05-04 14:48:19,858 INFO [train.py:715] (7/8) Epoch 3, batch 16800, loss[loss=0.1308, simple_loss=0.1903, pruned_loss=0.03569, over 4815.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2296, pruned_loss=0.04707, over 971312.14 frames.], batch size: 13, lr: 5.45e-04 +2022-05-04 14:49:01,333 INFO [train.py:715] (7/8) Epoch 3, batch 16850, loss[loss=0.165, simple_loss=0.225, pruned_loss=0.05253, over 4795.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2298, pruned_loss=0.04704, over 971271.78 frames.], batch size: 14, lr: 5.45e-04 +2022-05-04 14:49:42,739 INFO [train.py:715] (7/8) Epoch 3, batch 16900, loss[loss=0.1597, simple_loss=0.2229, pruned_loss=0.04829, over 4990.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2302, pruned_loss=0.0472, over 971651.03 frames.], batch size: 28, lr: 5.44e-04 +2022-05-04 14:50:24,690 INFO [train.py:715] (7/8) Epoch 3, batch 16950, loss[loss=0.1451, simple_loss=0.2204, pruned_loss=0.0349, over 4776.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2306, pruned_loss=0.04717, over 971572.51 frames.], batch size: 18, lr: 5.44e-04 +2022-05-04 14:51:07,251 INFO [train.py:715] (7/8) Epoch 3, batch 
17000, loss[loss=0.1991, simple_loss=0.2518, pruned_loss=0.07323, over 4795.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2303, pruned_loss=0.04713, over 972450.51 frames.], batch size: 21, lr: 5.44e-04 +2022-05-04 14:51:47,565 INFO [train.py:715] (7/8) Epoch 3, batch 17050, loss[loss=0.1949, simple_loss=0.2655, pruned_loss=0.06214, over 4928.00 frames.], tot_loss[loss=0.162, simple_loss=0.2301, pruned_loss=0.0469, over 971816.59 frames.], batch size: 19, lr: 5.44e-04 +2022-05-04 14:52:29,483 INFO [train.py:715] (7/8) Epoch 3, batch 17100, loss[loss=0.1735, simple_loss=0.2386, pruned_loss=0.05424, over 4865.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04703, over 972712.06 frames.], batch size: 32, lr: 5.44e-04 +2022-05-04 14:53:11,185 INFO [train.py:715] (7/8) Epoch 3, batch 17150, loss[loss=0.143, simple_loss=0.2083, pruned_loss=0.03884, over 4929.00 frames.], tot_loss[loss=0.163, simple_loss=0.2314, pruned_loss=0.04725, over 973686.84 frames.], batch size: 29, lr: 5.44e-04 +2022-05-04 14:53:52,357 INFO [train.py:715] (7/8) Epoch 3, batch 17200, loss[loss=0.1634, simple_loss=0.2389, pruned_loss=0.04394, over 4913.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04716, over 972975.04 frames.], batch size: 17, lr: 5.44e-04 +2022-05-04 14:54:33,054 INFO [train.py:715] (7/8) Epoch 3, batch 17250, loss[loss=0.159, simple_loss=0.2436, pruned_loss=0.03716, over 4798.00 frames.], tot_loss[loss=0.1629, simple_loss=0.231, pruned_loss=0.04734, over 972841.53 frames.], batch size: 21, lr: 5.44e-04 +2022-05-04 14:55:14,509 INFO [train.py:715] (7/8) Epoch 3, batch 17300, loss[loss=0.15, simple_loss=0.2165, pruned_loss=0.04176, over 4749.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2314, pruned_loss=0.04759, over 972911.06 frames.], batch size: 16, lr: 5.44e-04 +2022-05-04 14:55:56,121 INFO [train.py:715] (7/8) Epoch 3, batch 17350, loss[loss=0.1614, simple_loss=0.228, pruned_loss=0.04743, over 4890.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2311, pruned_loss=0.04737, over 971986.58 frames.], batch size: 16, lr: 5.43e-04 +2022-05-04 14:56:36,190 INFO [train.py:715] (7/8) Epoch 3, batch 17400, loss[loss=0.1665, simple_loss=0.2352, pruned_loss=0.04885, over 4892.00 frames.], tot_loss[loss=0.163, simple_loss=0.2315, pruned_loss=0.04722, over 971486.82 frames.], batch size: 19, lr: 5.43e-04 +2022-05-04 14:57:18,253 INFO [train.py:715] (7/8) Epoch 3, batch 17450, loss[loss=0.2165, simple_loss=0.2931, pruned_loss=0.06998, over 4964.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2313, pruned_loss=0.04688, over 972730.77 frames.], batch size: 24, lr: 5.43e-04 +2022-05-04 14:58:00,477 INFO [train.py:715] (7/8) Epoch 3, batch 17500, loss[loss=0.158, simple_loss=0.223, pruned_loss=0.04653, over 4939.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2309, pruned_loss=0.04631, over 971816.56 frames.], batch size: 23, lr: 5.43e-04 +2022-05-04 14:58:41,513 INFO [train.py:715] (7/8) Epoch 3, batch 17550, loss[loss=0.171, simple_loss=0.2521, pruned_loss=0.04496, over 4820.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2308, pruned_loss=0.04611, over 971530.29 frames.], batch size: 27, lr: 5.43e-04 +2022-05-04 14:59:22,856 INFO [train.py:715] (7/8) Epoch 3, batch 17600, loss[loss=0.1743, simple_loss=0.24, pruned_loss=0.05429, over 4708.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2313, pruned_loss=0.04651, over 971244.78 frames.], batch size: 15, lr: 5.43e-04 +2022-05-04 15:00:04,532 INFO [train.py:715] (7/8) Epoch 3, batch 17650, loss[loss=0.1889, 
simple_loss=0.2534, pruned_loss=0.06216, over 4882.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2313, pruned_loss=0.04643, over 971029.54 frames.], batch size: 38, lr: 5.43e-04 +2022-05-04 15:00:46,083 INFO [train.py:715] (7/8) Epoch 3, batch 17700, loss[loss=0.1419, simple_loss=0.2209, pruned_loss=0.03145, over 4923.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2301, pruned_loss=0.04561, over 971235.50 frames.], batch size: 18, lr: 5.43e-04 +2022-05-04 15:01:26,898 INFO [train.py:715] (7/8) Epoch 3, batch 17750, loss[loss=0.1467, simple_loss=0.2208, pruned_loss=0.03636, over 4784.00 frames.], tot_loss[loss=0.162, simple_loss=0.2315, pruned_loss=0.04625, over 970928.08 frames.], batch size: 14, lr: 5.43e-04 +2022-05-04 15:02:08,924 INFO [train.py:715] (7/8) Epoch 3, batch 17800, loss[loss=0.1873, simple_loss=0.2584, pruned_loss=0.05812, over 4836.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2311, pruned_loss=0.04584, over 971031.22 frames.], batch size: 15, lr: 5.42e-04 +2022-05-04 15:02:50,346 INFO [train.py:715] (7/8) Epoch 3, batch 17850, loss[loss=0.1646, simple_loss=0.2403, pruned_loss=0.04447, over 4866.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2301, pruned_loss=0.04555, over 971124.20 frames.], batch size: 32, lr: 5.42e-04 +2022-05-04 15:03:30,309 INFO [train.py:715] (7/8) Epoch 3, batch 17900, loss[loss=0.1496, simple_loss=0.2297, pruned_loss=0.03478, over 4700.00 frames.], tot_loss[loss=0.1617, simple_loss=0.231, pruned_loss=0.0462, over 971014.71 frames.], batch size: 15, lr: 5.42e-04 +2022-05-04 15:04:12,147 INFO [train.py:715] (7/8) Epoch 3, batch 17950, loss[loss=0.1383, simple_loss=0.2158, pruned_loss=0.03037, over 4901.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04634, over 971043.85 frames.], batch size: 19, lr: 5.42e-04 +2022-05-04 15:04:53,406 INFO [train.py:715] (7/8) Epoch 3, batch 18000, loss[loss=0.1805, simple_loss=0.2574, pruned_loss=0.05186, over 4962.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2311, pruned_loss=0.04673, over 971830.72 frames.], batch size: 24, lr: 5.42e-04 +2022-05-04 15:04:53,407 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 15:05:02,070 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1143, simple_loss=0.2002, pruned_loss=0.01414, over 914524.00 frames.
+2022-05-04 15:05:43,868 INFO [train.py:715] (7/8) Epoch 3, batch 18050, loss[loss=0.1794, simple_loss=0.2414, pruned_loss=0.05872, over 4976.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2297, pruned_loss=0.04625, over 972208.28 frames.], batch size: 35, lr: 5.42e-04 +2022-05-04 15:06:25,504 INFO [train.py:715] (7/8) Epoch 3, batch 18100, loss[loss=0.1424, simple_loss=0.2182, pruned_loss=0.03329, over 4797.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2312, pruned_loss=0.04678, over 972774.98 frames.], batch size: 18, lr: 5.42e-04 +2022-05-04 15:07:06,176 INFO [train.py:715] (7/8) Epoch 3, batch 18150, loss[loss=0.1384, simple_loss=0.2138, pruned_loss=0.03152, over 4927.00 frames.], tot_loss[loss=0.1622, simple_loss=0.231, pruned_loss=0.04671, over 973416.51 frames.], batch size: 29, lr: 5.42e-04 +2022-05-04 15:07:47,684 INFO [train.py:715] (7/8) Epoch 3, batch 18200, loss[loss=0.1953, simple_loss=0.2551, pruned_loss=0.06778, over 4987.00 frames.], tot_loss[loss=0.162, simple_loss=0.2309, pruned_loss=0.04652, over 973291.27 frames.], batch size: 25, lr: 5.42e-04 +2022-05-04 15:08:29,478 INFO [train.py:715] (7/8) Epoch 3, batch 18250, loss[loss=0.194, simple_loss=0.2523, pruned_loss=0.06788, over 4910.00 frames.], tot_loss[loss=0.1621, simple_loss=0.231, pruned_loss=0.04666, over 972966.41 frames.], batch size: 39, lr: 5.41e-04 +2022-05-04 15:09:10,297 INFO [train.py:715] (7/8) Epoch 3, batch 18300, loss[loss=0.1539, simple_loss=0.2222, pruned_loss=0.04278, over 4792.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2311, pruned_loss=0.04676, over 972431.82 frames.], batch size: 18, lr: 5.41e-04 +2022-05-04 15:09:51,605 INFO [train.py:715] (7/8) Epoch 3, batch 18350, loss[loss=0.1514, simple_loss=0.2308, pruned_loss=0.03599, over 4885.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2305, pruned_loss=0.04604, over 972241.62 frames.], batch size: 22, lr: 5.41e-04 +2022-05-04 15:10:33,031 INFO [train.py:715] (7/8) Epoch 3, batch 18400, loss[loss=0.164, simple_loss=0.2279, pruned_loss=0.05006, over 4842.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2307, pruned_loss=0.04603, over 972604.01 frames.], batch size: 30, lr: 5.41e-04 +2022-05-04 15:11:13,988 INFO [train.py:715] (7/8) Epoch 3, batch 18450, loss[loss=0.139, simple_loss=0.2104, pruned_loss=0.03383, over 4813.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2312, pruned_loss=0.04644, over 973047.53 frames.], batch size: 12, lr: 5.41e-04 +2022-05-04 15:11:55,026 INFO [train.py:715] (7/8) Epoch 3, batch 18500, loss[loss=0.1806, simple_loss=0.2493, pruned_loss=0.05591, over 4798.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2324, pruned_loss=0.0472, over 972751.96 frames.], batch size: 18, lr: 5.41e-04 +2022-05-04 15:12:36,410 INFO [train.py:715] (7/8) Epoch 3, batch 18550, loss[loss=0.1423, simple_loss=0.2168, pruned_loss=0.03386, over 4877.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2313, pruned_loss=0.04681, over 972376.04 frames.], batch size: 16, lr: 5.41e-04 +2022-05-04 15:13:18,635 INFO [train.py:715] (7/8) Epoch 3, batch 18600, loss[loss=0.1499, simple_loss=0.2298, pruned_loss=0.03504, over 4927.00 frames.], tot_loss[loss=0.1621, simple_loss=0.231, pruned_loss=0.04658, over 972057.26 frames.], batch size: 23, lr: 5.41e-04 +2022-05-04 15:13:58,616 INFO [train.py:715] (7/8) Epoch 3, batch 18650, loss[loss=0.1503, simple_loss=0.2216, pruned_loss=0.03954, over 4866.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2306, pruned_loss=0.0464, over 971610.96 frames.], batch size: 16, lr: 5.41e-04 +2022-05-04 15:14:39,325 
INFO [train.py:715] (7/8) Epoch 3, batch 18700, loss[loss=0.1613, simple_loss=0.2303, pruned_loss=0.04618, over 4862.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2315, pruned_loss=0.04673, over 971164.50 frames.], batch size: 16, lr: 5.40e-04 +2022-05-04 15:15:20,422 INFO [train.py:715] (7/8) Epoch 3, batch 18750, loss[loss=0.1571, simple_loss=0.2251, pruned_loss=0.04458, over 4883.00 frames.], tot_loss[loss=0.1621, simple_loss=0.231, pruned_loss=0.0466, over 971230.59 frames.], batch size: 22, lr: 5.40e-04 +2022-05-04 15:16:00,293 INFO [train.py:715] (7/8) Epoch 3, batch 18800, loss[loss=0.1602, simple_loss=0.2174, pruned_loss=0.05149, over 4770.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2313, pruned_loss=0.04684, over 971077.03 frames.], batch size: 17, lr: 5.40e-04 +2022-05-04 15:16:41,091 INFO [train.py:715] (7/8) Epoch 3, batch 18850, loss[loss=0.1838, simple_loss=0.2513, pruned_loss=0.05818, over 4894.00 frames.], tot_loss[loss=0.163, simple_loss=0.2318, pruned_loss=0.04706, over 972078.72 frames.], batch size: 19, lr: 5.40e-04 +2022-05-04 15:17:21,063 INFO [train.py:715] (7/8) Epoch 3, batch 18900, loss[loss=0.1743, simple_loss=0.2506, pruned_loss=0.04896, over 4871.00 frames.], tot_loss[loss=0.1638, simple_loss=0.2327, pruned_loss=0.04748, over 972036.23 frames.], batch size: 20, lr: 5.40e-04 +2022-05-04 15:18:01,535 INFO [train.py:715] (7/8) Epoch 3, batch 18950, loss[loss=0.1217, simple_loss=0.1971, pruned_loss=0.02312, over 4833.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2316, pruned_loss=0.0471, over 971306.85 frames.], batch size: 12, lr: 5.40e-04 +2022-05-04 15:18:40,944 INFO [train.py:715] (7/8) Epoch 3, batch 19000, loss[loss=0.1613, simple_loss=0.2262, pruned_loss=0.04821, over 4810.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2319, pruned_loss=0.04709, over 971346.82 frames.], batch size: 25, lr: 5.40e-04 +2022-05-04 15:19:20,767 INFO [train.py:715] (7/8) Epoch 3, batch 19050, loss[loss=0.1805, simple_loss=0.2412, pruned_loss=0.05986, over 4756.00 frames.], tot_loss[loss=0.1646, simple_loss=0.233, pruned_loss=0.04807, over 972214.62 frames.], batch size: 19, lr: 5.40e-04 +2022-05-04 15:20:01,077 INFO [train.py:715] (7/8) Epoch 3, batch 19100, loss[loss=0.1824, simple_loss=0.2364, pruned_loss=0.06421, over 4969.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2331, pruned_loss=0.04786, over 972863.23 frames.], batch size: 39, lr: 5.40e-04 +2022-05-04 15:20:40,500 INFO [train.py:715] (7/8) Epoch 3, batch 19150, loss[loss=0.1561, simple_loss=0.2158, pruned_loss=0.04817, over 4828.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2332, pruned_loss=0.04791, over 973504.47 frames.], batch size: 15, lr: 5.40e-04 +2022-05-04 15:21:20,181 INFO [train.py:715] (7/8) Epoch 3, batch 19200, loss[loss=0.1645, simple_loss=0.2407, pruned_loss=0.0441, over 4756.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2331, pruned_loss=0.04799, over 973171.94 frames.], batch size: 19, lr: 5.39e-04 +2022-05-04 15:21:59,822 INFO [train.py:715] (7/8) Epoch 3, batch 19250, loss[loss=0.2011, simple_loss=0.2565, pruned_loss=0.07283, over 4930.00 frames.], tot_loss[loss=0.165, simple_loss=0.2331, pruned_loss=0.04843, over 972705.59 frames.], batch size: 29, lr: 5.39e-04 +2022-05-04 15:22:40,131 INFO [train.py:715] (7/8) Epoch 3, batch 19300, loss[loss=0.1801, simple_loss=0.2412, pruned_loss=0.05954, over 4976.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2333, pruned_loss=0.04852, over 972297.02 frames.], batch size: 33, lr: 5.39e-04 +2022-05-04 15:23:19,476 INFO [train.py:715] (7/8) 
Epoch 3, batch 19350, loss[loss=0.223, simple_loss=0.2809, pruned_loss=0.08254, over 4884.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2328, pruned_loss=0.04846, over 972548.61 frames.], batch size: 22, lr: 5.39e-04 +2022-05-04 15:23:59,205 INFO [train.py:715] (7/8) Epoch 3, batch 19400, loss[loss=0.1808, simple_loss=0.2477, pruned_loss=0.057, over 4984.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2314, pruned_loss=0.04755, over 972404.49 frames.], batch size: 24, lr: 5.39e-04 +2022-05-04 15:24:39,295 INFO [train.py:715] (7/8) Epoch 3, batch 19450, loss[loss=0.1387, simple_loss=0.2057, pruned_loss=0.0358, over 4893.00 frames.], tot_loss[loss=0.164, simple_loss=0.2322, pruned_loss=0.04786, over 971746.75 frames.], batch size: 19, lr: 5.39e-04 +2022-05-04 15:25:18,371 INFO [train.py:715] (7/8) Epoch 3, batch 19500, loss[loss=0.2606, simple_loss=0.2781, pruned_loss=0.1215, over 4750.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2326, pruned_loss=0.04827, over 971314.92 frames.], batch size: 12, lr: 5.39e-04 +2022-05-04 15:25:58,128 INFO [train.py:715] (7/8) Epoch 3, batch 19550, loss[loss=0.1412, simple_loss=0.218, pruned_loss=0.0322, over 4778.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04701, over 970987.68 frames.], batch size: 17, lr: 5.39e-04 +2022-05-04 15:26:37,669 INFO [train.py:715] (7/8) Epoch 3, batch 19600, loss[loss=0.1708, simple_loss=0.2307, pruned_loss=0.0555, over 4879.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2308, pruned_loss=0.04737, over 971217.14 frames.], batch size: 16, lr: 5.39e-04 +2022-05-04 15:27:17,577 INFO [train.py:715] (7/8) Epoch 3, batch 19650, loss[loss=0.1698, simple_loss=0.2344, pruned_loss=0.05255, over 4899.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2312, pruned_loss=0.04758, over 971404.60 frames.], batch size: 19, lr: 5.38e-04 +2022-05-04 15:27:56,471 INFO [train.py:715] (7/8) Epoch 3, batch 19700, loss[loss=0.1504, simple_loss=0.2341, pruned_loss=0.03338, over 4754.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2313, pruned_loss=0.04778, over 970533.82 frames.], batch size: 19, lr: 5.38e-04 +2022-05-04 15:28:36,069 INFO [train.py:715] (7/8) Epoch 3, batch 19750, loss[loss=0.2026, simple_loss=0.2461, pruned_loss=0.07955, over 4878.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2313, pruned_loss=0.04763, over 971385.78 frames.], batch size: 22, lr: 5.38e-04 +2022-05-04 15:29:15,543 INFO [train.py:715] (7/8) Epoch 3, batch 19800, loss[loss=0.1972, simple_loss=0.2557, pruned_loss=0.0694, over 4960.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2313, pruned_loss=0.04754, over 971227.83 frames.], batch size: 15, lr: 5.38e-04 +2022-05-04 15:29:55,123 INFO [train.py:715] (7/8) Epoch 3, batch 19850, loss[loss=0.1715, simple_loss=0.2325, pruned_loss=0.05528, over 4801.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2324, pruned_loss=0.04837, over 971023.85 frames.], batch size: 12, lr: 5.38e-04 +2022-05-04 15:30:34,820 INFO [train.py:715] (7/8) Epoch 3, batch 19900, loss[loss=0.1534, simple_loss=0.2166, pruned_loss=0.04509, over 4946.00 frames.], tot_loss[loss=0.1634, simple_loss=0.231, pruned_loss=0.04791, over 970878.59 frames.], batch size: 39, lr: 5.38e-04 +2022-05-04 15:31:15,114 INFO [train.py:715] (7/8) Epoch 3, batch 19950, loss[loss=0.1461, simple_loss=0.2204, pruned_loss=0.03589, over 4817.00 frames.], tot_loss[loss=0.1619, simple_loss=0.23, pruned_loss=0.04683, over 971324.80 frames.], batch size: 13, lr: 5.38e-04 +2022-05-04 15:31:54,891 INFO [train.py:715] (7/8) Epoch 3, batch 20000, 
loss[loss=0.1618, simple_loss=0.2394, pruned_loss=0.04214, over 4868.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2287, pruned_loss=0.04585, over 971473.86 frames.], batch size: 32, lr: 5.38e-04 +2022-05-04 15:32:34,161 INFO [train.py:715] (7/8) Epoch 3, batch 20050, loss[loss=0.1699, simple_loss=0.2303, pruned_loss=0.0547, over 4984.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2284, pruned_loss=0.0456, over 971916.21 frames.], batch size: 35, lr: 5.38e-04 +2022-05-04 15:33:14,405 INFO [train.py:715] (7/8) Epoch 3, batch 20100, loss[loss=0.1453, simple_loss=0.2221, pruned_loss=0.0342, over 4861.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2281, pruned_loss=0.04557, over 972180.51 frames.], batch size: 20, lr: 5.37e-04 +2022-05-04 15:33:54,298 INFO [train.py:715] (7/8) Epoch 3, batch 20150, loss[loss=0.1731, simple_loss=0.2481, pruned_loss=0.04906, over 4831.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2289, pruned_loss=0.04592, over 972231.18 frames.], batch size: 26, lr: 5.37e-04 +2022-05-04 15:34:33,627 INFO [train.py:715] (7/8) Epoch 3, batch 20200, loss[loss=0.1276, simple_loss=0.197, pruned_loss=0.02908, over 4798.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2288, pruned_loss=0.04609, over 971988.16 frames.], batch size: 12, lr: 5.37e-04 +2022-05-04 15:35:13,295 INFO [train.py:715] (7/8) Epoch 3, batch 20250, loss[loss=0.15, simple_loss=0.2201, pruned_loss=0.03992, over 4875.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2302, pruned_loss=0.04714, over 972053.70 frames.], batch size: 20, lr: 5.37e-04 +2022-05-04 15:35:53,130 INFO [train.py:715] (7/8) Epoch 3, batch 20300, loss[loss=0.1508, simple_loss=0.2276, pruned_loss=0.03697, over 4751.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2305, pruned_loss=0.04727, over 972008.60 frames.], batch size: 19, lr: 5.37e-04 +2022-05-04 15:36:33,514 INFO [train.py:715] (7/8) Epoch 3, batch 20350, loss[loss=0.1523, simple_loss=0.212, pruned_loss=0.04633, over 4858.00 frames.], tot_loss[loss=0.163, simple_loss=0.231, pruned_loss=0.04751, over 971644.13 frames.], batch size: 32, lr: 5.37e-04 +2022-05-04 15:37:12,092 INFO [train.py:715] (7/8) Epoch 3, batch 20400, loss[loss=0.1853, simple_loss=0.2565, pruned_loss=0.05705, over 4789.00 frames.], tot_loss[loss=0.163, simple_loss=0.2306, pruned_loss=0.04771, over 972059.55 frames.], batch size: 18, lr: 5.37e-04 +2022-05-04 15:37:51,792 INFO [train.py:715] (7/8) Epoch 3, batch 20450, loss[loss=0.1491, simple_loss=0.2237, pruned_loss=0.03724, over 4778.00 frames.], tot_loss[loss=0.162, simple_loss=0.2299, pruned_loss=0.04707, over 972762.68 frames.], batch size: 14, lr: 5.37e-04 +2022-05-04 15:38:31,878 INFO [train.py:715] (7/8) Epoch 3, batch 20500, loss[loss=0.1736, simple_loss=0.2455, pruned_loss=0.05083, over 4792.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2306, pruned_loss=0.0471, over 972820.58 frames.], batch size: 24, lr: 5.37e-04 +2022-05-04 15:39:10,985 INFO [train.py:715] (7/8) Epoch 3, batch 20550, loss[loss=0.1063, simple_loss=0.1788, pruned_loss=0.01691, over 4958.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2312, pruned_loss=0.04705, over 973823.62 frames.], batch size: 21, lr: 5.36e-04 +2022-05-04 15:39:50,446 INFO [train.py:715] (7/8) Epoch 3, batch 20600, loss[loss=0.1801, simple_loss=0.2628, pruned_loss=0.0487, over 4807.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2314, pruned_loss=0.04665, over 972161.73 frames.], batch size: 25, lr: 5.36e-04 +2022-05-04 15:40:30,889 INFO [train.py:715] (7/8) Epoch 3, batch 20650, loss[loss=0.1571, 
simple_loss=0.2272, pruned_loss=0.04352, over 4916.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2307, pruned_loss=0.04639, over 972806.12 frames.], batch size: 17, lr: 5.36e-04 +2022-05-04 15:41:10,739 INFO [train.py:715] (7/8) Epoch 3, batch 20700, loss[loss=0.1563, simple_loss=0.2197, pruned_loss=0.04639, over 4836.00 frames.], tot_loss[loss=0.1613, simple_loss=0.23, pruned_loss=0.04633, over 972620.35 frames.], batch size: 15, lr: 5.36e-04 +2022-05-04 15:41:50,208 INFO [train.py:715] (7/8) Epoch 3, batch 20750, loss[loss=0.1484, simple_loss=0.2141, pruned_loss=0.04139, over 4886.00 frames.], tot_loss[loss=0.1609, simple_loss=0.23, pruned_loss=0.04584, over 973050.94 frames.], batch size: 16, lr: 5.36e-04 +2022-05-04 15:42:30,296 INFO [train.py:715] (7/8) Epoch 3, batch 20800, loss[loss=0.1488, simple_loss=0.213, pruned_loss=0.04227, over 4797.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2294, pruned_loss=0.04579, over 973089.97 frames.], batch size: 17, lr: 5.36e-04 +2022-05-04 15:43:11,040 INFO [train.py:715] (7/8) Epoch 3, batch 20850, loss[loss=0.1312, simple_loss=0.2076, pruned_loss=0.02738, over 4696.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2303, pruned_loss=0.04611, over 972944.81 frames.], batch size: 15, lr: 5.36e-04 +2022-05-04 15:43:50,808 INFO [train.py:715] (7/8) Epoch 3, batch 20900, loss[loss=0.1618, simple_loss=0.2383, pruned_loss=0.04266, over 4762.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2306, pruned_loss=0.04636, over 972951.72 frames.], batch size: 19, lr: 5.36e-04 +2022-05-04 15:44:31,211 INFO [train.py:715] (7/8) Epoch 3, batch 20950, loss[loss=0.1611, simple_loss=0.2319, pruned_loss=0.04511, over 4794.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2306, pruned_loss=0.04605, over 972385.99 frames.], batch size: 21, lr: 5.36e-04 +2022-05-04 15:45:11,746 INFO [train.py:715] (7/8) Epoch 3, batch 21000, loss[loss=0.1663, simple_loss=0.2415, pruned_loss=0.04552, over 4935.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2317, pruned_loss=0.04695, over 971518.15 frames.], batch size: 23, lr: 5.36e-04 +2022-05-04 15:45:11,747 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 15:45:24,193 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1137, simple_loss=0.1999, pruned_loss=0.01377, over 914524.00 frames.
+2022-05-04 15:46:04,609 INFO [train.py:715] (7/8) Epoch 3, batch 21050, loss[loss=0.1634, simple_loss=0.2384, pruned_loss=0.04418, over 4813.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2322, pruned_loss=0.04723, over 971441.43 frames.], batch size: 27, lr: 5.35e-04 +2022-05-04 15:46:45,385 INFO [train.py:715] (7/8) Epoch 3, batch 21100, loss[loss=0.1616, simple_loss=0.2349, pruned_loss=0.0441, over 4864.00 frames.], tot_loss[loss=0.162, simple_loss=0.2312, pruned_loss=0.04638, over 971763.36 frames.], batch size: 16, lr: 5.35e-04 +2022-05-04 15:47:25,774 INFO [train.py:715] (7/8) Epoch 3, batch 21150, loss[loss=0.176, simple_loss=0.257, pruned_loss=0.04752, over 4822.00 frames.], tot_loss[loss=0.1623, simple_loss=0.231, pruned_loss=0.04686, over 971303.99 frames.], batch size: 26, lr: 5.35e-04 +2022-05-04 15:48:08,611 INFO [train.py:715] (7/8) Epoch 3, batch 21200, loss[loss=0.1296, simple_loss=0.1978, pruned_loss=0.03074, over 4776.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2312, pruned_loss=0.04653, over 971018.86 frames.], batch size: 12, lr: 5.35e-04 +2022-05-04 15:48:49,624 INFO [train.py:715] (7/8) Epoch 3, batch 21250, loss[loss=0.1183, simple_loss=0.1969, pruned_loss=0.01982, over 4876.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2308, pruned_loss=0.04632, over 970404.70 frames.], batch size: 16, lr: 5.35e-04 +2022-05-04 15:49:28,346 INFO [train.py:715] (7/8) Epoch 3, batch 21300, loss[loss=0.1545, simple_loss=0.2317, pruned_loss=0.03859, over 4784.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2308, pruned_loss=0.04623, over 971024.26 frames.], batch size: 18, lr: 5.35e-04 +2022-05-04 15:50:10,547 INFO [train.py:715] (7/8) Epoch 3, batch 21350, loss[loss=0.1487, simple_loss=0.2143, pruned_loss=0.04155, over 4756.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2315, pruned_loss=0.04655, over 971596.57 frames.], batch size: 16, lr: 5.35e-04 +2022-05-04 15:50:51,359 INFO [train.py:715] (7/8) Epoch 3, batch 21400, loss[loss=0.1505, simple_loss=0.2233, pruned_loss=0.03889, over 4693.00 frames.], tot_loss[loss=0.162, simple_loss=0.231, pruned_loss=0.04646, over 970756.95 frames.], batch size: 15, lr: 5.35e-04 +2022-05-04 15:51:30,341 INFO [train.py:715] (7/8) Epoch 3, batch 21450, loss[loss=0.22, simple_loss=0.2824, pruned_loss=0.07878, over 4976.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2305, pruned_loss=0.04621, over 971715.80 frames.], batch size: 15, lr: 5.35e-04 +2022-05-04 15:52:08,622 INFO [train.py:715] (7/8) Epoch 3, batch 21500, loss[loss=0.1614, simple_loss=0.2284, pruned_loss=0.04718, over 4942.00 frames.], tot_loss[loss=0.161, simple_loss=0.2303, pruned_loss=0.04582, over 972178.13 frames.], batch size: 29, lr: 5.34e-04 +2022-05-04 15:52:47,667 INFO [train.py:715] (7/8) Epoch 3, batch 21550, loss[loss=0.1774, simple_loss=0.2441, pruned_loss=0.05531, over 4910.00 frames.], tot_loss[loss=0.162, simple_loss=0.2307, pruned_loss=0.04664, over 972408.88 frames.], batch size: 39, lr: 5.34e-04 +2022-05-04 15:53:27,194 INFO [train.py:715] (7/8) Epoch 3, batch 21600, loss[loss=0.1411, simple_loss=0.219, pruned_loss=0.0316, over 4924.00 frames.], tot_loss[loss=0.1627, simple_loss=0.231, pruned_loss=0.04723, over 972180.64 frames.], batch size: 29, lr: 5.34e-04 +2022-05-04 15:54:06,110 INFO [train.py:715] (7/8) Epoch 3, batch 21650, loss[loss=0.1491, simple_loss=0.22, pruned_loss=0.0391, over 4758.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2312, pruned_loss=0.04694, over 972537.20 frames.], batch size: 19, lr: 5.34e-04 +2022-05-04 15:54:46,391 INFO 
[train.py:715] (7/8) Epoch 3, batch 21700, loss[loss=0.145, simple_loss=0.2167, pruned_loss=0.03663, over 4931.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2304, pruned_loss=0.04641, over 972841.87 frames.], batch size: 18, lr: 5.34e-04 +2022-05-04 15:55:26,904 INFO [train.py:715] (7/8) Epoch 3, batch 21750, loss[loss=0.1701, simple_loss=0.2472, pruned_loss=0.04653, over 4867.00 frames.], tot_loss[loss=0.162, simple_loss=0.2305, pruned_loss=0.04675, over 973468.66 frames.], batch size: 16, lr: 5.34e-04 +2022-05-04 15:56:06,025 INFO [train.py:715] (7/8) Epoch 3, batch 21800, loss[loss=0.1336, simple_loss=0.2147, pruned_loss=0.02623, over 4857.00 frames.], tot_loss[loss=0.1613, simple_loss=0.23, pruned_loss=0.04632, over 973230.57 frames.], batch size: 20, lr: 5.34e-04 +2022-05-04 15:56:44,181 INFO [train.py:715] (7/8) Epoch 3, batch 21850, loss[loss=0.1547, simple_loss=0.2206, pruned_loss=0.04435, over 4957.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2293, pruned_loss=0.04611, over 973538.35 frames.], batch size: 14, lr: 5.34e-04 +2022-05-04 15:57:22,929 INFO [train.py:715] (7/8) Epoch 3, batch 21900, loss[loss=0.1548, simple_loss=0.2308, pruned_loss=0.03937, over 4841.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2297, pruned_loss=0.04639, over 972929.32 frames.], batch size: 30, lr: 5.34e-04 +2022-05-04 15:58:03,620 INFO [train.py:715] (7/8) Epoch 3, batch 21950, loss[loss=0.1622, simple_loss=0.2344, pruned_loss=0.04496, over 4806.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2298, pruned_loss=0.04664, over 973164.94 frames.], batch size: 25, lr: 5.34e-04 +2022-05-04 15:58:43,249 INFO [train.py:715] (7/8) Epoch 3, batch 22000, loss[loss=0.1687, simple_loss=0.237, pruned_loss=0.05023, over 4938.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2301, pruned_loss=0.04636, over 973490.42 frames.], batch size: 29, lr: 5.33e-04 +2022-05-04 15:59:23,571 INFO [train.py:715] (7/8) Epoch 3, batch 22050, loss[loss=0.1569, simple_loss=0.2292, pruned_loss=0.04227, over 4947.00 frames.], tot_loss[loss=0.161, simple_loss=0.2298, pruned_loss=0.04615, over 973231.78 frames.], batch size: 21, lr: 5.33e-04 +2022-05-04 16:00:04,302 INFO [train.py:715] (7/8) Epoch 3, batch 22100, loss[loss=0.1756, simple_loss=0.2401, pruned_loss=0.05561, over 4697.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2306, pruned_loss=0.04649, over 972346.04 frames.], batch size: 15, lr: 5.33e-04 +2022-05-04 16:00:44,823 INFO [train.py:715] (7/8) Epoch 3, batch 22150, loss[loss=0.1652, simple_loss=0.2335, pruned_loss=0.04851, over 4776.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2308, pruned_loss=0.0471, over 972440.01 frames.], batch size: 14, lr: 5.33e-04 +2022-05-04 16:01:24,043 INFO [train.py:715] (7/8) Epoch 3, batch 22200, loss[loss=0.1509, simple_loss=0.2308, pruned_loss=0.03552, over 4959.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2299, pruned_loss=0.0465, over 972802.76 frames.], batch size: 24, lr: 5.33e-04 +2022-05-04 16:02:04,291 INFO [train.py:715] (7/8) Epoch 3, batch 22250, loss[loss=0.1484, simple_loss=0.2136, pruned_loss=0.04155, over 4746.00 frames.], tot_loss[loss=0.161, simple_loss=0.2299, pruned_loss=0.04602, over 972760.72 frames.], batch size: 12, lr: 5.33e-04 +2022-05-04 16:02:45,551 INFO [train.py:715] (7/8) Epoch 3, batch 22300, loss[loss=0.1448, simple_loss=0.2204, pruned_loss=0.03455, over 4944.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2304, pruned_loss=0.04627, over 972466.82 frames.], batch size: 35, lr: 5.33e-04 +2022-05-04 16:03:24,534 INFO [train.py:715] (7/8) Epoch 
3, batch 22350, loss[loss=0.1726, simple_loss=0.2298, pruned_loss=0.05769, over 4756.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04635, over 971944.47 frames.], batch size: 19, lr: 5.33e-04 +2022-05-04 16:04:04,611 INFO [train.py:715] (7/8) Epoch 3, batch 22400, loss[loss=0.1402, simple_loss=0.2097, pruned_loss=0.03531, over 4780.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2303, pruned_loss=0.04625, over 971283.23 frames.], batch size: 12, lr: 5.33e-04 +2022-05-04 16:04:45,523 INFO [train.py:715] (7/8) Epoch 3, batch 22450, loss[loss=0.1732, simple_loss=0.231, pruned_loss=0.05772, over 4863.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2308, pruned_loss=0.04692, over 972025.48 frames.], batch size: 16, lr: 5.32e-04 +2022-05-04 16:05:25,969 INFO [train.py:715] (7/8) Epoch 3, batch 22500, loss[loss=0.1946, simple_loss=0.2615, pruned_loss=0.06379, over 4886.00 frames.], tot_loss[loss=0.162, simple_loss=0.2305, pruned_loss=0.04675, over 972265.17 frames.], batch size: 22, lr: 5.32e-04 +2022-05-04 16:06:05,380 INFO [train.py:715] (7/8) Epoch 3, batch 22550, loss[loss=0.1807, simple_loss=0.2523, pruned_loss=0.05457, over 4848.00 frames.], tot_loss[loss=0.1622, simple_loss=0.231, pruned_loss=0.04673, over 972205.51 frames.], batch size: 20, lr: 5.32e-04 +2022-05-04 16:06:45,623 INFO [train.py:715] (7/8) Epoch 3, batch 22600, loss[loss=0.1667, simple_loss=0.2299, pruned_loss=0.05178, over 4919.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2314, pruned_loss=0.04659, over 972355.83 frames.], batch size: 18, lr: 5.32e-04 +2022-05-04 16:07:26,480 INFO [train.py:715] (7/8) Epoch 3, batch 22650, loss[loss=0.2292, simple_loss=0.283, pruned_loss=0.08771, over 4914.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2317, pruned_loss=0.04731, over 972753.61 frames.], batch size: 38, lr: 5.32e-04 +2022-05-04 16:08:06,299 INFO [train.py:715] (7/8) Epoch 3, batch 22700, loss[loss=0.1873, simple_loss=0.2512, pruned_loss=0.06171, over 4914.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2316, pruned_loss=0.04764, over 972463.71 frames.], batch size: 17, lr: 5.32e-04 +2022-05-04 16:08:46,702 INFO [train.py:715] (7/8) Epoch 3, batch 22750, loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03778, over 4862.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2321, pruned_loss=0.04785, over 971435.59 frames.], batch size: 32, lr: 5.32e-04 +2022-05-04 16:09:27,107 INFO [train.py:715] (7/8) Epoch 3, batch 22800, loss[loss=0.1492, simple_loss=0.2174, pruned_loss=0.04048, over 4905.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2321, pruned_loss=0.04764, over 971945.71 frames.], batch size: 19, lr: 5.32e-04 +2022-05-04 16:10:07,179 INFO [train.py:715] (7/8) Epoch 3, batch 22850, loss[loss=0.2149, simple_loss=0.2691, pruned_loss=0.08041, over 4841.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2308, pruned_loss=0.04723, over 972187.09 frames.], batch size: 15, lr: 5.32e-04 +2022-05-04 16:10:46,896 INFO [train.py:715] (7/8) Epoch 3, batch 22900, loss[loss=0.169, simple_loss=0.2437, pruned_loss=0.0471, over 4894.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2313, pruned_loss=0.04702, over 972488.03 frames.], batch size: 19, lr: 5.32e-04 +2022-05-04 16:11:27,326 INFO [train.py:715] (7/8) Epoch 3, batch 22950, loss[loss=0.2049, simple_loss=0.2547, pruned_loss=0.07754, over 4834.00 frames.], tot_loss[loss=0.1635, simple_loss=0.232, pruned_loss=0.04747, over 972700.36 frames.], batch size: 30, lr: 5.31e-04 +2022-05-04 16:12:08,438 INFO [train.py:715] (7/8) Epoch 3, batch 23000, 
loss[loss=0.1811, simple_loss=0.2354, pruned_loss=0.06345, over 4685.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2312, pruned_loss=0.04711, over 972022.83 frames.], batch size: 15, lr: 5.31e-04 +2022-05-04 16:12:48,296 INFO [train.py:715] (7/8) Epoch 3, batch 23050, loss[loss=0.1291, simple_loss=0.1935, pruned_loss=0.0324, over 4914.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2313, pruned_loss=0.04701, over 972097.38 frames.], batch size: 17, lr: 5.31e-04 +2022-05-04 16:13:28,631 INFO [train.py:715] (7/8) Epoch 3, batch 23100, loss[loss=0.1472, simple_loss=0.2285, pruned_loss=0.03292, over 4810.00 frames.], tot_loss[loss=0.163, simple_loss=0.2317, pruned_loss=0.04718, over 971827.98 frames.], batch size: 25, lr: 5.31e-04 +2022-05-04 16:14:09,396 INFO [train.py:715] (7/8) Epoch 3, batch 23150, loss[loss=0.1642, simple_loss=0.2181, pruned_loss=0.05518, over 4860.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2306, pruned_loss=0.04682, over 971925.80 frames.], batch size: 32, lr: 5.31e-04 +2022-05-04 16:14:49,965 INFO [train.py:715] (7/8) Epoch 3, batch 23200, loss[loss=0.1693, simple_loss=0.2286, pruned_loss=0.05502, over 4957.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2301, pruned_loss=0.04658, over 972292.76 frames.], batch size: 35, lr: 5.31e-04 +2022-05-04 16:15:29,489 INFO [train.py:715] (7/8) Epoch 3, batch 23250, loss[loss=0.1826, simple_loss=0.2382, pruned_loss=0.06351, over 4982.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2302, pruned_loss=0.04663, over 972730.60 frames.], batch size: 25, lr: 5.31e-04 +2022-05-04 16:16:10,261 INFO [train.py:715] (7/8) Epoch 3, batch 23300, loss[loss=0.163, simple_loss=0.2347, pruned_loss=0.04565, over 4764.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2301, pruned_loss=0.04703, over 972712.30 frames.], batch size: 18, lr: 5.31e-04 +2022-05-04 16:16:49,870 INFO [train.py:715] (7/8) Epoch 3, batch 23350, loss[loss=0.16, simple_loss=0.2266, pruned_loss=0.04667, over 4816.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2305, pruned_loss=0.0471, over 972861.74 frames.], batch size: 25, lr: 5.31e-04 +2022-05-04 16:17:27,676 INFO [train.py:715] (7/8) Epoch 3, batch 23400, loss[loss=0.1693, simple_loss=0.2433, pruned_loss=0.04764, over 4990.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2309, pruned_loss=0.04718, over 973699.34 frames.], batch size: 15, lr: 5.30e-04 +2022-05-04 16:18:06,223 INFO [train.py:715] (7/8) Epoch 3, batch 23450, loss[loss=0.1949, simple_loss=0.2499, pruned_loss=0.06995, over 4893.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2301, pruned_loss=0.04676, over 973256.44 frames.], batch size: 17, lr: 5.30e-04 +2022-05-04 16:18:44,911 INFO [train.py:715] (7/8) Epoch 3, batch 23500, loss[loss=0.1424, simple_loss=0.2058, pruned_loss=0.03943, over 4781.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2306, pruned_loss=0.04695, over 973247.70 frames.], batch size: 14, lr: 5.30e-04 +2022-05-04 16:19:24,107 INFO [train.py:715] (7/8) Epoch 3, batch 23550, loss[loss=0.1515, simple_loss=0.2185, pruned_loss=0.04228, over 4772.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2299, pruned_loss=0.04614, over 973067.47 frames.], batch size: 18, lr: 5.30e-04 +2022-05-04 16:20:05,337 INFO [train.py:715] (7/8) Epoch 3, batch 23600, loss[loss=0.1678, simple_loss=0.2421, pruned_loss=0.04675, over 4982.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2309, pruned_loss=0.04662, over 972735.42 frames.], batch size: 25, lr: 5.30e-04 +2022-05-04 16:20:44,864 INFO [train.py:715] (7/8) Epoch 3, batch 23650, loss[loss=0.155, 
simple_loss=0.2141, pruned_loss=0.04797, over 4844.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2311, pruned_loss=0.04689, over 973429.78 frames.], batch size: 30, lr: 5.30e-04 +2022-05-04 16:21:24,821 INFO [train.py:715] (7/8) Epoch 3, batch 23700, loss[loss=0.136, simple_loss=0.2128, pruned_loss=0.02966, over 4830.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2313, pruned_loss=0.04673, over 973212.42 frames.], batch size: 26, lr: 5.30e-04 +2022-05-04 16:22:03,571 INFO [train.py:715] (7/8) Epoch 3, batch 23750, loss[loss=0.1678, simple_loss=0.2377, pruned_loss=0.04898, over 4948.00 frames.], tot_loss[loss=0.162, simple_loss=0.2307, pruned_loss=0.04665, over 972701.02 frames.], batch size: 39, lr: 5.30e-04 +2022-05-04 16:22:43,183 INFO [train.py:715] (7/8) Epoch 3, batch 23800, loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03405, over 4926.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2306, pruned_loss=0.04678, over 972107.31 frames.], batch size: 29, lr: 5.30e-04 +2022-05-04 16:23:22,780 INFO [train.py:715] (7/8) Epoch 3, batch 23850, loss[loss=0.1636, simple_loss=0.2294, pruned_loss=0.04893, over 4976.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2309, pruned_loss=0.04677, over 972424.81 frames.], batch size: 35, lr: 5.30e-04 +2022-05-04 16:24:02,499 INFO [train.py:715] (7/8) Epoch 3, batch 23900, loss[loss=0.1663, simple_loss=0.2518, pruned_loss=0.04037, over 4982.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2312, pruned_loss=0.04687, over 972817.98 frames.], batch size: 24, lr: 5.29e-04 +2022-05-04 16:24:41,553 INFO [train.py:715] (7/8) Epoch 3, batch 23950, loss[loss=0.1473, simple_loss=0.215, pruned_loss=0.03978, over 4964.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2305, pruned_loss=0.04658, over 972707.36 frames.], batch size: 35, lr: 5.29e-04 +2022-05-04 16:25:20,401 INFO [train.py:715] (7/8) Epoch 3, batch 24000, loss[loss=0.1438, simple_loss=0.2056, pruned_loss=0.04099, over 4778.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2307, pruned_loss=0.04698, over 972255.53 frames.], batch size: 14, lr: 5.29e-04 +2022-05-04 16:25:20,401 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 16:25:32,862 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1132, simple_loss=0.1992, pruned_loss=0.0136, over 914524.00 frames.
+2022-05-04 16:26:12,215 INFO [train.py:715] (7/8) Epoch 3, batch 24050, loss[loss=0.1539, simple_loss=0.2194, pruned_loss=0.04416, over 4816.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2303, pruned_loss=0.04672, over 971533.66 frames.], batch size: 25, lr: 5.29e-04 +2022-05-04 16:26:52,064 INFO [train.py:715] (7/8) Epoch 3, batch 24100, loss[loss=0.1521, simple_loss=0.2121, pruned_loss=0.04609, over 4805.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2308, pruned_loss=0.047, over 972174.98 frames.], batch size: 12, lr: 5.29e-04 +2022-05-04 16:27:30,863 INFO [train.py:715] (7/8) Epoch 3, batch 24150, loss[loss=0.1396, simple_loss=0.2131, pruned_loss=0.0331, over 4879.00 frames.], tot_loss[loss=0.1622, simple_loss=0.231, pruned_loss=0.04676, over 973357.18 frames.], batch size: 22, lr: 5.29e-04 +2022-05-04 16:28:10,109 INFO [train.py:715] (7/8) Epoch 3, batch 24200, loss[loss=0.1401, simple_loss=0.2183, pruned_loss=0.03091, over 4831.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2309, pruned_loss=0.04672, over 973170.89 frames.], batch size: 26, lr: 5.29e-04 +2022-05-04 16:28:50,507 INFO [train.py:715] (7/8) Epoch 3, batch 24250, loss[loss=0.2103, simple_loss=0.256, pruned_loss=0.08233, over 4833.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2305, pruned_loss=0.04659, over 973680.39 frames.], batch size: 25, lr: 5.29e-04 +2022-05-04 16:29:30,771 INFO [train.py:715] (7/8) Epoch 3, batch 24300, loss[loss=0.1552, simple_loss=0.2306, pruned_loss=0.03995, over 4911.00 frames.], tot_loss[loss=0.1623, simple_loss=0.231, pruned_loss=0.04677, over 973428.27 frames.], batch size: 18, lr: 5.29e-04 +2022-05-04 16:30:10,087 INFO [train.py:715] (7/8) Epoch 3, batch 24350, loss[loss=0.1363, simple_loss=0.2, pruned_loss=0.0363, over 4747.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2304, pruned_loss=0.0466, over 973319.80 frames.], batch size: 16, lr: 5.29e-04 +2022-05-04 16:30:49,732 INFO [train.py:715] (7/8) Epoch 3, batch 24400, loss[loss=0.108, simple_loss=0.1742, pruned_loss=0.02086, over 4818.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04701, over 973534.92 frames.], batch size: 12, lr: 5.28e-04 +2022-05-04 16:31:29,802 INFO [train.py:715] (7/8) Epoch 3, batch 24450, loss[loss=0.1493, simple_loss=0.223, pruned_loss=0.03777, over 4980.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2304, pruned_loss=0.04707, over 973923.88 frames.], batch size: 28, lr: 5.28e-04 +2022-05-04 16:32:09,119 INFO [train.py:715] (7/8) Epoch 3, batch 24500, loss[loss=0.1277, simple_loss=0.1941, pruned_loss=0.03066, over 4825.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2302, pruned_loss=0.04702, over 972518.50 frames.], batch size: 26, lr: 5.28e-04 +2022-05-04 16:32:48,518 INFO [train.py:715] (7/8) Epoch 3, batch 24550, loss[loss=0.2139, simple_loss=0.2677, pruned_loss=0.08003, over 4917.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2315, pruned_loss=0.0476, over 973135.51 frames.], batch size: 18, lr: 5.28e-04 +2022-05-04 16:33:28,756 INFO [train.py:715] (7/8) Epoch 3, batch 24600, loss[loss=0.1431, simple_loss=0.2255, pruned_loss=0.03036, over 4739.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2313, pruned_loss=0.04691, over 971990.73 frames.], batch size: 16, lr: 5.28e-04 +2022-05-04 16:34:08,292 INFO [train.py:715] (7/8) Epoch 3, batch 24650, loss[loss=0.1553, simple_loss=0.2187, pruned_loss=0.04597, over 4941.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2303, pruned_loss=0.04696, over 972041.53 frames.], batch size: 35, lr: 5.28e-04 +2022-05-04 16:34:47,793 INFO 
[train.py:715] (7/8) Epoch 3, batch 24700, loss[loss=0.145, simple_loss=0.2157, pruned_loss=0.03721, over 4778.00 frames.], tot_loss[loss=0.1627, simple_loss=0.231, pruned_loss=0.04723, over 971763.48 frames.], batch size: 18, lr: 5.28e-04 +2022-05-04 16:35:26,414 INFO [train.py:715] (7/8) Epoch 3, batch 24750, loss[loss=0.1513, simple_loss=0.2151, pruned_loss=0.04372, over 4799.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2316, pruned_loss=0.04746, over 971713.92 frames.], batch size: 13, lr: 5.28e-04 +2022-05-04 16:36:07,060 INFO [train.py:715] (7/8) Epoch 3, batch 24800, loss[loss=0.1565, simple_loss=0.2368, pruned_loss=0.03811, over 4822.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2315, pruned_loss=0.04772, over 972526.11 frames.], batch size: 26, lr: 5.28e-04 +2022-05-04 16:36:46,787 INFO [train.py:715] (7/8) Epoch 3, batch 24850, loss[loss=0.144, simple_loss=0.2294, pruned_loss=0.02934, over 4814.00 frames.], tot_loss[loss=0.1629, simple_loss=0.231, pruned_loss=0.04742, over 972152.86 frames.], batch size: 21, lr: 5.28e-04 +2022-05-04 16:37:25,563 INFO [train.py:715] (7/8) Epoch 3, batch 24900, loss[loss=0.1581, simple_loss=0.2291, pruned_loss=0.04356, over 4887.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2299, pruned_loss=0.0468, over 972180.31 frames.], batch size: 32, lr: 5.27e-04 +2022-05-04 16:38:05,481 INFO [train.py:715] (7/8) Epoch 3, batch 24950, loss[loss=0.1551, simple_loss=0.2179, pruned_loss=0.04619, over 4920.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2312, pruned_loss=0.04707, over 972161.20 frames.], batch size: 23, lr: 5.27e-04 +2022-05-04 16:38:45,656 INFO [train.py:715] (7/8) Epoch 3, batch 25000, loss[loss=0.1803, simple_loss=0.2372, pruned_loss=0.06171, over 4913.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2306, pruned_loss=0.04693, over 971937.44 frames.], batch size: 17, lr: 5.27e-04 +2022-05-04 16:39:25,200 INFO [train.py:715] (7/8) Epoch 3, batch 25050, loss[loss=0.1434, simple_loss=0.2118, pruned_loss=0.03752, over 4960.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2301, pruned_loss=0.04674, over 971371.01 frames.], batch size: 35, lr: 5.27e-04 +2022-05-04 16:40:04,371 INFO [train.py:715] (7/8) Epoch 3, batch 25100, loss[loss=0.1724, simple_loss=0.2355, pruned_loss=0.05463, over 4966.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2296, pruned_loss=0.04673, over 970884.02 frames.], batch size: 39, lr: 5.27e-04 +2022-05-04 16:40:44,399 INFO [train.py:715] (7/8) Epoch 3, batch 25150, loss[loss=0.1568, simple_loss=0.2217, pruned_loss=0.04591, over 4913.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2307, pruned_loss=0.04708, over 971177.89 frames.], batch size: 17, lr: 5.27e-04 +2022-05-04 16:41:23,895 INFO [train.py:715] (7/8) Epoch 3, batch 25200, loss[loss=0.1604, simple_loss=0.2301, pruned_loss=0.04539, over 4911.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2309, pruned_loss=0.04703, over 971428.01 frames.], batch size: 17, lr: 5.27e-04 +2022-05-04 16:42:03,028 INFO [train.py:715] (7/8) Epoch 3, batch 25250, loss[loss=0.1532, simple_loss=0.2197, pruned_loss=0.04334, over 4755.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2304, pruned_loss=0.04696, over 971006.58 frames.], batch size: 14, lr: 5.27e-04 +2022-05-04 16:42:43,127 INFO [train.py:715] (7/8) Epoch 3, batch 25300, loss[loss=0.1938, simple_loss=0.2584, pruned_loss=0.06464, over 4876.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2304, pruned_loss=0.04647, over 971389.87 frames.], batch size: 39, lr: 5.27e-04 +2022-05-04 16:43:22,962 INFO [train.py:715] (7/8) 
Epoch 3, batch 25350, loss[loss=0.1462, simple_loss=0.2147, pruned_loss=0.03882, over 4817.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2298, pruned_loss=0.04657, over 970587.74 frames.], batch size: 27, lr: 5.26e-04 +2022-05-04 16:44:02,967 INFO [train.py:715] (7/8) Epoch 3, batch 25400, loss[loss=0.1457, simple_loss=0.2108, pruned_loss=0.04036, over 4760.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2299, pruned_loss=0.04634, over 971036.30 frames.], batch size: 19, lr: 5.26e-04 +2022-05-04 16:44:42,166 INFO [train.py:715] (7/8) Epoch 3, batch 25450, loss[loss=0.1443, simple_loss=0.2128, pruned_loss=0.03794, over 4939.00 frames.], tot_loss[loss=0.161, simple_loss=0.2295, pruned_loss=0.04622, over 971614.34 frames.], batch size: 21, lr: 5.26e-04 +2022-05-04 16:45:22,340 INFO [train.py:715] (7/8) Epoch 3, batch 25500, loss[loss=0.1515, simple_loss=0.2288, pruned_loss=0.03705, over 4918.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2304, pruned_loss=0.0466, over 971756.66 frames.], batch size: 29, lr: 5.26e-04 +2022-05-04 16:46:02,170 INFO [train.py:715] (7/8) Epoch 3, batch 25550, loss[loss=0.1423, simple_loss=0.2103, pruned_loss=0.03712, over 4947.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2304, pruned_loss=0.04657, over 971709.97 frames.], batch size: 24, lr: 5.26e-04 +2022-05-04 16:46:41,627 INFO [train.py:715] (7/8) Epoch 3, batch 25600, loss[loss=0.1677, simple_loss=0.2334, pruned_loss=0.05107, over 4832.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2299, pruned_loss=0.04655, over 971833.92 frames.], batch size: 15, lr: 5.26e-04 +2022-05-04 16:47:22,013 INFO [train.py:715] (7/8) Epoch 3, batch 25650, loss[loss=0.1908, simple_loss=0.2579, pruned_loss=0.06182, over 4884.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2307, pruned_loss=0.04691, over 971782.15 frames.], batch size: 22, lr: 5.26e-04 +2022-05-04 16:48:02,208 INFO [train.py:715] (7/8) Epoch 3, batch 25700, loss[loss=0.1575, simple_loss=0.2335, pruned_loss=0.04072, over 4800.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2307, pruned_loss=0.04661, over 971590.22 frames.], batch size: 14, lr: 5.26e-04 +2022-05-04 16:48:41,537 INFO [train.py:715] (7/8) Epoch 3, batch 25750, loss[loss=0.2026, simple_loss=0.2662, pruned_loss=0.06956, over 4877.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2297, pruned_loss=0.04604, over 971736.74 frames.], batch size: 22, lr: 5.26e-04 +2022-05-04 16:49:21,103 INFO [train.py:715] (7/8) Epoch 3, batch 25800, loss[loss=0.1516, simple_loss=0.2248, pruned_loss=0.03918, over 4906.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2296, pruned_loss=0.04563, over 973175.43 frames.], batch size: 39, lr: 5.26e-04 +2022-05-04 16:50:01,087 INFO [train.py:715] (7/8) Epoch 3, batch 25850, loss[loss=0.1662, simple_loss=0.221, pruned_loss=0.05566, over 4920.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2288, pruned_loss=0.04515, over 972161.47 frames.], batch size: 18, lr: 5.25e-04 +2022-05-04 16:50:39,400 INFO [train.py:715] (7/8) Epoch 3, batch 25900, loss[loss=0.1732, simple_loss=0.2375, pruned_loss=0.05444, over 4909.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2289, pruned_loss=0.04573, over 971973.73 frames.], batch size: 18, lr: 5.25e-04 +2022-05-04 16:51:18,330 INFO [train.py:715] (7/8) Epoch 3, batch 25950, loss[loss=0.1773, simple_loss=0.2408, pruned_loss=0.05693, over 4975.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2285, pruned_loss=0.04521, over 971881.72 frames.], batch size: 14, lr: 5.25e-04 +2022-05-04 16:51:58,435 INFO [train.py:715] (7/8) Epoch 3, batch 26000, 
loss[loss=0.1507, simple_loss=0.2166, pruned_loss=0.04236, over 4753.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2291, pruned_loss=0.04583, over 971697.93 frames.], batch size: 16, lr: 5.25e-04 +2022-05-04 16:52:37,680 INFO [train.py:715] (7/8) Epoch 3, batch 26050, loss[loss=0.1624, simple_loss=0.2294, pruned_loss=0.04771, over 4990.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2288, pruned_loss=0.04554, over 971302.78 frames.], batch size: 28, lr: 5.25e-04 +2022-05-04 16:53:16,016 INFO [train.py:715] (7/8) Epoch 3, batch 26100, loss[loss=0.153, simple_loss=0.2209, pruned_loss=0.04252, over 4751.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2294, pruned_loss=0.04573, over 971177.10 frames.], batch size: 16, lr: 5.25e-04 +2022-05-04 16:53:55,506 INFO [train.py:715] (7/8) Epoch 3, batch 26150, loss[loss=0.16, simple_loss=0.2322, pruned_loss=0.04392, over 4883.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2301, pruned_loss=0.04617, over 971529.92 frames.], batch size: 22, lr: 5.25e-04 +2022-05-04 16:54:35,542 INFO [train.py:715] (7/8) Epoch 3, batch 26200, loss[loss=0.2075, simple_loss=0.2656, pruned_loss=0.0747, over 4978.00 frames.], tot_loss[loss=0.1619, simple_loss=0.231, pruned_loss=0.04641, over 971125.33 frames.], batch size: 14, lr: 5.25e-04 +2022-05-04 16:55:13,650 INFO [train.py:715] (7/8) Epoch 3, batch 26250, loss[loss=0.1386, simple_loss=0.2032, pruned_loss=0.03703, over 4827.00 frames.], tot_loss[loss=0.1622, simple_loss=0.231, pruned_loss=0.04669, over 971186.72 frames.], batch size: 13, lr: 5.25e-04 +2022-05-04 16:55:52,858 INFO [train.py:715] (7/8) Epoch 3, batch 26300, loss[loss=0.162, simple_loss=0.2283, pruned_loss=0.0478, over 4815.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2306, pruned_loss=0.04642, over 971512.94 frames.], batch size: 25, lr: 5.25e-04 +2022-05-04 16:56:32,822 INFO [train.py:715] (7/8) Epoch 3, batch 26350, loss[loss=0.1195, simple_loss=0.1891, pruned_loss=0.02494, over 4789.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2306, pruned_loss=0.04629, over 971522.32 frames.], batch size: 12, lr: 5.24e-04 +2022-05-04 16:57:12,186 INFO [train.py:715] (7/8) Epoch 3, batch 26400, loss[loss=0.1434, simple_loss=0.2221, pruned_loss=0.03239, over 4808.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2301, pruned_loss=0.04641, over 971217.59 frames.], batch size: 25, lr: 5.24e-04 +2022-05-04 16:57:51,177 INFO [train.py:715] (7/8) Epoch 3, batch 26450, loss[loss=0.1831, simple_loss=0.2535, pruned_loss=0.05639, over 4968.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2299, pruned_loss=0.0462, over 972011.72 frames.], batch size: 21, lr: 5.24e-04 +2022-05-04 16:58:30,425 INFO [train.py:715] (7/8) Epoch 3, batch 26500, loss[loss=0.1672, simple_loss=0.2252, pruned_loss=0.0546, over 4857.00 frames.], tot_loss[loss=0.162, simple_loss=0.2307, pruned_loss=0.04666, over 971513.50 frames.], batch size: 20, lr: 5.24e-04 +2022-05-04 16:59:09,913 INFO [train.py:715] (7/8) Epoch 3, batch 26550, loss[loss=0.1723, simple_loss=0.2326, pruned_loss=0.056, over 4881.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2301, pruned_loss=0.04652, over 971095.54 frames.], batch size: 16, lr: 5.24e-04 +2022-05-04 16:59:48,116 INFO [train.py:715] (7/8) Epoch 3, batch 26600, loss[loss=0.1504, simple_loss=0.2247, pruned_loss=0.03809, over 4832.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2313, pruned_loss=0.04705, over 972334.97 frames.], batch size: 25, lr: 5.24e-04 +2022-05-04 17:00:27,334 INFO [train.py:715] (7/8) Epoch 3, batch 26650, loss[loss=0.1656, 
simple_loss=0.239, pruned_loss=0.04615, over 4865.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2311, pruned_loss=0.04733, over 971683.83 frames.], batch size: 20, lr: 5.24e-04 +2022-05-04 17:01:07,876 INFO [train.py:715] (7/8) Epoch 3, batch 26700, loss[loss=0.2234, simple_loss=0.2817, pruned_loss=0.08259, over 4751.00 frames.], tot_loss[loss=0.163, simple_loss=0.2316, pruned_loss=0.04715, over 972000.72 frames.], batch size: 16, lr: 5.24e-04 +2022-05-04 17:01:47,355 INFO [train.py:715] (7/8) Epoch 3, batch 26750, loss[loss=0.1501, simple_loss=0.2167, pruned_loss=0.04179, over 4886.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2323, pruned_loss=0.04718, over 972515.54 frames.], batch size: 19, lr: 5.24e-04 +2022-05-04 17:02:26,602 INFO [train.py:715] (7/8) Epoch 3, batch 26800, loss[loss=0.1847, simple_loss=0.2442, pruned_loss=0.06262, over 4935.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2333, pruned_loss=0.04799, over 972633.74 frames.], batch size: 21, lr: 5.24e-04 +2022-05-04 17:03:06,724 INFO [train.py:715] (7/8) Epoch 3, batch 26850, loss[loss=0.1689, simple_loss=0.2351, pruned_loss=0.05136, over 4912.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2319, pruned_loss=0.04727, over 972866.32 frames.], batch size: 17, lr: 5.23e-04 +2022-05-04 17:03:47,104 INFO [train.py:715] (7/8) Epoch 3, batch 26900, loss[loss=0.1876, simple_loss=0.2571, pruned_loss=0.05904, over 4976.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2316, pruned_loss=0.04712, over 972137.76 frames.], batch size: 35, lr: 5.23e-04 +2022-05-04 17:04:26,665 INFO [train.py:715] (7/8) Epoch 3, batch 26950, loss[loss=0.1667, simple_loss=0.236, pruned_loss=0.04873, over 4842.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2308, pruned_loss=0.04643, over 972414.18 frames.], batch size: 15, lr: 5.23e-04 +2022-05-04 17:05:05,431 INFO [train.py:715] (7/8) Epoch 3, batch 27000, loss[loss=0.1691, simple_loss=0.2452, pruned_loss=0.04647, over 4813.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2302, pruned_loss=0.04639, over 972057.16 frames.], batch size: 15, lr: 5.23e-04 +2022-05-04 17:05:05,432 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 17:05:14,909 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1134, simple_loss=0.1995, pruned_loss=0.01366, over 914524.00 frames. 
+2022-05-04 17:05:54,551 INFO [train.py:715] (7/8) Epoch 3, batch 27050, loss[loss=0.1364, simple_loss=0.2089, pruned_loss=0.03192, over 4804.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2298, pruned_loss=0.04632, over 972228.60 frames.], batch size: 13, lr: 5.23e-04 +2022-05-04 17:06:34,876 INFO [train.py:715] (7/8) Epoch 3, batch 27100, loss[loss=0.2272, simple_loss=0.2754, pruned_loss=0.08954, over 4973.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2294, pruned_loss=0.04611, over 972803.84 frames.], batch size: 39, lr: 5.23e-04 +2022-05-04 17:07:14,170 INFO [train.py:715] (7/8) Epoch 3, batch 27150, loss[loss=0.1463, simple_loss=0.2131, pruned_loss=0.03975, over 4759.00 frames.], tot_loss[loss=0.1603, simple_loss=0.229, pruned_loss=0.04581, over 972729.95 frames.], batch size: 16, lr: 5.23e-04 +2022-05-04 17:07:52,933 INFO [train.py:715] (7/8) Epoch 3, batch 27200, loss[loss=0.1521, simple_loss=0.2193, pruned_loss=0.0425, over 4833.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2298, pruned_loss=0.04596, over 972224.00 frames.], batch size: 15, lr: 5.23e-04 +2022-05-04 17:08:32,670 INFO [train.py:715] (7/8) Epoch 3, batch 27250, loss[loss=0.1506, simple_loss=0.2225, pruned_loss=0.03935, over 4816.00 frames.], tot_loss[loss=0.1605, simple_loss=0.229, pruned_loss=0.04603, over 972008.66 frames.], batch size: 26, lr: 5.23e-04 +2022-05-04 17:09:12,368 INFO [train.py:715] (7/8) Epoch 3, batch 27300, loss[loss=0.1849, simple_loss=0.2562, pruned_loss=0.05686, over 4869.00 frames.], tot_loss[loss=0.161, simple_loss=0.2296, pruned_loss=0.0462, over 972502.68 frames.], batch size: 22, lr: 5.23e-04 +2022-05-04 17:09:51,026 INFO [train.py:715] (7/8) Epoch 3, batch 27350, loss[loss=0.1499, simple_loss=0.2294, pruned_loss=0.03523, over 4945.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2297, pruned_loss=0.04623, over 972075.45 frames.], batch size: 21, lr: 5.22e-04 +2022-05-04 17:10:30,273 INFO [train.py:715] (7/8) Epoch 3, batch 27400, loss[loss=0.1367, simple_loss=0.2138, pruned_loss=0.02982, over 4804.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2304, pruned_loss=0.04638, over 971440.34 frames.], batch size: 26, lr: 5.22e-04 +2022-05-04 17:11:10,419 INFO [train.py:715] (7/8) Epoch 3, batch 27450, loss[loss=0.1469, simple_loss=0.2129, pruned_loss=0.04043, over 4927.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2304, pruned_loss=0.04645, over 971242.15 frames.], batch size: 18, lr: 5.22e-04 +2022-05-04 17:11:49,746 INFO [train.py:715] (7/8) Epoch 3, batch 27500, loss[loss=0.1752, simple_loss=0.2376, pruned_loss=0.05645, over 4955.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2311, pruned_loss=0.04679, over 971156.13 frames.], batch size: 21, lr: 5.22e-04 +2022-05-04 17:12:28,643 INFO [train.py:715] (7/8) Epoch 3, batch 27550, loss[loss=0.1639, simple_loss=0.2264, pruned_loss=0.05074, over 4920.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2305, pruned_loss=0.04687, over 971450.54 frames.], batch size: 18, lr: 5.22e-04 +2022-05-04 17:13:08,356 INFO [train.py:715] (7/8) Epoch 3, batch 27600, loss[loss=0.1202, simple_loss=0.1969, pruned_loss=0.02176, over 4796.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2297, pruned_loss=0.04626, over 972107.49 frames.], batch size: 21, lr: 5.22e-04 +2022-05-04 17:13:48,000 INFO [train.py:715] (7/8) Epoch 3, batch 27650, loss[loss=0.1807, simple_loss=0.2296, pruned_loss=0.06596, over 4983.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2285, pruned_loss=0.04642, over 972095.74 frames.], batch size: 14, lr: 5.22e-04 +2022-05-04 
17:14:26,624 INFO [train.py:715] (7/8) Epoch 3, batch 27700, loss[loss=0.1696, simple_loss=0.2471, pruned_loss=0.04605, over 4890.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2294, pruned_loss=0.04651, over 972481.74 frames.], batch size: 19, lr: 5.22e-04 +2022-05-04 17:15:06,400 INFO [train.py:715] (7/8) Epoch 3, batch 27750, loss[loss=0.1596, simple_loss=0.2341, pruned_loss=0.04256, over 4865.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2293, pruned_loss=0.04656, over 972123.84 frames.], batch size: 20, lr: 5.22e-04 +2022-05-04 17:15:46,354 INFO [train.py:715] (7/8) Epoch 3, batch 27800, loss[loss=0.1732, simple_loss=0.2276, pruned_loss=0.05938, over 4852.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2292, pruned_loss=0.04646, over 972265.91 frames.], batch size: 32, lr: 5.22e-04 +2022-05-04 17:16:25,745 INFO [train.py:715] (7/8) Epoch 3, batch 27850, loss[loss=0.1751, simple_loss=0.2443, pruned_loss=0.05295, over 4823.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2299, pruned_loss=0.04687, over 972239.71 frames.], batch size: 15, lr: 5.21e-04 +2022-05-04 17:17:04,213 INFO [train.py:715] (7/8) Epoch 3, batch 27900, loss[loss=0.1599, simple_loss=0.2233, pruned_loss=0.04825, over 4770.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2303, pruned_loss=0.04657, over 972215.04 frames.], batch size: 18, lr: 5.21e-04 +2022-05-04 17:17:43,814 INFO [train.py:715] (7/8) Epoch 3, batch 27950, loss[loss=0.1759, simple_loss=0.2485, pruned_loss=0.05168, over 4879.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2297, pruned_loss=0.04665, over 971706.00 frames.], batch size: 16, lr: 5.21e-04 +2022-05-04 17:18:23,717 INFO [train.py:715] (7/8) Epoch 3, batch 28000, loss[loss=0.1466, simple_loss=0.2127, pruned_loss=0.04022, over 4879.00 frames.], tot_loss[loss=0.161, simple_loss=0.2296, pruned_loss=0.0462, over 972299.35 frames.], batch size: 32, lr: 5.21e-04 +2022-05-04 17:19:02,278 INFO [train.py:715] (7/8) Epoch 3, batch 28050, loss[loss=0.1744, simple_loss=0.2422, pruned_loss=0.05329, over 4750.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2292, pruned_loss=0.04588, over 972301.38 frames.], batch size: 19, lr: 5.21e-04 +2022-05-04 17:19:41,712 INFO [train.py:715] (7/8) Epoch 3, batch 28100, loss[loss=0.1884, simple_loss=0.2415, pruned_loss=0.06765, over 4763.00 frames.], tot_loss[loss=0.161, simple_loss=0.2296, pruned_loss=0.04614, over 972588.75 frames.], batch size: 12, lr: 5.21e-04 +2022-05-04 17:20:21,591 INFO [train.py:715] (7/8) Epoch 3, batch 28150, loss[loss=0.1668, simple_loss=0.2357, pruned_loss=0.049, over 4938.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2305, pruned_loss=0.04655, over 973038.33 frames.], batch size: 23, lr: 5.21e-04 +2022-05-04 17:21:00,808 INFO [train.py:715] (7/8) Epoch 3, batch 28200, loss[loss=0.1605, simple_loss=0.2336, pruned_loss=0.04367, over 4990.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2311, pruned_loss=0.0467, over 973391.70 frames.], batch size: 15, lr: 5.21e-04 +2022-05-04 17:21:39,662 INFO [train.py:715] (7/8) Epoch 3, batch 28250, loss[loss=0.1364, simple_loss=0.212, pruned_loss=0.03038, over 4827.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04632, over 973513.78 frames.], batch size: 27, lr: 5.21e-04 +2022-05-04 17:22:19,001 INFO [train.py:715] (7/8) Epoch 3, batch 28300, loss[loss=0.1441, simple_loss=0.2146, pruned_loss=0.03681, over 4924.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2293, pruned_loss=0.04586, over 973683.28 frames.], batch size: 29, lr: 5.21e-04 +2022-05-04 17:22:58,006 INFO 
[train.py:715] (7/8) Epoch 3, batch 28350, loss[loss=0.1795, simple_loss=0.2429, pruned_loss=0.05806, over 4757.00 frames.], tot_loss[loss=0.1603, simple_loss=0.229, pruned_loss=0.04578, over 973511.10 frames.], batch size: 16, lr: 5.21e-04 +2022-05-04 17:23:37,196 INFO [train.py:715] (7/8) Epoch 3, batch 28400, loss[loss=0.1804, simple_loss=0.2484, pruned_loss=0.05616, over 4934.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2294, pruned_loss=0.04592, over 972806.04 frames.], batch size: 39, lr: 5.20e-04 +2022-05-04 17:24:15,830 INFO [train.py:715] (7/8) Epoch 3, batch 28450, loss[loss=0.1642, simple_loss=0.2354, pruned_loss=0.04653, over 4870.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2296, pruned_loss=0.04577, over 973125.66 frames.], batch size: 16, lr: 5.20e-04 +2022-05-04 17:24:55,568 INFO [train.py:715] (7/8) Epoch 3, batch 28500, loss[loss=0.1621, simple_loss=0.2295, pruned_loss=0.04733, over 4843.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2291, pruned_loss=0.04582, over 973306.35 frames.], batch size: 30, lr: 5.20e-04 +2022-05-04 17:25:34,506 INFO [train.py:715] (7/8) Epoch 3, batch 28550, loss[loss=0.1491, simple_loss=0.2231, pruned_loss=0.0376, over 4939.00 frames.], tot_loss[loss=0.1612, simple_loss=0.23, pruned_loss=0.04619, over 971977.49 frames.], batch size: 18, lr: 5.20e-04 +2022-05-04 17:26:13,420 INFO [train.py:715] (7/8) Epoch 3, batch 28600, loss[loss=0.2118, simple_loss=0.2703, pruned_loss=0.07659, over 4817.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2312, pruned_loss=0.04697, over 972560.50 frames.], batch size: 21, lr: 5.20e-04 +2022-05-04 17:26:53,131 INFO [train.py:715] (7/8) Epoch 3, batch 28650, loss[loss=0.1865, simple_loss=0.2532, pruned_loss=0.05992, over 4778.00 frames.], tot_loss[loss=0.162, simple_loss=0.2306, pruned_loss=0.04673, over 972612.54 frames.], batch size: 18, lr: 5.20e-04 +2022-05-04 17:27:33,009 INFO [train.py:715] (7/8) Epoch 3, batch 28700, loss[loss=0.1517, simple_loss=0.2171, pruned_loss=0.0431, over 4964.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2312, pruned_loss=0.04659, over 973132.62 frames.], batch size: 35, lr: 5.20e-04 +2022-05-04 17:28:12,157 INFO [train.py:715] (7/8) Epoch 3, batch 28750, loss[loss=0.1506, simple_loss=0.2279, pruned_loss=0.03669, over 4858.00 frames.], tot_loss[loss=0.1635, simple_loss=0.232, pruned_loss=0.04745, over 973773.71 frames.], batch size: 20, lr: 5.20e-04 +2022-05-04 17:28:52,002 INFO [train.py:715] (7/8) Epoch 3, batch 28800, loss[loss=0.1943, simple_loss=0.2663, pruned_loss=0.06112, over 4983.00 frames.], tot_loss[loss=0.163, simple_loss=0.2315, pruned_loss=0.04721, over 973351.15 frames.], batch size: 16, lr: 5.20e-04 +2022-05-04 17:29:32,018 INFO [train.py:715] (7/8) Epoch 3, batch 28850, loss[loss=0.1346, simple_loss=0.2127, pruned_loss=0.02824, over 4980.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2313, pruned_loss=0.04706, over 973236.25 frames.], batch size: 28, lr: 5.20e-04 +2022-05-04 17:30:11,202 INFO [train.py:715] (7/8) Epoch 3, batch 28900, loss[loss=0.1347, simple_loss=0.2039, pruned_loss=0.03276, over 4947.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2305, pruned_loss=0.04666, over 972265.80 frames.], batch size: 14, lr: 5.19e-04 +2022-05-04 17:30:50,083 INFO [train.py:715] (7/8) Epoch 3, batch 28950, loss[loss=0.1649, simple_loss=0.2416, pruned_loss=0.04405, over 4884.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2302, pruned_loss=0.04657, over 971875.18 frames.], batch size: 19, lr: 5.19e-04 +2022-05-04 17:31:29,815 INFO [train.py:715] (7/8) Epoch 
3, batch 29000, loss[loss=0.1366, simple_loss=0.2132, pruned_loss=0.02997, over 4933.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2314, pruned_loss=0.04719, over 972013.88 frames.], batch size: 21, lr: 5.19e-04 +2022-05-04 17:32:10,060 INFO [train.py:715] (7/8) Epoch 3, batch 29050, loss[loss=0.1524, simple_loss=0.2322, pruned_loss=0.0363, over 4746.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2302, pruned_loss=0.04639, over 971913.57 frames.], batch size: 19, lr: 5.19e-04 +2022-05-04 17:32:48,618 INFO [train.py:715] (7/8) Epoch 3, batch 29100, loss[loss=0.1507, simple_loss=0.2315, pruned_loss=0.03491, over 4919.00 frames.], tot_loss[loss=0.1612, simple_loss=0.23, pruned_loss=0.04616, over 972822.84 frames.], batch size: 23, lr: 5.19e-04 +2022-05-04 17:33:28,198 INFO [train.py:715] (7/8) Epoch 3, batch 29150, loss[loss=0.1852, simple_loss=0.2428, pruned_loss=0.06384, over 4879.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2301, pruned_loss=0.04641, over 972857.88 frames.], batch size: 19, lr: 5.19e-04 +2022-05-04 17:34:08,094 INFO [train.py:715] (7/8) Epoch 3, batch 29200, loss[loss=0.1442, simple_loss=0.2202, pruned_loss=0.03411, over 4893.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2297, pruned_loss=0.04636, over 972604.24 frames.], batch size: 19, lr: 5.19e-04 +2022-05-04 17:34:47,190 INFO [train.py:715] (7/8) Epoch 3, batch 29250, loss[loss=0.1432, simple_loss=0.2078, pruned_loss=0.03932, over 4953.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2292, pruned_loss=0.04604, over 972391.59 frames.], batch size: 24, lr: 5.19e-04 +2022-05-04 17:35:26,072 INFO [train.py:715] (7/8) Epoch 3, batch 29300, loss[loss=0.1606, simple_loss=0.2229, pruned_loss=0.0492, over 4858.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2293, pruned_loss=0.04601, over 972245.73 frames.], batch size: 34, lr: 5.19e-04 +2022-05-04 17:36:06,263 INFO [train.py:715] (7/8) Epoch 3, batch 29350, loss[loss=0.11, simple_loss=0.1826, pruned_loss=0.01871, over 4755.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2287, pruned_loss=0.04556, over 971477.73 frames.], batch size: 12, lr: 5.19e-04 +2022-05-04 17:36:45,939 INFO [train.py:715] (7/8) Epoch 3, batch 29400, loss[loss=0.1421, simple_loss=0.2139, pruned_loss=0.03516, over 4847.00 frames.], tot_loss[loss=0.161, simple_loss=0.2295, pruned_loss=0.0462, over 971789.95 frames.], batch size: 32, lr: 5.18e-04 +2022-05-04 17:37:24,687 INFO [train.py:715] (7/8) Epoch 3, batch 29450, loss[loss=0.1639, simple_loss=0.2328, pruned_loss=0.04745, over 4797.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2291, pruned_loss=0.04624, over 971281.14 frames.], batch size: 25, lr: 5.18e-04 +2022-05-04 17:38:03,874 INFO [train.py:715] (7/8) Epoch 3, batch 29500, loss[loss=0.1474, simple_loss=0.2197, pruned_loss=0.03754, over 4934.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2296, pruned_loss=0.04663, over 971906.04 frames.], batch size: 23, lr: 5.18e-04 +2022-05-04 17:38:43,454 INFO [train.py:715] (7/8) Epoch 3, batch 29550, loss[loss=0.1644, simple_loss=0.2305, pruned_loss=0.0491, over 4770.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2289, pruned_loss=0.04648, over 971314.64 frames.], batch size: 18, lr: 5.18e-04 +2022-05-04 17:39:22,771 INFO [train.py:715] (7/8) Epoch 3, batch 29600, loss[loss=0.1447, simple_loss=0.2137, pruned_loss=0.03784, over 4944.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2292, pruned_loss=0.04652, over 972626.94 frames.], batch size: 23, lr: 5.18e-04 +2022-05-04 17:40:01,840 INFO [train.py:715] (7/8) Epoch 3, batch 29650, 
loss[loss=0.1461, simple_loss=0.2151, pruned_loss=0.0386, over 4889.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2294, pruned_loss=0.04686, over 971380.69 frames.], batch size: 22, lr: 5.18e-04 +2022-05-04 17:40:41,988 INFO [train.py:715] (7/8) Epoch 3, batch 29700, loss[loss=0.1603, simple_loss=0.224, pruned_loss=0.04825, over 4832.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2299, pruned_loss=0.04693, over 970813.60 frames.], batch size: 30, lr: 5.18e-04 +2022-05-04 17:41:22,020 INFO [train.py:715] (7/8) Epoch 3, batch 29750, loss[loss=0.1426, simple_loss=0.221, pruned_loss=0.03205, over 4975.00 frames.], tot_loss[loss=0.161, simple_loss=0.2294, pruned_loss=0.04628, over 971316.25 frames.], batch size: 15, lr: 5.18e-04 +2022-05-04 17:42:00,533 INFO [train.py:715] (7/8) Epoch 3, batch 29800, loss[loss=0.157, simple_loss=0.2198, pruned_loss=0.04708, over 4886.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2288, pruned_loss=0.04608, over 971788.60 frames.], batch size: 32, lr: 5.18e-04 +2022-05-04 17:42:40,516 INFO [train.py:715] (7/8) Epoch 3, batch 29850, loss[loss=0.153, simple_loss=0.2155, pruned_loss=0.04523, over 4980.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2297, pruned_loss=0.0467, over 972325.62 frames.], batch size: 28, lr: 5.18e-04 +2022-05-04 17:43:20,052 INFO [train.py:715] (7/8) Epoch 3, batch 29900, loss[loss=0.1392, simple_loss=0.2141, pruned_loss=0.03217, over 4921.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2291, pruned_loss=0.04642, over 972895.99 frames.], batch size: 23, lr: 5.18e-04 +2022-05-04 17:43:58,727 INFO [train.py:715] (7/8) Epoch 3, batch 29950, loss[loss=0.18, simple_loss=0.2439, pruned_loss=0.05809, over 4846.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2295, pruned_loss=0.04674, over 972118.57 frames.], batch size: 32, lr: 5.17e-04 +2022-05-04 17:44:37,454 INFO [train.py:715] (7/8) Epoch 3, batch 30000, loss[loss=0.1881, simple_loss=0.2437, pruned_loss=0.06628, over 4738.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2294, pruned_loss=0.04616, over 972315.97 frames.], batch size: 16, lr: 5.17e-04 +2022-05-04 17:44:37,454 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 17:44:47,857 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1135, simple_loss=0.1993, pruned_loss=0.01381, over 914524.00 frames. 
+2022-05-04 17:45:26,669 INFO [train.py:715] (7/8) Epoch 3, batch 30050, loss[loss=0.1622, simple_loss=0.2326, pruned_loss=0.04588, over 4966.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2285, pruned_loss=0.04531, over 972908.49 frames.], batch size: 15, lr: 5.17e-04 +2022-05-04 17:46:06,307 INFO [train.py:715] (7/8) Epoch 3, batch 30100, loss[loss=0.1353, simple_loss=0.2029, pruned_loss=0.03382, over 4818.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2284, pruned_loss=0.04492, over 972864.47 frames.], batch size: 26, lr: 5.17e-04 +2022-05-04 17:46:46,373 INFO [train.py:715] (7/8) Epoch 3, batch 30150, loss[loss=0.1592, simple_loss=0.2156, pruned_loss=0.05137, over 4791.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2293, pruned_loss=0.04546, over 972677.29 frames.], batch size: 12, lr: 5.17e-04 +2022-05-04 17:47:24,504 INFO [train.py:715] (7/8) Epoch 3, batch 30200, loss[loss=0.1521, simple_loss=0.2241, pruned_loss=0.0401, over 4830.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2288, pruned_loss=0.04526, over 971723.86 frames.], batch size: 15, lr: 5.17e-04 +2022-05-04 17:48:04,128 INFO [train.py:715] (7/8) Epoch 3, batch 30250, loss[loss=0.136, simple_loss=0.2046, pruned_loss=0.03367, over 4987.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2293, pruned_loss=0.04586, over 971892.08 frames.], batch size: 28, lr: 5.17e-04 +2022-05-04 17:48:44,313 INFO [train.py:715] (7/8) Epoch 3, batch 30300, loss[loss=0.1421, simple_loss=0.2165, pruned_loss=0.03381, over 4841.00 frames.], tot_loss[loss=0.161, simple_loss=0.2302, pruned_loss=0.04591, over 971779.84 frames.], batch size: 15, lr: 5.17e-04 +2022-05-04 17:49:23,082 INFO [train.py:715] (7/8) Epoch 3, batch 30350, loss[loss=0.1537, simple_loss=0.222, pruned_loss=0.04263, over 4827.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2305, pruned_loss=0.04633, over 971420.54 frames.], batch size: 15, lr: 5.17e-04 +2022-05-04 17:50:02,738 INFO [train.py:715] (7/8) Epoch 3, batch 30400, loss[loss=0.1567, simple_loss=0.228, pruned_loss=0.04272, over 4874.00 frames.], tot_loss[loss=0.162, simple_loss=0.2307, pruned_loss=0.04664, over 971536.27 frames.], batch size: 20, lr: 5.17e-04 +2022-05-04 17:50:42,523 INFO [train.py:715] (7/8) Epoch 3, batch 30450, loss[loss=0.1573, simple_loss=0.237, pruned_loss=0.03885, over 4746.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2302, pruned_loss=0.0465, over 972194.81 frames.], batch size: 16, lr: 5.16e-04 +2022-05-04 17:51:22,935 INFO [train.py:715] (7/8) Epoch 3, batch 30500, loss[loss=0.1728, simple_loss=0.2423, pruned_loss=0.05165, over 4834.00 frames.], tot_loss[loss=0.162, simple_loss=0.2303, pruned_loss=0.04689, over 971760.31 frames.], batch size: 26, lr: 5.16e-04 +2022-05-04 17:52:02,158 INFO [train.py:715] (7/8) Epoch 3, batch 30550, loss[loss=0.1798, simple_loss=0.2535, pruned_loss=0.05303, over 4906.00 frames.], tot_loss[loss=0.1629, simple_loss=0.231, pruned_loss=0.04736, over 972268.13 frames.], batch size: 18, lr: 5.16e-04 +2022-05-04 17:52:41,690 INFO [train.py:715] (7/8) Epoch 3, batch 30600, loss[loss=0.1245, simple_loss=0.192, pruned_loss=0.02852, over 4788.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2297, pruned_loss=0.04651, over 972499.88 frames.], batch size: 12, lr: 5.16e-04 +2022-05-04 17:53:21,645 INFO [train.py:715] (7/8) Epoch 3, batch 30650, loss[loss=0.1697, simple_loss=0.2312, pruned_loss=0.05415, over 4750.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2289, pruned_loss=0.04615, over 972452.27 frames.], batch size: 19, lr: 5.16e-04 +2022-05-04 17:54:00,312 INFO 
[train.py:715] (7/8) Epoch 3, batch 30700, loss[loss=0.1352, simple_loss=0.2175, pruned_loss=0.02647, over 4743.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2283, pruned_loss=0.04554, over 972272.99 frames.], batch size: 16, lr: 5.16e-04 +2022-05-04 17:54:39,869 INFO [train.py:715] (7/8) Epoch 3, batch 30750, loss[loss=0.1441, simple_loss=0.2175, pruned_loss=0.0353, over 4813.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2284, pruned_loss=0.04541, over 973146.06 frames.], batch size: 27, lr: 5.16e-04 +2022-05-04 17:55:19,272 INFO [train.py:715] (7/8) Epoch 3, batch 30800, loss[loss=0.1719, simple_loss=0.2278, pruned_loss=0.05799, over 4941.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2288, pruned_loss=0.04498, over 972937.49 frames.], batch size: 29, lr: 5.16e-04 +2022-05-04 17:55:59,094 INFO [train.py:715] (7/8) Epoch 3, batch 30850, loss[loss=0.1541, simple_loss=0.2189, pruned_loss=0.0447, over 4992.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2292, pruned_loss=0.04535, over 972527.13 frames.], batch size: 14, lr: 5.16e-04 +2022-05-04 17:56:37,367 INFO [train.py:715] (7/8) Epoch 3, batch 30900, loss[loss=0.1679, simple_loss=0.2463, pruned_loss=0.04481, over 4834.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2307, pruned_loss=0.04613, over 972364.49 frames.], batch size: 15, lr: 5.16e-04 +2022-05-04 17:57:16,437 INFO [train.py:715] (7/8) Epoch 3, batch 30950, loss[loss=0.1972, simple_loss=0.2553, pruned_loss=0.06954, over 4831.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2309, pruned_loss=0.04665, over 972272.05 frames.], batch size: 15, lr: 5.15e-04 +2022-05-04 17:57:55,757 INFO [train.py:715] (7/8) Epoch 3, batch 31000, loss[loss=0.1431, simple_loss=0.2224, pruned_loss=0.03191, over 4808.00 frames.], tot_loss[loss=0.1619, simple_loss=0.231, pruned_loss=0.04641, over 972068.51 frames.], batch size: 26, lr: 5.15e-04 +2022-05-04 17:58:35,036 INFO [train.py:715] (7/8) Epoch 3, batch 31050, loss[loss=0.1546, simple_loss=0.2322, pruned_loss=0.03844, over 4922.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2311, pruned_loss=0.04663, over 972763.02 frames.], batch size: 23, lr: 5.15e-04 +2022-05-04 17:59:13,606 INFO [train.py:715] (7/8) Epoch 3, batch 31100, loss[loss=0.1453, simple_loss=0.212, pruned_loss=0.03929, over 4825.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2307, pruned_loss=0.04649, over 971524.29 frames.], batch size: 26, lr: 5.15e-04 +2022-05-04 17:59:53,188 INFO [train.py:715] (7/8) Epoch 3, batch 31150, loss[loss=0.1435, simple_loss=0.2123, pruned_loss=0.03735, over 4955.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2309, pruned_loss=0.046, over 971953.27 frames.], batch size: 24, lr: 5.15e-04 +2022-05-04 18:00:32,423 INFO [train.py:715] (7/8) Epoch 3, batch 31200, loss[loss=0.1674, simple_loss=0.2361, pruned_loss=0.04935, over 4927.00 frames.], tot_loss[loss=0.161, simple_loss=0.2304, pruned_loss=0.04575, over 972375.62 frames.], batch size: 18, lr: 5.15e-04 +2022-05-04 18:01:11,062 INFO [train.py:715] (7/8) Epoch 3, batch 31250, loss[loss=0.1683, simple_loss=0.2352, pruned_loss=0.05074, over 4743.00 frames.], tot_loss[loss=0.1608, simple_loss=0.23, pruned_loss=0.04582, over 972591.02 frames.], batch size: 16, lr: 5.15e-04 +2022-05-04 18:01:50,136 INFO [train.py:715] (7/8) Epoch 3, batch 31300, loss[loss=0.1559, simple_loss=0.2225, pruned_loss=0.04468, over 4889.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2293, pruned_loss=0.04542, over 972677.17 frames.], batch size: 22, lr: 5.15e-04 +2022-05-04 18:02:29,486 INFO [train.py:715] (7/8) Epoch 
3, batch 31350, loss[loss=0.1538, simple_loss=0.214, pruned_loss=0.04682, over 4855.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2296, pruned_loss=0.04554, over 973210.47 frames.], batch size: 16, lr: 5.15e-04 +2022-05-04 18:03:08,649 INFO [train.py:715] (7/8) Epoch 3, batch 31400, loss[loss=0.1553, simple_loss=0.2233, pruned_loss=0.04364, over 4777.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2285, pruned_loss=0.04491, over 972848.99 frames.], batch size: 17, lr: 5.15e-04 +2022-05-04 18:03:47,232 INFO [train.py:715] (7/8) Epoch 3, batch 31450, loss[loss=0.1529, simple_loss=0.2167, pruned_loss=0.04453, over 4775.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2285, pruned_loss=0.04496, over 972189.75 frames.], batch size: 19, lr: 5.15e-04 +2022-05-04 18:04:26,978 INFO [train.py:715] (7/8) Epoch 3, batch 31500, loss[loss=0.1777, simple_loss=0.2577, pruned_loss=0.04885, over 4835.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2283, pruned_loss=0.04525, over 973108.51 frames.], batch size: 15, lr: 5.14e-04 +2022-05-04 18:05:06,850 INFO [train.py:715] (7/8) Epoch 3, batch 31550, loss[loss=0.1716, simple_loss=0.2415, pruned_loss=0.05083, over 4926.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2293, pruned_loss=0.04566, over 973369.29 frames.], batch size: 39, lr: 5.14e-04 +2022-05-04 18:05:47,992 INFO [train.py:715] (7/8) Epoch 3, batch 31600, loss[loss=0.1472, simple_loss=0.2139, pruned_loss=0.04021, over 4759.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2305, pruned_loss=0.04617, over 972705.45 frames.], batch size: 12, lr: 5.14e-04 +2022-05-04 18:06:26,994 INFO [train.py:715] (7/8) Epoch 3, batch 31650, loss[loss=0.1678, simple_loss=0.2252, pruned_loss=0.05518, over 4874.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2307, pruned_loss=0.04636, over 972266.23 frames.], batch size: 32, lr: 5.14e-04 +2022-05-04 18:07:07,174 INFO [train.py:715] (7/8) Epoch 3, batch 31700, loss[loss=0.1407, simple_loss=0.1945, pruned_loss=0.04349, over 4988.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2296, pruned_loss=0.04583, over 972178.20 frames.], batch size: 14, lr: 5.14e-04 +2022-05-04 18:07:46,359 INFO [train.py:715] (7/8) Epoch 3, batch 31750, loss[loss=0.1567, simple_loss=0.2328, pruned_loss=0.04032, over 4936.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2294, pruned_loss=0.04616, over 971386.15 frames.], batch size: 21, lr: 5.14e-04 +2022-05-04 18:08:24,499 INFO [train.py:715] (7/8) Epoch 3, batch 31800, loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03149, over 4780.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2288, pruned_loss=0.04578, over 971394.91 frames.], batch size: 17, lr: 5.14e-04 +2022-05-04 18:09:04,273 INFO [train.py:715] (7/8) Epoch 3, batch 31850, loss[loss=0.1613, simple_loss=0.235, pruned_loss=0.0438, over 4906.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2289, pruned_loss=0.04605, over 971883.55 frames.], batch size: 17, lr: 5.14e-04 +2022-05-04 18:09:43,772 INFO [train.py:715] (7/8) Epoch 3, batch 31900, loss[loss=0.1226, simple_loss=0.1913, pruned_loss=0.0269, over 4831.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2294, pruned_loss=0.04577, over 972046.08 frames.], batch size: 15, lr: 5.14e-04 +2022-05-04 18:10:22,480 INFO [train.py:715] (7/8) Epoch 3, batch 31950, loss[loss=0.1581, simple_loss=0.2241, pruned_loss=0.04606, over 4830.00 frames.], tot_loss[loss=0.1601, simple_loss=0.229, pruned_loss=0.04562, over 972411.85 frames.], batch size: 30, lr: 5.14e-04 +2022-05-04 18:11:01,414 INFO [train.py:715] (7/8) Epoch 3, batch 32000, 
loss[loss=0.1556, simple_loss=0.2234, pruned_loss=0.04387, over 4768.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2286, pruned_loss=0.04496, over 972704.31 frames.], batch size: 14, lr: 5.14e-04 +2022-05-04 18:11:41,008 INFO [train.py:715] (7/8) Epoch 3, batch 32050, loss[loss=0.1412, simple_loss=0.2108, pruned_loss=0.03576, over 4887.00 frames.], tot_loss[loss=0.1587, simple_loss=0.228, pruned_loss=0.04472, over 972764.69 frames.], batch size: 16, lr: 5.13e-04 +2022-05-04 18:12:19,210 INFO [train.py:715] (7/8) Epoch 3, batch 32100, loss[loss=0.1237, simple_loss=0.2007, pruned_loss=0.02334, over 4768.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2278, pruned_loss=0.04438, over 972371.86 frames.], batch size: 17, lr: 5.13e-04 +2022-05-04 18:12:58,310 INFO [train.py:715] (7/8) Epoch 3, batch 32150, loss[loss=0.1507, simple_loss=0.216, pruned_loss=0.04271, over 4703.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2272, pruned_loss=0.04417, over 971931.35 frames.], batch size: 15, lr: 5.13e-04 +2022-05-04 18:13:37,856 INFO [train.py:715] (7/8) Epoch 3, batch 32200, loss[loss=0.1892, simple_loss=0.242, pruned_loss=0.06818, over 4847.00 frames.], tot_loss[loss=0.158, simple_loss=0.2273, pruned_loss=0.04434, over 972021.22 frames.], batch size: 30, lr: 5.13e-04 +2022-05-04 18:14:16,675 INFO [train.py:715] (7/8) Epoch 3, batch 32250, loss[loss=0.1229, simple_loss=0.1939, pruned_loss=0.026, over 4974.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2278, pruned_loss=0.04479, over 972430.18 frames.], batch size: 14, lr: 5.13e-04 +2022-05-04 18:14:55,236 INFO [train.py:715] (7/8) Epoch 3, batch 32300, loss[loss=0.1446, simple_loss=0.2209, pruned_loss=0.03413, over 4944.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2276, pruned_loss=0.04462, over 972758.76 frames.], batch size: 29, lr: 5.13e-04 +2022-05-04 18:15:34,902 INFO [train.py:715] (7/8) Epoch 3, batch 32350, loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.03749, over 4915.00 frames.], tot_loss[loss=0.1587, simple_loss=0.228, pruned_loss=0.04463, over 973072.57 frames.], batch size: 17, lr: 5.13e-04 +2022-05-04 18:16:14,624 INFO [train.py:715] (7/8) Epoch 3, batch 32400, loss[loss=0.1663, simple_loss=0.2115, pruned_loss=0.06057, over 4794.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2285, pruned_loss=0.045, over 971799.29 frames.], batch size: 13, lr: 5.13e-04 +2022-05-04 18:16:52,602 INFO [train.py:715] (7/8) Epoch 3, batch 32450, loss[loss=0.1377, simple_loss=0.2091, pruned_loss=0.03317, over 4794.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2286, pruned_loss=0.0451, over 971585.70 frames.], batch size: 14, lr: 5.13e-04 +2022-05-04 18:17:32,081 INFO [train.py:715] (7/8) Epoch 3, batch 32500, loss[loss=0.1909, simple_loss=0.2652, pruned_loss=0.05828, over 4809.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2286, pruned_loss=0.04496, over 971389.02 frames.], batch size: 25, lr: 5.13e-04 +2022-05-04 18:18:11,714 INFO [train.py:715] (7/8) Epoch 3, batch 32550, loss[loss=0.1618, simple_loss=0.2324, pruned_loss=0.04563, over 4829.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2286, pruned_loss=0.04526, over 971097.89 frames.], batch size: 15, lr: 5.12e-04 +2022-05-04 18:18:50,229 INFO [train.py:715] (7/8) Epoch 3, batch 32600, loss[loss=0.1248, simple_loss=0.199, pruned_loss=0.02532, over 4835.00 frames.], tot_loss[loss=0.159, simple_loss=0.2283, pruned_loss=0.04483, over 971037.10 frames.], batch size: 26, lr: 5.12e-04 +2022-05-04 18:19:29,061 INFO [train.py:715] (7/8) Epoch 3, batch 32650, loss[loss=0.163, 
simple_loss=0.2257, pruned_loss=0.05017, over 4990.00 frames.], tot_loss[loss=0.1597, simple_loss=0.229, pruned_loss=0.04521, over 971506.58 frames.], batch size: 16, lr: 5.12e-04 +2022-05-04 18:20:08,686 INFO [train.py:715] (7/8) Epoch 3, batch 32700, loss[loss=0.159, simple_loss=0.2176, pruned_loss=0.05016, over 4882.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2289, pruned_loss=0.04527, over 971973.15 frames.], batch size: 32, lr: 5.12e-04 +2022-05-04 18:20:47,707 INFO [train.py:715] (7/8) Epoch 3, batch 32750, loss[loss=0.1707, simple_loss=0.2375, pruned_loss=0.052, over 4744.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2288, pruned_loss=0.04524, over 972403.69 frames.], batch size: 16, lr: 5.12e-04 +2022-05-04 18:21:26,287 INFO [train.py:715] (7/8) Epoch 3, batch 32800, loss[loss=0.1462, simple_loss=0.2146, pruned_loss=0.03891, over 4970.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2294, pruned_loss=0.04581, over 972589.76 frames.], batch size: 35, lr: 5.12e-04 +2022-05-04 18:22:05,409 INFO [train.py:715] (7/8) Epoch 3, batch 32850, loss[loss=0.1666, simple_loss=0.2251, pruned_loss=0.05402, over 4793.00 frames.], tot_loss[loss=0.1603, simple_loss=0.229, pruned_loss=0.04585, over 973094.94 frames.], batch size: 14, lr: 5.12e-04 +2022-05-04 18:22:44,593 INFO [train.py:715] (7/8) Epoch 3, batch 32900, loss[loss=0.1539, simple_loss=0.2209, pruned_loss=0.04347, over 4891.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2297, pruned_loss=0.04634, over 972679.25 frames.], batch size: 22, lr: 5.12e-04 +2022-05-04 18:23:23,659 INFO [train.py:715] (7/8) Epoch 3, batch 32950, loss[loss=0.142, simple_loss=0.2195, pruned_loss=0.03225, over 4783.00 frames.], tot_loss[loss=0.1605, simple_loss=0.229, pruned_loss=0.04598, over 971935.93 frames.], batch size: 17, lr: 5.12e-04 +2022-05-04 18:24:02,391 INFO [train.py:715] (7/8) Epoch 3, batch 33000, loss[loss=0.1679, simple_loss=0.2349, pruned_loss=0.05044, over 4797.00 frames.], tot_loss[loss=0.1607, simple_loss=0.229, pruned_loss=0.04623, over 972136.66 frames.], batch size: 24, lr: 5.12e-04 +2022-05-04 18:24:02,391 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 18:24:11,705 INFO [train.py:742] (7/8) Epoch 3, validation: loss=0.1131, simple_loss=0.199, pruned_loss=0.01363, over 914524.00 frames. 
+2022-05-04 18:24:50,803 INFO [train.py:715] (7/8) Epoch 3, batch 33050, loss[loss=0.1712, simple_loss=0.2295, pruned_loss=0.05643, over 4893.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2289, pruned_loss=0.04629, over 971665.79 frames.], batch size: 39, lr: 5.12e-04 +2022-05-04 18:25:30,713 INFO [train.py:715] (7/8) Epoch 3, batch 33100, loss[loss=0.1501, simple_loss=0.2284, pruned_loss=0.03592, over 4849.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2296, pruned_loss=0.04636, over 971823.88 frames.], batch size: 30, lr: 5.11e-04 +2022-05-04 18:26:09,586 INFO [train.py:715] (7/8) Epoch 3, batch 33150, loss[loss=0.1648, simple_loss=0.2255, pruned_loss=0.05208, over 4898.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2285, pruned_loss=0.04566, over 971445.20 frames.], batch size: 17, lr: 5.11e-04 +2022-05-04 18:26:48,266 INFO [train.py:715] (7/8) Epoch 3, batch 33200, loss[loss=0.1358, simple_loss=0.1961, pruned_loss=0.03776, over 4834.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2284, pruned_loss=0.04553, over 971914.38 frames.], batch size: 13, lr: 5.11e-04 +2022-05-04 18:27:28,165 INFO [train.py:715] (7/8) Epoch 3, batch 33250, loss[loss=0.1353, simple_loss=0.2075, pruned_loss=0.03152, over 4895.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2291, pruned_loss=0.04599, over 972126.94 frames.], batch size: 17, lr: 5.11e-04 +2022-05-04 18:28:07,726 INFO [train.py:715] (7/8) Epoch 3, batch 33300, loss[loss=0.1563, simple_loss=0.2366, pruned_loss=0.038, over 4882.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2294, pruned_loss=0.04594, over 971382.29 frames.], batch size: 19, lr: 5.11e-04 +2022-05-04 18:28:46,234 INFO [train.py:715] (7/8) Epoch 3, batch 33350, loss[loss=0.1536, simple_loss=0.2187, pruned_loss=0.04426, over 4701.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2303, pruned_loss=0.04614, over 971838.18 frames.], batch size: 15, lr: 5.11e-04 +2022-05-04 18:29:25,534 INFO [train.py:715] (7/8) Epoch 3, batch 33400, loss[loss=0.1585, simple_loss=0.2404, pruned_loss=0.03828, over 4797.00 frames.], tot_loss[loss=0.161, simple_loss=0.23, pruned_loss=0.04601, over 972486.47 frames.], batch size: 21, lr: 5.11e-04 +2022-05-04 18:30:05,187 INFO [train.py:715] (7/8) Epoch 3, batch 33450, loss[loss=0.191, simple_loss=0.2626, pruned_loss=0.0597, over 4774.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2304, pruned_loss=0.04629, over 972861.30 frames.], batch size: 14, lr: 5.11e-04 +2022-05-04 18:30:44,206 INFO [train.py:715] (7/8) Epoch 3, batch 33500, loss[loss=0.1943, simple_loss=0.2669, pruned_loss=0.06089, over 4901.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2301, pruned_loss=0.04581, over 972849.34 frames.], batch size: 19, lr: 5.11e-04 +2022-05-04 18:31:23,296 INFO [train.py:715] (7/8) Epoch 3, batch 33550, loss[loss=0.1677, simple_loss=0.2289, pruned_loss=0.05324, over 4823.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2297, pruned_loss=0.0457, over 972801.92 frames.], batch size: 13, lr: 5.11e-04 +2022-05-04 18:32:03,657 INFO [train.py:715] (7/8) Epoch 3, batch 33600, loss[loss=0.2296, simple_loss=0.2845, pruned_loss=0.08741, over 4979.00 frames.], tot_loss[loss=0.16, simple_loss=0.2293, pruned_loss=0.04542, over 973603.98 frames.], batch size: 15, lr: 5.11e-04 +2022-05-04 18:32:43,014 INFO [train.py:715] (7/8) Epoch 3, batch 33650, loss[loss=0.129, simple_loss=0.1949, pruned_loss=0.0316, over 4802.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2296, pruned_loss=0.04548, over 973622.87 frames.], batch size: 12, lr: 5.10e-04 +2022-05-04 18:33:21,656 INFO 
[train.py:715] (7/8) Epoch 3, batch 33700, loss[loss=0.1567, simple_loss=0.2304, pruned_loss=0.0415, over 4961.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2293, pruned_loss=0.04572, over 973414.62 frames.], batch size: 24, lr: 5.10e-04 +2022-05-04 18:34:01,452 INFO [train.py:715] (7/8) Epoch 3, batch 33750, loss[loss=0.1667, simple_loss=0.2413, pruned_loss=0.04604, over 4793.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2297, pruned_loss=0.04637, over 974279.40 frames.], batch size: 21, lr: 5.10e-04 +2022-05-04 18:34:40,935 INFO [train.py:715] (7/8) Epoch 3, batch 33800, loss[loss=0.1699, simple_loss=0.2517, pruned_loss=0.04402, over 4879.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2286, pruned_loss=0.04559, over 973592.02 frames.], batch size: 22, lr: 5.10e-04 +2022-05-04 18:35:19,313 INFO [train.py:715] (7/8) Epoch 3, batch 33850, loss[loss=0.1567, simple_loss=0.2339, pruned_loss=0.0398, over 4855.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2282, pruned_loss=0.04534, over 973939.25 frames.], batch size: 20, lr: 5.10e-04 +2022-05-04 18:35:58,144 INFO [train.py:715] (7/8) Epoch 3, batch 33900, loss[loss=0.1434, simple_loss=0.2177, pruned_loss=0.03452, over 4756.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2287, pruned_loss=0.04558, over 972800.05 frames.], batch size: 19, lr: 5.10e-04 +2022-05-04 18:36:38,303 INFO [train.py:715] (7/8) Epoch 3, batch 33950, loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03858, over 4975.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2294, pruned_loss=0.04598, over 972961.78 frames.], batch size: 15, lr: 5.10e-04 +2022-05-04 18:37:17,241 INFO [train.py:715] (7/8) Epoch 3, batch 34000, loss[loss=0.166, simple_loss=0.2358, pruned_loss=0.04808, over 4782.00 frames.], tot_loss[loss=0.161, simple_loss=0.2298, pruned_loss=0.04613, over 972742.78 frames.], batch size: 14, lr: 5.10e-04 +2022-05-04 18:37:55,983 INFO [train.py:715] (7/8) Epoch 3, batch 34050, loss[loss=0.1516, simple_loss=0.2215, pruned_loss=0.04091, over 4821.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2293, pruned_loss=0.04579, over 973271.68 frames.], batch size: 25, lr: 5.10e-04 +2022-05-04 18:38:35,314 INFO [train.py:715] (7/8) Epoch 3, batch 34100, loss[loss=0.1767, simple_loss=0.2254, pruned_loss=0.06397, over 4807.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2295, pruned_loss=0.04615, over 972029.56 frames.], batch size: 12, lr: 5.10e-04 +2022-05-04 18:39:15,282 INFO [train.py:715] (7/8) Epoch 3, batch 34150, loss[loss=0.1625, simple_loss=0.2311, pruned_loss=0.04698, over 4763.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2276, pruned_loss=0.04517, over 971843.38 frames.], batch size: 16, lr: 5.10e-04 +2022-05-04 18:39:53,559 INFO [train.py:715] (7/8) Epoch 3, batch 34200, loss[loss=0.125, simple_loss=0.1909, pruned_loss=0.02954, over 4872.00 frames.], tot_loss[loss=0.158, simple_loss=0.2271, pruned_loss=0.04443, over 972183.13 frames.], batch size: 20, lr: 5.09e-04 +2022-05-04 18:40:33,007 INFO [train.py:715] (7/8) Epoch 3, batch 34250, loss[loss=0.1748, simple_loss=0.2434, pruned_loss=0.05306, over 4869.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2282, pruned_loss=0.04474, over 971450.49 frames.], batch size: 20, lr: 5.09e-04 +2022-05-04 18:41:13,067 INFO [train.py:715] (7/8) Epoch 3, batch 34300, loss[loss=0.1852, simple_loss=0.2589, pruned_loss=0.05574, over 4960.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2286, pruned_loss=0.0444, over 972125.37 frames.], batch size: 39, lr: 5.09e-04 +2022-05-04 18:41:52,492 INFO [train.py:715] (7/8) Epoch 
3, batch 34350, loss[loss=0.1547, simple_loss=0.2265, pruned_loss=0.04144, over 4967.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2277, pruned_loss=0.04379, over 973204.36 frames.], batch size: 15, lr: 5.09e-04 +2022-05-04 18:42:31,605 INFO [train.py:715] (7/8) Epoch 3, batch 34400, loss[loss=0.2023, simple_loss=0.2749, pruned_loss=0.06487, over 4847.00 frames.], tot_loss[loss=0.159, simple_loss=0.2288, pruned_loss=0.04456, over 972566.36 frames.], batch size: 30, lr: 5.09e-04 +2022-05-04 18:43:11,186 INFO [train.py:715] (7/8) Epoch 3, batch 34450, loss[loss=0.1954, simple_loss=0.2621, pruned_loss=0.06438, over 4935.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2301, pruned_loss=0.04553, over 971791.48 frames.], batch size: 18, lr: 5.09e-04 +2022-05-04 18:43:51,342 INFO [train.py:715] (7/8) Epoch 3, batch 34500, loss[loss=0.1417, simple_loss=0.204, pruned_loss=0.03963, over 4779.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2305, pruned_loss=0.04595, over 971714.91 frames.], batch size: 12, lr: 5.09e-04 +2022-05-04 18:44:29,767 INFO [train.py:715] (7/8) Epoch 3, batch 34550, loss[loss=0.142, simple_loss=0.2172, pruned_loss=0.03337, over 4773.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2302, pruned_loss=0.04608, over 972006.85 frames.], batch size: 14, lr: 5.09e-04 +2022-05-04 18:45:08,814 INFO [train.py:715] (7/8) Epoch 3, batch 34600, loss[loss=0.1135, simple_loss=0.1814, pruned_loss=0.02275, over 4840.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2303, pruned_loss=0.04629, over 972283.21 frames.], batch size: 20, lr: 5.09e-04 +2022-05-04 18:45:49,192 INFO [train.py:715] (7/8) Epoch 3, batch 34650, loss[loss=0.1431, simple_loss=0.2071, pruned_loss=0.03957, over 4864.00 frames.], tot_loss[loss=0.161, simple_loss=0.23, pruned_loss=0.04601, over 972352.73 frames.], batch size: 32, lr: 5.09e-04 +2022-05-04 18:46:28,777 INFO [train.py:715] (7/8) Epoch 3, batch 34700, loss[loss=0.1664, simple_loss=0.2317, pruned_loss=0.05054, over 4869.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2306, pruned_loss=0.0463, over 972542.53 frames.], batch size: 16, lr: 5.09e-04 +2022-05-04 18:47:07,076 INFO [train.py:715] (7/8) Epoch 3, batch 34750, loss[loss=0.1557, simple_loss=0.2155, pruned_loss=0.04792, over 4771.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2308, pruned_loss=0.04627, over 972144.55 frames.], batch size: 12, lr: 5.08e-04 +2022-05-04 18:47:44,746 INFO [train.py:715] (7/8) Epoch 3, batch 34800, loss[loss=0.1378, simple_loss=0.2042, pruned_loss=0.03574, over 4744.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2287, pruned_loss=0.04535, over 971603.00 frames.], batch size: 12, lr: 5.08e-04 +2022-05-04 18:48:35,140 INFO [train.py:715] (7/8) Epoch 4, batch 0, loss[loss=0.1805, simple_loss=0.2358, pruned_loss=0.0626, over 4884.00 frames.], tot_loss[loss=0.1805, simple_loss=0.2358, pruned_loss=0.0626, over 4884.00 frames.], batch size: 32, lr: 4.78e-04 +2022-05-04 18:49:16,507 INFO [train.py:715] (7/8) Epoch 4, batch 50, loss[loss=0.1392, simple_loss=0.2153, pruned_loss=0.03149, over 4890.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2272, pruned_loss=0.04416, over 219954.10 frames.], batch size: 22, lr: 4.78e-04 +2022-05-04 18:49:57,177 INFO [train.py:715] (7/8) Epoch 4, batch 100, loss[loss=0.1673, simple_loss=0.2333, pruned_loss=0.05065, over 4855.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2268, pruned_loss=0.04414, over 386816.34 frames.], batch size: 32, lr: 4.78e-04 +2022-05-04 18:50:37,985 INFO [train.py:715] (7/8) Epoch 4, batch 150, loss[loss=0.2016, 
simple_loss=0.2784, pruned_loss=0.06237, over 4854.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2284, pruned_loss=0.0452, over 516582.89 frames.], batch size: 32, lr: 4.78e-04 +2022-05-04 18:51:19,049 INFO [train.py:715] (7/8) Epoch 4, batch 200, loss[loss=0.1082, simple_loss=0.1793, pruned_loss=0.01853, over 4803.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2288, pruned_loss=0.04623, over 617464.09 frames.], batch size: 12, lr: 4.78e-04 +2022-05-04 18:52:00,238 INFO [train.py:715] (7/8) Epoch 4, batch 250, loss[loss=0.1477, simple_loss=0.2242, pruned_loss=0.03562, over 4967.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2275, pruned_loss=0.0461, over 696074.74 frames.], batch size: 24, lr: 4.77e-04 +2022-05-04 18:52:41,175 INFO [train.py:715] (7/8) Epoch 4, batch 300, loss[loss=0.1708, simple_loss=0.2442, pruned_loss=0.04867, over 4964.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2289, pruned_loss=0.04625, over 756724.20 frames.], batch size: 24, lr: 4.77e-04 +2022-05-04 18:53:22,426 INFO [train.py:715] (7/8) Epoch 4, batch 350, loss[loss=0.1713, simple_loss=0.2433, pruned_loss=0.04965, over 4734.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2296, pruned_loss=0.04626, over 804138.95 frames.], batch size: 16, lr: 4.77e-04 +2022-05-04 18:54:04,553 INFO [train.py:715] (7/8) Epoch 4, batch 400, loss[loss=0.1343, simple_loss=0.2116, pruned_loss=0.02845, over 4868.00 frames.], tot_loss[loss=0.162, simple_loss=0.2301, pruned_loss=0.04695, over 841338.35 frames.], batch size: 16, lr: 4.77e-04 +2022-05-04 18:54:45,181 INFO [train.py:715] (7/8) Epoch 4, batch 450, loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04039, over 4970.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2298, pruned_loss=0.04685, over 870407.82 frames.], batch size: 15, lr: 4.77e-04 +2022-05-04 18:55:26,263 INFO [train.py:715] (7/8) Epoch 4, batch 500, loss[loss=0.1517, simple_loss=0.2246, pruned_loss=0.0394, over 4962.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2291, pruned_loss=0.04623, over 893817.08 frames.], batch size: 21, lr: 4.77e-04 +2022-05-04 18:56:07,508 INFO [train.py:715] (7/8) Epoch 4, batch 550, loss[loss=0.155, simple_loss=0.2234, pruned_loss=0.04328, over 4922.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2286, pruned_loss=0.0461, over 910419.38 frames.], batch size: 29, lr: 4.77e-04 +2022-05-04 18:56:48,435 INFO [train.py:715] (7/8) Epoch 4, batch 600, loss[loss=0.143, simple_loss=0.2233, pruned_loss=0.03137, over 4875.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2283, pruned_loss=0.04546, over 924557.87 frames.], batch size: 22, lr: 4.77e-04 +2022-05-04 18:57:28,918 INFO [train.py:715] (7/8) Epoch 4, batch 650, loss[loss=0.1673, simple_loss=0.226, pruned_loss=0.05433, over 4954.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.04481, over 936173.47 frames.], batch size: 14, lr: 4.77e-04 +2022-05-04 18:58:09,998 INFO [train.py:715] (7/8) Epoch 4, batch 700, loss[loss=0.1533, simple_loss=0.2263, pruned_loss=0.04015, over 4965.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2272, pruned_loss=0.04489, over 943828.95 frames.], batch size: 39, lr: 4.77e-04 +2022-05-04 18:58:51,944 INFO [train.py:715] (7/8) Epoch 4, batch 750, loss[loss=0.1556, simple_loss=0.23, pruned_loss=0.0406, over 4924.00 frames.], tot_loss[loss=0.159, simple_loss=0.2276, pruned_loss=0.04527, over 949515.28 frames.], batch size: 18, lr: 4.77e-04 +2022-05-04 18:59:33,006 INFO [train.py:715] (7/8) Epoch 4, batch 800, loss[loss=0.1526, simple_loss=0.2383, pruned_loss=0.03347, over 4835.00 
frames.], tot_loss[loss=0.1593, simple_loss=0.228, pruned_loss=0.04531, over 954912.17 frames.], batch size: 13, lr: 4.77e-04 +2022-05-04 19:00:13,434 INFO [train.py:715] (7/8) Epoch 4, batch 850, loss[loss=0.1725, simple_loss=0.2475, pruned_loss=0.04869, over 4942.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2278, pruned_loss=0.04554, over 958370.69 frames.], batch size: 21, lr: 4.76e-04 +2022-05-04 19:00:54,496 INFO [train.py:715] (7/8) Epoch 4, batch 900, loss[loss=0.1378, simple_loss=0.2181, pruned_loss=0.02879, over 4986.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2282, pruned_loss=0.04556, over 962210.48 frames.], batch size: 28, lr: 4.76e-04 +2022-05-04 19:01:35,346 INFO [train.py:715] (7/8) Epoch 4, batch 950, loss[loss=0.1761, simple_loss=0.2378, pruned_loss=0.05717, over 4782.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2287, pruned_loss=0.04572, over 963422.55 frames.], batch size: 14, lr: 4.76e-04 +2022-05-04 19:02:16,228 INFO [train.py:715] (7/8) Epoch 4, batch 1000, loss[loss=0.1467, simple_loss=0.2135, pruned_loss=0.03992, over 4785.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2289, pruned_loss=0.04608, over 965794.37 frames.], batch size: 18, lr: 4.76e-04 +2022-05-04 19:02:56,954 INFO [train.py:715] (7/8) Epoch 4, batch 1050, loss[loss=0.1975, simple_loss=0.2608, pruned_loss=0.06706, over 4816.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2288, pruned_loss=0.04615, over 966749.60 frames.], batch size: 21, lr: 4.76e-04 +2022-05-04 19:03:38,129 INFO [train.py:715] (7/8) Epoch 4, batch 1100, loss[loss=0.1684, simple_loss=0.2469, pruned_loss=0.0449, over 4821.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2294, pruned_loss=0.04588, over 968582.87 frames.], batch size: 15, lr: 4.76e-04 +2022-05-04 19:04:18,528 INFO [train.py:715] (7/8) Epoch 4, batch 1150, loss[loss=0.2034, simple_loss=0.2672, pruned_loss=0.06979, over 4879.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2295, pruned_loss=0.04606, over 969580.22 frames.], batch size: 16, lr: 4.76e-04 +2022-05-04 19:04:58,025 INFO [train.py:715] (7/8) Epoch 4, batch 1200, loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03148, over 4975.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2287, pruned_loss=0.04505, over 969939.40 frames.], batch size: 25, lr: 4.76e-04 +2022-05-04 19:05:38,582 INFO [train.py:715] (7/8) Epoch 4, batch 1250, loss[loss=0.167, simple_loss=0.2338, pruned_loss=0.05006, over 4826.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2287, pruned_loss=0.04505, over 970272.34 frames.], batch size: 13, lr: 4.76e-04 +2022-05-04 19:06:19,654 INFO [train.py:715] (7/8) Epoch 4, batch 1300, loss[loss=0.1476, simple_loss=0.2269, pruned_loss=0.03421, over 4992.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2284, pruned_loss=0.04503, over 970903.05 frames.], batch size: 15, lr: 4.76e-04 +2022-05-04 19:06:59,663 INFO [train.py:715] (7/8) Epoch 4, batch 1350, loss[loss=0.1669, simple_loss=0.2387, pruned_loss=0.04756, over 4831.00 frames.], tot_loss[loss=0.159, simple_loss=0.2282, pruned_loss=0.04493, over 971909.05 frames.], batch size: 30, lr: 4.76e-04 +2022-05-04 19:07:40,376 INFO [train.py:715] (7/8) Epoch 4, batch 1400, loss[loss=0.1317, simple_loss=0.1958, pruned_loss=0.0338, over 4842.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2273, pruned_loss=0.04474, over 972334.42 frames.], batch size: 32, lr: 4.76e-04 +2022-05-04 19:08:21,350 INFO [train.py:715] (7/8) Epoch 4, batch 1450, loss[loss=0.1695, simple_loss=0.2303, pruned_loss=0.05433, over 4690.00 frames.], tot_loss[loss=0.1597, 
simple_loss=0.2281, pruned_loss=0.04561, over 972873.78 frames.], batch size: 15, lr: 4.75e-04 +2022-05-04 19:09:02,423 INFO [train.py:715] (7/8) Epoch 4, batch 1500, loss[loss=0.1827, simple_loss=0.2326, pruned_loss=0.06639, over 4991.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2271, pruned_loss=0.04484, over 973148.11 frames.], batch size: 14, lr: 4.75e-04 +2022-05-04 19:09:42,045 INFO [train.py:715] (7/8) Epoch 4, batch 1550, loss[loss=0.153, simple_loss=0.2265, pruned_loss=0.03973, over 4973.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2276, pruned_loss=0.04503, over 972591.71 frames.], batch size: 24, lr: 4.75e-04 +2022-05-04 19:10:23,008 INFO [train.py:715] (7/8) Epoch 4, batch 1600, loss[loss=0.1389, simple_loss=0.2063, pruned_loss=0.03576, over 4849.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2273, pruned_loss=0.04452, over 973050.00 frames.], batch size: 13, lr: 4.75e-04 +2022-05-04 19:11:04,741 INFO [train.py:715] (7/8) Epoch 4, batch 1650, loss[loss=0.2108, simple_loss=0.2698, pruned_loss=0.07591, over 4762.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2275, pruned_loss=0.04461, over 973060.50 frames.], batch size: 16, lr: 4.75e-04 +2022-05-04 19:11:45,104 INFO [train.py:715] (7/8) Epoch 4, batch 1700, loss[loss=0.1525, simple_loss=0.2266, pruned_loss=0.0392, over 4940.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2284, pruned_loss=0.04487, over 973169.99 frames.], batch size: 29, lr: 4.75e-04 +2022-05-04 19:12:25,111 INFO [train.py:715] (7/8) Epoch 4, batch 1750, loss[loss=0.1223, simple_loss=0.1844, pruned_loss=0.03005, over 4987.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2279, pruned_loss=0.04472, over 973711.93 frames.], batch size: 14, lr: 4.75e-04 +2022-05-04 19:13:06,316 INFO [train.py:715] (7/8) Epoch 4, batch 1800, loss[loss=0.1308, simple_loss=0.1931, pruned_loss=0.03428, over 4649.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2273, pruned_loss=0.04482, over 972329.65 frames.], batch size: 13, lr: 4.75e-04 +2022-05-04 19:13:47,665 INFO [train.py:715] (7/8) Epoch 4, batch 1850, loss[loss=0.1552, simple_loss=0.2386, pruned_loss=0.0359, over 4947.00 frames.], tot_loss[loss=0.158, simple_loss=0.2269, pruned_loss=0.04452, over 972462.26 frames.], batch size: 21, lr: 4.75e-04 +2022-05-04 19:14:27,699 INFO [train.py:715] (7/8) Epoch 4, batch 1900, loss[loss=0.1679, simple_loss=0.2384, pruned_loss=0.04874, over 4873.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2271, pruned_loss=0.04473, over 972172.27 frames.], batch size: 16, lr: 4.75e-04 +2022-05-04 19:15:08,452 INFO [train.py:715] (7/8) Epoch 4, batch 1950, loss[loss=0.1459, simple_loss=0.2148, pruned_loss=0.03852, over 4709.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2272, pruned_loss=0.04451, over 971747.37 frames.], batch size: 15, lr: 4.75e-04 +2022-05-04 19:15:48,968 INFO [train.py:715] (7/8) Epoch 4, batch 2000, loss[loss=0.1571, simple_loss=0.2285, pruned_loss=0.04289, over 4972.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2272, pruned_loss=0.04426, over 972615.86 frames.], batch size: 24, lr: 4.74e-04 +2022-05-04 19:16:28,968 INFO [train.py:715] (7/8) Epoch 4, batch 2050, loss[loss=0.2008, simple_loss=0.2502, pruned_loss=0.07568, over 4827.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2279, pruned_loss=0.04465, over 972347.50 frames.], batch size: 13, lr: 4.74e-04 +2022-05-04 19:17:08,517 INFO [train.py:715] (7/8) Epoch 4, batch 2100, loss[loss=0.1709, simple_loss=0.236, pruned_loss=0.05291, over 4923.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2278, 
pruned_loss=0.04456, over 971342.02 frames.], batch size: 39, lr: 4.74e-04 +2022-05-04 19:17:48,269 INFO [train.py:715] (7/8) Epoch 4, batch 2150, loss[loss=0.1619, simple_loss=0.239, pruned_loss=0.04238, over 4833.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.04488, over 971059.42 frames.], batch size: 15, lr: 4.74e-04 +2022-05-04 19:18:29,064 INFO [train.py:715] (7/8) Epoch 4, batch 2200, loss[loss=0.144, simple_loss=0.2207, pruned_loss=0.03361, over 4976.00 frames.], tot_loss[loss=0.1586, simple_loss=0.228, pruned_loss=0.04458, over 970870.14 frames.], batch size: 28, lr: 4.74e-04 +2022-05-04 19:19:09,442 INFO [train.py:715] (7/8) Epoch 4, batch 2250, loss[loss=0.1311, simple_loss=0.1998, pruned_loss=0.03123, over 4985.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2269, pruned_loss=0.04413, over 971781.18 frames.], batch size: 25, lr: 4.74e-04 +2022-05-04 19:19:48,813 INFO [train.py:715] (7/8) Epoch 4, batch 2300, loss[loss=0.1393, simple_loss=0.217, pruned_loss=0.03077, over 4897.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2261, pruned_loss=0.0438, over 971856.42 frames.], batch size: 22, lr: 4.74e-04 +2022-05-04 19:20:28,750 INFO [train.py:715] (7/8) Epoch 4, batch 2350, loss[loss=0.1625, simple_loss=0.2321, pruned_loss=0.04642, over 4826.00 frames.], tot_loss[loss=0.1568, simple_loss=0.226, pruned_loss=0.04385, over 972124.54 frames.], batch size: 15, lr: 4.74e-04 +2022-05-04 19:21:08,835 INFO [train.py:715] (7/8) Epoch 4, batch 2400, loss[loss=0.1491, simple_loss=0.2189, pruned_loss=0.03969, over 4924.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2262, pruned_loss=0.04429, over 972075.18 frames.], batch size: 29, lr: 4.74e-04 +2022-05-04 19:21:48,322 INFO [train.py:715] (7/8) Epoch 4, batch 2450, loss[loss=0.1236, simple_loss=0.1992, pruned_loss=0.02398, over 4883.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2264, pruned_loss=0.04426, over 972122.33 frames.], batch size: 22, lr: 4.74e-04 +2022-05-04 19:22:28,661 INFO [train.py:715] (7/8) Epoch 4, batch 2500, loss[loss=0.158, simple_loss=0.2267, pruned_loss=0.04464, over 4861.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2266, pruned_loss=0.04446, over 970913.86 frames.], batch size: 15, lr: 4.74e-04 +2022-05-04 19:23:09,576 INFO [train.py:715] (7/8) Epoch 4, batch 2550, loss[loss=0.1874, simple_loss=0.2583, pruned_loss=0.05829, over 4797.00 frames.], tot_loss[loss=0.158, simple_loss=0.2271, pruned_loss=0.04446, over 971458.37 frames.], batch size: 24, lr: 4.74e-04 +2022-05-04 19:23:49,885 INFO [train.py:715] (7/8) Epoch 4, batch 2600, loss[loss=0.195, simple_loss=0.2524, pruned_loss=0.0688, over 4687.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2276, pruned_loss=0.04447, over 971083.51 frames.], batch size: 15, lr: 4.73e-04 +2022-05-04 19:24:29,137 INFO [train.py:715] (7/8) Epoch 4, batch 2650, loss[loss=0.1387, simple_loss=0.2155, pruned_loss=0.03096, over 4990.00 frames.], tot_loss[loss=0.159, simple_loss=0.2281, pruned_loss=0.04489, over 970725.01 frames.], batch size: 14, lr: 4.73e-04 +2022-05-04 19:25:09,502 INFO [train.py:715] (7/8) Epoch 4, batch 2700, loss[loss=0.1556, simple_loss=0.2311, pruned_loss=0.04002, over 4893.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2279, pruned_loss=0.04444, over 971478.41 frames.], batch size: 19, lr: 4.73e-04 +2022-05-04 19:25:49,766 INFO [train.py:715] (7/8) Epoch 4, batch 2750, loss[loss=0.1621, simple_loss=0.2225, pruned_loss=0.05088, over 4827.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.0444, over 971745.83 
frames.], batch size: 15, lr: 4.73e-04 +2022-05-04 19:26:29,541 INFO [train.py:715] (7/8) Epoch 4, batch 2800, loss[loss=0.1698, simple_loss=0.2439, pruned_loss=0.04783, over 4884.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2268, pruned_loss=0.04419, over 972249.46 frames.], batch size: 19, lr: 4.73e-04 +2022-05-04 19:27:08,932 INFO [train.py:715] (7/8) Epoch 4, batch 2850, loss[loss=0.1234, simple_loss=0.1935, pruned_loss=0.02672, over 4969.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2275, pruned_loss=0.04478, over 972647.28 frames.], batch size: 24, lr: 4.73e-04 +2022-05-04 19:27:49,243 INFO [train.py:715] (7/8) Epoch 4, batch 2900, loss[loss=0.1352, simple_loss=0.2028, pruned_loss=0.03378, over 4898.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2284, pruned_loss=0.0449, over 972376.97 frames.], batch size: 19, lr: 4.73e-04 +2022-05-04 19:28:29,134 INFO [train.py:715] (7/8) Epoch 4, batch 2950, loss[loss=0.2189, simple_loss=0.2738, pruned_loss=0.08198, over 4901.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2279, pruned_loss=0.0452, over 971779.36 frames.], batch size: 18, lr: 4.73e-04 +2022-05-04 19:29:08,449 INFO [train.py:715] (7/8) Epoch 4, batch 3000, loss[loss=0.1725, simple_loss=0.2509, pruned_loss=0.047, over 4685.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2278, pruned_loss=0.04482, over 972074.99 frames.], batch size: 15, lr: 4.73e-04 +2022-05-04 19:29:08,450 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 19:29:17,944 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1127, simple_loss=0.1984, pruned_loss=0.01346, over 914524.00 frames. +2022-05-04 19:29:57,092 INFO [train.py:715] (7/8) Epoch 4, batch 3050, loss[loss=0.1677, simple_loss=0.2236, pruned_loss=0.05588, over 4818.00 frames.], tot_loss[loss=0.158, simple_loss=0.2272, pruned_loss=0.04438, over 971293.97 frames.], batch size: 12, lr: 4.73e-04 +2022-05-04 19:30:37,136 INFO [train.py:715] (7/8) Epoch 4, batch 3100, loss[loss=0.1911, simple_loss=0.2467, pruned_loss=0.06779, over 4812.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2273, pruned_loss=0.04461, over 971300.43 frames.], batch size: 15, lr: 4.73e-04 +2022-05-04 19:31:17,416 INFO [train.py:715] (7/8) Epoch 4, batch 3150, loss[loss=0.1451, simple_loss=0.2032, pruned_loss=0.04353, over 4818.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2271, pruned_loss=0.04453, over 971918.13 frames.], batch size: 27, lr: 4.73e-04 +2022-05-04 19:31:57,025 INFO [train.py:715] (7/8) Epoch 4, batch 3200, loss[loss=0.1385, simple_loss=0.2129, pruned_loss=0.03204, over 4832.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2285, pruned_loss=0.04502, over 971657.09 frames.], batch size: 25, lr: 4.72e-04 +2022-05-04 19:32:36,972 INFO [train.py:715] (7/8) Epoch 4, batch 3250, loss[loss=0.1351, simple_loss=0.2049, pruned_loss=0.03269, over 4761.00 frames.], tot_loss[loss=0.1598, simple_loss=0.229, pruned_loss=0.0453, over 971853.40 frames.], batch size: 18, lr: 4.72e-04 +2022-05-04 19:33:16,913 INFO [train.py:715] (7/8) Epoch 4, batch 3300, loss[loss=0.1617, simple_loss=0.2385, pruned_loss=0.0424, over 4764.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2288, pruned_loss=0.04528, over 971785.07 frames.], batch size: 18, lr: 4.72e-04 +2022-05-04 19:33:56,287 INFO [train.py:715] (7/8) Epoch 4, batch 3350, loss[loss=0.1693, simple_loss=0.2452, pruned_loss=0.04671, over 4915.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2281, pruned_loss=0.04453, over 971381.19 frames.], batch size: 29, lr: 4.72e-04 +2022-05-04 19:34:35,331 INFO [train.py:715] 
(7/8) Epoch 4, batch 3400, loss[loss=0.1784, simple_loss=0.2521, pruned_loss=0.05232, over 4802.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2273, pruned_loss=0.04398, over 971621.46 frames.], batch size: 21, lr: 4.72e-04 +2022-05-04 19:35:15,774 INFO [train.py:715] (7/8) Epoch 4, batch 3450, loss[loss=0.1375, simple_loss=0.2071, pruned_loss=0.03392, over 4979.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2275, pruned_loss=0.04364, over 971834.38 frames.], batch size: 15, lr: 4.72e-04 +2022-05-04 19:35:55,191 INFO [train.py:715] (7/8) Epoch 4, batch 3500, loss[loss=0.1691, simple_loss=0.2412, pruned_loss=0.04852, over 4793.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2271, pruned_loss=0.04395, over 971563.87 frames.], batch size: 21, lr: 4.72e-04 +2022-05-04 19:36:34,859 INFO [train.py:715] (7/8) Epoch 4, batch 3550, loss[loss=0.194, simple_loss=0.2652, pruned_loss=0.06136, over 4908.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2278, pruned_loss=0.04438, over 972205.48 frames.], batch size: 17, lr: 4.72e-04 +2022-05-04 19:37:14,695 INFO [train.py:715] (7/8) Epoch 4, batch 3600, loss[loss=0.1704, simple_loss=0.2363, pruned_loss=0.0522, over 4957.00 frames.], tot_loss[loss=0.159, simple_loss=0.2282, pruned_loss=0.04495, over 972719.76 frames.], batch size: 39, lr: 4.72e-04 +2022-05-04 19:37:54,699 INFO [train.py:715] (7/8) Epoch 4, batch 3650, loss[loss=0.1366, simple_loss=0.2129, pruned_loss=0.03013, over 4881.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2283, pruned_loss=0.04472, over 973063.02 frames.], batch size: 22, lr: 4.72e-04 +2022-05-04 19:38:34,070 INFO [train.py:715] (7/8) Epoch 4, batch 3700, loss[loss=0.1561, simple_loss=0.2332, pruned_loss=0.03947, over 4887.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2288, pruned_loss=0.04514, over 972200.17 frames.], batch size: 16, lr: 4.72e-04 +2022-05-04 19:39:13,352 INFO [train.py:715] (7/8) Epoch 4, batch 3750, loss[loss=0.1579, simple_loss=0.2395, pruned_loss=0.03818, over 4870.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2289, pruned_loss=0.04527, over 972227.30 frames.], batch size: 32, lr: 4.72e-04 +2022-05-04 19:39:53,221 INFO [train.py:715] (7/8) Epoch 4, batch 3800, loss[loss=0.1503, simple_loss=0.2208, pruned_loss=0.03985, over 4793.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2285, pruned_loss=0.04521, over 973220.79 frames.], batch size: 17, lr: 4.72e-04 +2022-05-04 19:40:32,935 INFO [train.py:715] (7/8) Epoch 4, batch 3850, loss[loss=0.1254, simple_loss=0.1958, pruned_loss=0.02752, over 4796.00 frames.], tot_loss[loss=0.1588, simple_loss=0.228, pruned_loss=0.04485, over 973020.80 frames.], batch size: 12, lr: 4.71e-04 +2022-05-04 19:41:13,116 INFO [train.py:715] (7/8) Epoch 4, batch 3900, loss[loss=0.1495, simple_loss=0.2133, pruned_loss=0.04279, over 4984.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2278, pruned_loss=0.04475, over 973186.12 frames.], batch size: 15, lr: 4.71e-04 +2022-05-04 19:41:53,259 INFO [train.py:715] (7/8) Epoch 4, batch 3950, loss[loss=0.1778, simple_loss=0.2456, pruned_loss=0.05502, over 4713.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.04441, over 973660.27 frames.], batch size: 15, lr: 4.71e-04 +2022-05-04 19:42:33,633 INFO [train.py:715] (7/8) Epoch 4, batch 4000, loss[loss=0.1429, simple_loss=0.2102, pruned_loss=0.03778, over 4780.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2269, pruned_loss=0.04411, over 973095.73 frames.], batch size: 18, lr: 4.71e-04 +2022-05-04 19:43:13,666 INFO [train.py:715] (7/8) Epoch 4, batch 4050, 
loss[loss=0.1314, simple_loss=0.1949, pruned_loss=0.03393, over 4707.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2262, pruned_loss=0.04395, over 972906.97 frames.], batch size: 12, lr: 4.71e-04 +2022-05-04 19:43:53,242 INFO [train.py:715] (7/8) Epoch 4, batch 4100, loss[loss=0.1562, simple_loss=0.2434, pruned_loss=0.03449, over 4778.00 frames.], tot_loss[loss=0.1578, simple_loss=0.227, pruned_loss=0.04428, over 972065.80 frames.], batch size: 18, lr: 4.71e-04 +2022-05-04 19:44:33,950 INFO [train.py:715] (7/8) Epoch 4, batch 4150, loss[loss=0.1196, simple_loss=0.1942, pruned_loss=0.02251, over 4765.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2281, pruned_loss=0.04476, over 972380.32 frames.], batch size: 12, lr: 4.71e-04 +2022-05-04 19:45:13,436 INFO [train.py:715] (7/8) Epoch 4, batch 4200, loss[loss=0.124, simple_loss=0.1914, pruned_loss=0.0283, over 4799.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2277, pruned_loss=0.04465, over 971852.43 frames.], batch size: 12, lr: 4.71e-04 +2022-05-04 19:45:52,911 INFO [train.py:715] (7/8) Epoch 4, batch 4250, loss[loss=0.1771, simple_loss=0.2512, pruned_loss=0.05151, over 4907.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.04439, over 971456.53 frames.], batch size: 17, lr: 4.71e-04 +2022-05-04 19:46:33,010 INFO [train.py:715] (7/8) Epoch 4, batch 4300, loss[loss=0.1491, simple_loss=0.2188, pruned_loss=0.03974, over 4743.00 frames.], tot_loss[loss=0.1574, simple_loss=0.227, pruned_loss=0.04395, over 972104.47 frames.], batch size: 16, lr: 4.71e-04 +2022-05-04 19:47:13,035 INFO [train.py:715] (7/8) Epoch 4, batch 4350, loss[loss=0.1391, simple_loss=0.2111, pruned_loss=0.03358, over 4927.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2273, pruned_loss=0.04429, over 972259.95 frames.], batch size: 23, lr: 4.71e-04 +2022-05-04 19:47:52,118 INFO [train.py:715] (7/8) Epoch 4, batch 4400, loss[loss=0.2208, simple_loss=0.2653, pruned_loss=0.08814, over 4758.00 frames.], tot_loss[loss=0.159, simple_loss=0.2278, pruned_loss=0.04507, over 972230.04 frames.], batch size: 14, lr: 4.71e-04 +2022-05-04 19:48:31,828 INFO [train.py:715] (7/8) Epoch 4, batch 4450, loss[loss=0.1785, simple_loss=0.234, pruned_loss=0.06152, over 4868.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2271, pruned_loss=0.04471, over 973175.52 frames.], batch size: 39, lr: 4.70e-04 +2022-05-04 19:49:12,003 INFO [train.py:715] (7/8) Epoch 4, batch 4500, loss[loss=0.14, simple_loss=0.2035, pruned_loss=0.03823, over 4962.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2269, pruned_loss=0.04439, over 973228.93 frames.], batch size: 24, lr: 4.70e-04 +2022-05-04 19:49:51,274 INFO [train.py:715] (7/8) Epoch 4, batch 4550, loss[loss=0.1308, simple_loss=0.2003, pruned_loss=0.03068, over 4858.00 frames.], tot_loss[loss=0.158, simple_loss=0.2272, pruned_loss=0.04441, over 972046.71 frames.], batch size: 20, lr: 4.70e-04 +2022-05-04 19:50:30,678 INFO [train.py:715] (7/8) Epoch 4, batch 4600, loss[loss=0.145, simple_loss=0.2095, pruned_loss=0.04025, over 4984.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2274, pruned_loss=0.0448, over 972263.58 frames.], batch size: 25, lr: 4.70e-04 +2022-05-04 19:51:10,986 INFO [train.py:715] (7/8) Epoch 4, batch 4650, loss[loss=0.1577, simple_loss=0.2228, pruned_loss=0.04631, over 4699.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2264, pruned_loss=0.04422, over 972533.04 frames.], batch size: 15, lr: 4.70e-04 +2022-05-04 19:51:51,344 INFO [train.py:715] (7/8) Epoch 4, batch 4700, loss[loss=0.1693, simple_loss=0.2271, 
pruned_loss=0.05575, over 4773.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2265, pruned_loss=0.04411, over 973110.70 frames.], batch size: 14, lr: 4.70e-04 +2022-05-04 19:52:31,246 INFO [train.py:715] (7/8) Epoch 4, batch 4750, loss[loss=0.156, simple_loss=0.2243, pruned_loss=0.04388, over 4979.00 frames.], tot_loss[loss=0.157, simple_loss=0.2261, pruned_loss=0.04396, over 973314.78 frames.], batch size: 31, lr: 4.70e-04 +2022-05-04 19:53:13,037 INFO [train.py:715] (7/8) Epoch 4, batch 4800, loss[loss=0.1693, simple_loss=0.2345, pruned_loss=0.05202, over 4894.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2272, pruned_loss=0.04454, over 972764.69 frames.], batch size: 16, lr: 4.70e-04 +2022-05-04 19:53:53,559 INFO [train.py:715] (7/8) Epoch 4, batch 4850, loss[loss=0.1543, simple_loss=0.2227, pruned_loss=0.04292, over 4698.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2273, pruned_loss=0.04483, over 973017.81 frames.], batch size: 15, lr: 4.70e-04 +2022-05-04 19:54:32,958 INFO [train.py:715] (7/8) Epoch 4, batch 4900, loss[loss=0.1635, simple_loss=0.2319, pruned_loss=0.0475, over 4964.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2278, pruned_loss=0.04535, over 973389.19 frames.], batch size: 39, lr: 4.70e-04 +2022-05-04 19:55:12,350 INFO [train.py:715] (7/8) Epoch 4, batch 4950, loss[loss=0.1508, simple_loss=0.214, pruned_loss=0.04378, over 4897.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2272, pruned_loss=0.04488, over 973281.18 frames.], batch size: 17, lr: 4.70e-04 +2022-05-04 19:55:52,408 INFO [train.py:715] (7/8) Epoch 4, batch 5000, loss[loss=0.1715, simple_loss=0.2361, pruned_loss=0.05346, over 4698.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2271, pruned_loss=0.0442, over 972280.42 frames.], batch size: 15, lr: 4.70e-04 +2022-05-04 19:56:32,443 INFO [train.py:715] (7/8) Epoch 4, batch 5050, loss[loss=0.1494, simple_loss=0.215, pruned_loss=0.04187, over 4981.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2286, pruned_loss=0.04486, over 972395.16 frames.], batch size: 14, lr: 4.69e-04 +2022-05-04 19:57:12,348 INFO [train.py:715] (7/8) Epoch 4, batch 5100, loss[loss=0.1499, simple_loss=0.2201, pruned_loss=0.03991, over 4875.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2283, pruned_loss=0.04453, over 972701.44 frames.], batch size: 22, lr: 4.69e-04 +2022-05-04 19:57:51,520 INFO [train.py:715] (7/8) Epoch 4, batch 5150, loss[loss=0.186, simple_loss=0.2536, pruned_loss=0.05918, over 4688.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2279, pruned_loss=0.04466, over 971759.85 frames.], batch size: 15, lr: 4.69e-04 +2022-05-04 19:58:31,724 INFO [train.py:715] (7/8) Epoch 4, batch 5200, loss[loss=0.1357, simple_loss=0.2067, pruned_loss=0.0324, over 4740.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2279, pruned_loss=0.04459, over 971022.95 frames.], batch size: 16, lr: 4.69e-04 +2022-05-04 19:59:11,082 INFO [train.py:715] (7/8) Epoch 4, batch 5250, loss[loss=0.1788, simple_loss=0.2435, pruned_loss=0.05704, over 4690.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2275, pruned_loss=0.04397, over 971965.39 frames.], batch size: 15, lr: 4.69e-04 +2022-05-04 19:59:50,713 INFO [train.py:715] (7/8) Epoch 4, batch 5300, loss[loss=0.1546, simple_loss=0.2375, pruned_loss=0.03587, over 4842.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2273, pruned_loss=0.04351, over 971606.58 frames.], batch size: 20, lr: 4.69e-04 +2022-05-04 20:00:30,979 INFO [train.py:715] (7/8) Epoch 4, batch 5350, loss[loss=0.1439, simple_loss=0.2085, pruned_loss=0.03964, over 4941.00 
frames.], tot_loss[loss=0.1563, simple_loss=0.2259, pruned_loss=0.04332, over 971551.67 frames.], batch size: 21, lr: 4.69e-04 +2022-05-04 20:01:11,128 INFO [train.py:715] (7/8) Epoch 4, batch 5400, loss[loss=0.1239, simple_loss=0.1951, pruned_loss=0.02636, over 4830.00 frames.], tot_loss[loss=0.157, simple_loss=0.2266, pruned_loss=0.04375, over 971947.69 frames.], batch size: 13, lr: 4.69e-04 +2022-05-04 20:01:51,436 INFO [train.py:715] (7/8) Epoch 4, batch 5450, loss[loss=0.1553, simple_loss=0.2352, pruned_loss=0.03776, over 4993.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2259, pruned_loss=0.04315, over 972923.23 frames.], batch size: 16, lr: 4.69e-04 +2022-05-04 20:02:30,842 INFO [train.py:715] (7/8) Epoch 4, batch 5500, loss[loss=0.1548, simple_loss=0.2354, pruned_loss=0.03708, over 4796.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04396, over 973007.69 frames.], batch size: 24, lr: 4.69e-04 +2022-05-04 20:03:11,386 INFO [train.py:715] (7/8) Epoch 4, batch 5550, loss[loss=0.1917, simple_loss=0.254, pruned_loss=0.06473, over 4921.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2284, pruned_loss=0.04432, over 972657.68 frames.], batch size: 18, lr: 4.69e-04 +2022-05-04 20:03:51,124 INFO [train.py:715] (7/8) Epoch 4, batch 5600, loss[loss=0.1759, simple_loss=0.2458, pruned_loss=0.05296, over 4746.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2278, pruned_loss=0.04381, over 972037.00 frames.], batch size: 16, lr: 4.69e-04 +2022-05-04 20:04:31,009 INFO [train.py:715] (7/8) Epoch 4, batch 5650, loss[loss=0.1658, simple_loss=0.2324, pruned_loss=0.04963, over 4962.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2283, pruned_loss=0.04433, over 972260.64 frames.], batch size: 35, lr: 4.68e-04 +2022-05-04 20:05:10,993 INFO [train.py:715] (7/8) Epoch 4, batch 5700, loss[loss=0.1769, simple_loss=0.2381, pruned_loss=0.05789, over 4786.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2283, pruned_loss=0.04468, over 972436.88 frames.], batch size: 17, lr: 4.68e-04 +2022-05-04 20:05:51,206 INFO [train.py:715] (7/8) Epoch 4, batch 5750, loss[loss=0.1345, simple_loss=0.2064, pruned_loss=0.03131, over 4926.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2285, pruned_loss=0.04517, over 973158.78 frames.], batch size: 18, lr: 4.68e-04 +2022-05-04 20:06:31,306 INFO [train.py:715] (7/8) Epoch 4, batch 5800, loss[loss=0.1759, simple_loss=0.2436, pruned_loss=0.05405, over 4924.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2288, pruned_loss=0.04485, over 972909.67 frames.], batch size: 18, lr: 4.68e-04 +2022-05-04 20:07:10,963 INFO [train.py:715] (7/8) Epoch 4, batch 5850, loss[loss=0.1239, simple_loss=0.1925, pruned_loss=0.02767, over 4792.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2284, pruned_loss=0.04459, over 972539.43 frames.], batch size: 12, lr: 4.68e-04 +2022-05-04 20:07:51,259 INFO [train.py:715] (7/8) Epoch 4, batch 5900, loss[loss=0.1388, simple_loss=0.2105, pruned_loss=0.0335, over 4977.00 frames.], tot_loss[loss=0.16, simple_loss=0.2294, pruned_loss=0.04531, over 972620.15 frames.], batch size: 14, lr: 4.68e-04 +2022-05-04 20:08:30,938 INFO [train.py:715] (7/8) Epoch 4, batch 5950, loss[loss=0.1447, simple_loss=0.2113, pruned_loss=0.03905, over 4958.00 frames.], tot_loss[loss=0.159, simple_loss=0.2288, pruned_loss=0.04464, over 972345.94 frames.], batch size: 15, lr: 4.68e-04 +2022-05-04 20:09:10,573 INFO [train.py:715] (7/8) Epoch 4, batch 6000, loss[loss=0.1704, simple_loss=0.2425, pruned_loss=0.04913, over 4829.00 frames.], tot_loss[loss=0.1589, 
simple_loss=0.2284, pruned_loss=0.04474, over 973229.52 frames.], batch size: 15, lr: 4.68e-04 +2022-05-04 20:09:10,574 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 20:09:20,451 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1124, simple_loss=0.1981, pruned_loss=0.01337, over 914524.00 frames. +2022-05-04 20:10:00,571 INFO [train.py:715] (7/8) Epoch 4, batch 6050, loss[loss=0.1474, simple_loss=0.2211, pruned_loss=0.03681, over 4813.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2271, pruned_loss=0.04384, over 972959.89 frames.], batch size: 24, lr: 4.68e-04 +2022-05-04 20:10:40,768 INFO [train.py:715] (7/8) Epoch 4, batch 6100, loss[loss=0.2125, simple_loss=0.2826, pruned_loss=0.07125, over 4778.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2269, pruned_loss=0.04422, over 972764.64 frames.], batch size: 18, lr: 4.68e-04 +2022-05-04 20:11:21,162 INFO [train.py:715] (7/8) Epoch 4, batch 6150, loss[loss=0.1423, simple_loss=0.2046, pruned_loss=0.03995, over 4835.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2265, pruned_loss=0.04428, over 972566.29 frames.], batch size: 12, lr: 4.68e-04 +2022-05-04 20:12:01,191 INFO [train.py:715] (7/8) Epoch 4, batch 6200, loss[loss=0.1567, simple_loss=0.2343, pruned_loss=0.03954, over 4988.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.04436, over 972466.85 frames.], batch size: 14, lr: 4.68e-04 +2022-05-04 20:12:40,828 INFO [train.py:715] (7/8) Epoch 4, batch 6250, loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03492, over 4903.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2272, pruned_loss=0.04433, over 972754.35 frames.], batch size: 19, lr: 4.68e-04 +2022-05-04 20:13:21,464 INFO [train.py:715] (7/8) Epoch 4, batch 6300, loss[loss=0.1353, simple_loss=0.1871, pruned_loss=0.04181, over 4634.00 frames.], tot_loss[loss=0.158, simple_loss=0.2273, pruned_loss=0.04433, over 972110.43 frames.], batch size: 13, lr: 4.67e-04 +2022-05-04 20:14:00,899 INFO [train.py:715] (7/8) Epoch 4, batch 6350, loss[loss=0.1485, simple_loss=0.2109, pruned_loss=0.04303, over 4746.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2266, pruned_loss=0.04399, over 972768.10 frames.], batch size: 16, lr: 4.67e-04 +2022-05-04 20:14:41,824 INFO [train.py:715] (7/8) Epoch 4, batch 6400, loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.03689, over 4956.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2268, pruned_loss=0.04378, over 972770.76 frames.], batch size: 15, lr: 4.67e-04 +2022-05-04 20:15:21,561 INFO [train.py:715] (7/8) Epoch 4, batch 6450, loss[loss=0.1619, simple_loss=0.2324, pruned_loss=0.04568, over 4887.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2271, pruned_loss=0.04425, over 973095.00 frames.], batch size: 22, lr: 4.67e-04 +2022-05-04 20:16:01,665 INFO [train.py:715] (7/8) Epoch 4, batch 6500, loss[loss=0.173, simple_loss=0.2495, pruned_loss=0.04822, over 4956.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2263, pruned_loss=0.04423, over 972975.12 frames.], batch size: 35, lr: 4.67e-04 +2022-05-04 20:16:41,330 INFO [train.py:715] (7/8) Epoch 4, batch 6550, loss[loss=0.1725, simple_loss=0.2363, pruned_loss=0.0543, over 4960.00 frames.], tot_loss[loss=0.1581, simple_loss=0.227, pruned_loss=0.04459, over 972394.70 frames.], batch size: 15, lr: 4.67e-04 +2022-05-04 20:17:20,647 INFO [train.py:715] (7/8) Epoch 4, batch 6600, loss[loss=0.1847, simple_loss=0.2371, pruned_loss=0.06618, over 4846.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2269, pruned_loss=0.04469, over 972682.76 frames.], batch size: 
30, lr: 4.67e-04 +2022-05-04 20:18:01,340 INFO [train.py:715] (7/8) Epoch 4, batch 6650, loss[loss=0.1451, simple_loss=0.2186, pruned_loss=0.03585, over 4946.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2275, pruned_loss=0.04497, over 972017.71 frames.], batch size: 21, lr: 4.67e-04 +2022-05-04 20:18:40,887 INFO [train.py:715] (7/8) Epoch 4, batch 6700, loss[loss=0.1543, simple_loss=0.2199, pruned_loss=0.04436, over 4990.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2276, pruned_loss=0.04468, over 972438.54 frames.], batch size: 16, lr: 4.67e-04 +2022-05-04 20:19:21,002 INFO [train.py:715] (7/8) Epoch 4, batch 6750, loss[loss=0.1974, simple_loss=0.2639, pruned_loss=0.06544, over 4801.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2276, pruned_loss=0.04498, over 971457.46 frames.], batch size: 21, lr: 4.67e-04 +2022-05-04 20:20:00,766 INFO [train.py:715] (7/8) Epoch 4, batch 6800, loss[loss=0.14, simple_loss=0.2219, pruned_loss=0.02906, over 4908.00 frames.], tot_loss[loss=0.1592, simple_loss=0.228, pruned_loss=0.04525, over 970873.67 frames.], batch size: 18, lr: 4.67e-04 +2022-05-04 20:20:40,793 INFO [train.py:715] (7/8) Epoch 4, batch 6850, loss[loss=0.2043, simple_loss=0.2672, pruned_loss=0.0707, over 4786.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2274, pruned_loss=0.04462, over 972141.78 frames.], batch size: 18, lr: 4.67e-04 +2022-05-04 20:21:20,098 INFO [train.py:715] (7/8) Epoch 4, batch 6900, loss[loss=0.1443, simple_loss=0.2202, pruned_loss=0.03419, over 4739.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04446, over 972183.03 frames.], batch size: 16, lr: 4.66e-04 +2022-05-04 20:21:59,581 INFO [train.py:715] (7/8) Epoch 4, batch 6950, loss[loss=0.1381, simple_loss=0.2016, pruned_loss=0.03726, over 4886.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2263, pruned_loss=0.04408, over 972622.88 frames.], batch size: 22, lr: 4.66e-04 +2022-05-04 20:22:39,325 INFO [train.py:715] (7/8) Epoch 4, batch 7000, loss[loss=0.1445, simple_loss=0.2047, pruned_loss=0.04215, over 4988.00 frames.], tot_loss[loss=0.156, simple_loss=0.2251, pruned_loss=0.0434, over 972858.13 frames.], batch size: 31, lr: 4.66e-04 +2022-05-04 20:23:19,197 INFO [train.py:715] (7/8) Epoch 4, batch 7050, loss[loss=0.1535, simple_loss=0.2294, pruned_loss=0.03878, over 4795.00 frames.], tot_loss[loss=0.1569, simple_loss=0.226, pruned_loss=0.04391, over 972155.15 frames.], batch size: 17, lr: 4.66e-04 +2022-05-04 20:23:58,926 INFO [train.py:715] (7/8) Epoch 4, batch 7100, loss[loss=0.1624, simple_loss=0.2428, pruned_loss=0.04101, over 4855.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2267, pruned_loss=0.04406, over 972015.58 frames.], batch size: 20, lr: 4.66e-04 +2022-05-04 20:24:39,019 INFO [train.py:715] (7/8) Epoch 4, batch 7150, loss[loss=0.128, simple_loss=0.2057, pruned_loss=0.02513, over 4871.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2269, pruned_loss=0.04426, over 971578.95 frames.], batch size: 16, lr: 4.66e-04 +2022-05-04 20:25:18,947 INFO [train.py:715] (7/8) Epoch 4, batch 7200, loss[loss=0.1726, simple_loss=0.2413, pruned_loss=0.05197, over 4841.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2273, pruned_loss=0.04445, over 970654.97 frames.], batch size: 32, lr: 4.66e-04 +2022-05-04 20:25:59,101 INFO [train.py:715] (7/8) Epoch 4, batch 7250, loss[loss=0.146, simple_loss=0.2082, pruned_loss=0.04192, over 4812.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2276, pruned_loss=0.04445, over 970567.38 frames.], batch size: 12, lr: 4.66e-04 +2022-05-04 
20:26:38,422 INFO [train.py:715] (7/8) Epoch 4, batch 7300, loss[loss=0.1509, simple_loss=0.2083, pruned_loss=0.0467, over 4965.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2278, pruned_loss=0.04473, over 971052.20 frames.], batch size: 14, lr: 4.66e-04 +2022-05-04 20:27:18,103 INFO [train.py:715] (7/8) Epoch 4, batch 7350, loss[loss=0.1428, simple_loss=0.2196, pruned_loss=0.03298, over 4917.00 frames.], tot_loss[loss=0.1588, simple_loss=0.228, pruned_loss=0.0448, over 971960.08 frames.], batch size: 29, lr: 4.66e-04 +2022-05-04 20:27:58,076 INFO [train.py:715] (7/8) Epoch 4, batch 7400, loss[loss=0.1568, simple_loss=0.2202, pruned_loss=0.04672, over 4712.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2278, pruned_loss=0.04464, over 971511.49 frames.], batch size: 15, lr: 4.66e-04 +2022-05-04 20:28:38,815 INFO [train.py:715] (7/8) Epoch 4, batch 7450, loss[loss=0.1353, simple_loss=0.2023, pruned_loss=0.03416, over 4814.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2276, pruned_loss=0.04439, over 971823.34 frames.], batch size: 12, lr: 4.66e-04 +2022-05-04 20:29:18,224 INFO [train.py:715] (7/8) Epoch 4, batch 7500, loss[loss=0.1447, simple_loss=0.2077, pruned_loss=0.04081, over 4842.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2281, pruned_loss=0.04465, over 972891.34 frames.], batch size: 30, lr: 4.66e-04 +2022-05-04 20:29:58,242 INFO [train.py:715] (7/8) Epoch 4, batch 7550, loss[loss=0.1691, simple_loss=0.245, pruned_loss=0.04658, over 4947.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2284, pruned_loss=0.04497, over 972951.30 frames.], batch size: 23, lr: 4.65e-04 +2022-05-04 20:30:38,895 INFO [train.py:715] (7/8) Epoch 4, batch 7600, loss[loss=0.1858, simple_loss=0.2469, pruned_loss=0.06233, over 4975.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2287, pruned_loss=0.0451, over 972039.38 frames.], batch size: 25, lr: 4.65e-04 +2022-05-04 20:31:18,407 INFO [train.py:715] (7/8) Epoch 4, batch 7650, loss[loss=0.118, simple_loss=0.1903, pruned_loss=0.02281, over 4944.00 frames.], tot_loss[loss=0.1587, simple_loss=0.228, pruned_loss=0.04466, over 972373.76 frames.], batch size: 21, lr: 4.65e-04 +2022-05-04 20:31:58,068 INFO [train.py:715] (7/8) Epoch 4, batch 7700, loss[loss=0.1448, simple_loss=0.22, pruned_loss=0.03483, over 4989.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2272, pruned_loss=0.0443, over 972427.30 frames.], batch size: 26, lr: 4.65e-04 +2022-05-04 20:32:38,171 INFO [train.py:715] (7/8) Epoch 4, batch 7750, loss[loss=0.1277, simple_loss=0.186, pruned_loss=0.03472, over 4796.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2269, pruned_loss=0.04382, over 972521.03 frames.], batch size: 12, lr: 4.65e-04 +2022-05-04 20:33:18,305 INFO [train.py:715] (7/8) Epoch 4, batch 7800, loss[loss=0.1271, simple_loss=0.2003, pruned_loss=0.027, over 4981.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2263, pruned_loss=0.04364, over 972747.97 frames.], batch size: 28, lr: 4.65e-04 +2022-05-04 20:33:57,311 INFO [train.py:715] (7/8) Epoch 4, batch 7850, loss[loss=0.1428, simple_loss=0.2141, pruned_loss=0.03575, over 4920.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2262, pruned_loss=0.04376, over 971951.07 frames.], batch size: 23, lr: 4.65e-04 +2022-05-04 20:34:36,904 INFO [train.py:715] (7/8) Epoch 4, batch 7900, loss[loss=0.1289, simple_loss=0.2023, pruned_loss=0.0278, over 4894.00 frames.], tot_loss[loss=0.157, simple_loss=0.2265, pruned_loss=0.04374, over 971842.63 frames.], batch size: 39, lr: 4.65e-04 +2022-05-04 20:35:16,764 INFO [train.py:715] (7/8) Epoch 
4, batch 7950, loss[loss=0.1621, simple_loss=0.2385, pruned_loss=0.04281, over 4778.00 frames.], tot_loss[loss=0.1576, simple_loss=0.227, pruned_loss=0.04413, over 971954.52 frames.], batch size: 18, lr: 4.65e-04 +2022-05-04 20:35:56,348 INFO [train.py:715] (7/8) Epoch 4, batch 8000, loss[loss=0.1399, simple_loss=0.2095, pruned_loss=0.03512, over 4787.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2269, pruned_loss=0.04449, over 971922.14 frames.], batch size: 21, lr: 4.65e-04 +2022-05-04 20:36:36,311 INFO [train.py:715] (7/8) Epoch 4, batch 8050, loss[loss=0.1659, simple_loss=0.236, pruned_loss=0.04789, over 4943.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2287, pruned_loss=0.0453, over 972723.27 frames.], batch size: 23, lr: 4.65e-04 +2022-05-04 20:37:16,270 INFO [train.py:715] (7/8) Epoch 4, batch 8100, loss[loss=0.2114, simple_loss=0.27, pruned_loss=0.07643, over 4835.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2294, pruned_loss=0.04556, over 972235.07 frames.], batch size: 15, lr: 4.65e-04 +2022-05-04 20:37:56,510 INFO [train.py:715] (7/8) Epoch 4, batch 8150, loss[loss=0.185, simple_loss=0.2436, pruned_loss=0.06322, over 4918.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2288, pruned_loss=0.04532, over 971919.85 frames.], batch size: 39, lr: 4.65e-04 +2022-05-04 20:38:35,990 INFO [train.py:715] (7/8) Epoch 4, batch 8200, loss[loss=0.1207, simple_loss=0.1864, pruned_loss=0.02749, over 4988.00 frames.], tot_loss[loss=0.159, simple_loss=0.2285, pruned_loss=0.04471, over 972243.66 frames.], batch size: 25, lr: 4.64e-04 +2022-05-04 20:39:15,727 INFO [train.py:715] (7/8) Epoch 4, batch 8250, loss[loss=0.1407, simple_loss=0.21, pruned_loss=0.03567, over 4795.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2272, pruned_loss=0.04395, over 971821.95 frames.], batch size: 17, lr: 4.64e-04 +2022-05-04 20:39:55,879 INFO [train.py:715] (7/8) Epoch 4, batch 8300, loss[loss=0.1728, simple_loss=0.2413, pruned_loss=0.05222, over 4888.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2271, pruned_loss=0.04399, over 971630.11 frames.], batch size: 38, lr: 4.64e-04 +2022-05-04 20:40:35,312 INFO [train.py:715] (7/8) Epoch 4, batch 8350, loss[loss=0.1466, simple_loss=0.2207, pruned_loss=0.03618, over 4823.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2269, pruned_loss=0.04395, over 971796.39 frames.], batch size: 27, lr: 4.64e-04 +2022-05-04 20:41:15,404 INFO [train.py:715] (7/8) Epoch 4, batch 8400, loss[loss=0.1697, simple_loss=0.2491, pruned_loss=0.04514, over 4872.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2277, pruned_loss=0.04421, over 972282.02 frames.], batch size: 20, lr: 4.64e-04 +2022-05-04 20:41:55,746 INFO [train.py:715] (7/8) Epoch 4, batch 8450, loss[loss=0.1643, simple_loss=0.2349, pruned_loss=0.04682, over 4787.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2274, pruned_loss=0.04442, over 972483.79 frames.], batch size: 18, lr: 4.64e-04 +2022-05-04 20:42:35,855 INFO [train.py:715] (7/8) Epoch 4, batch 8500, loss[loss=0.1397, simple_loss=0.214, pruned_loss=0.03267, over 4947.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2276, pruned_loss=0.04493, over 972473.12 frames.], batch size: 21, lr: 4.64e-04 +2022-05-04 20:43:15,264 INFO [train.py:715] (7/8) Epoch 4, batch 8550, loss[loss=0.1183, simple_loss=0.1841, pruned_loss=0.02628, over 4817.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2268, pruned_loss=0.04477, over 971782.10 frames.], batch size: 13, lr: 4.64e-04 +2022-05-04 20:43:55,080 INFO [train.py:715] (7/8) Epoch 4, batch 8600, loss[loss=0.1675, 
simple_loss=0.2432, pruned_loss=0.04594, over 4797.00 frames.], tot_loss[loss=0.1596, simple_loss=0.228, pruned_loss=0.04557, over 972433.67 frames.], batch size: 17, lr: 4.64e-04 +2022-05-04 20:44:35,242 INFO [train.py:715] (7/8) Epoch 4, batch 8650, loss[loss=0.178, simple_loss=0.2478, pruned_loss=0.05408, over 4944.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2274, pruned_loss=0.04491, over 972797.84 frames.], batch size: 39, lr: 4.64e-04 +2022-05-04 20:45:14,872 INFO [train.py:715] (7/8) Epoch 4, batch 8700, loss[loss=0.1278, simple_loss=0.2066, pruned_loss=0.02451, over 4811.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04437, over 972499.37 frames.], batch size: 25, lr: 4.64e-04 +2022-05-04 20:45:55,168 INFO [train.py:715] (7/8) Epoch 4, batch 8750, loss[loss=0.1607, simple_loss=0.2305, pruned_loss=0.04546, over 4877.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2262, pruned_loss=0.04359, over 972718.90 frames.], batch size: 16, lr: 4.64e-04 +2022-05-04 20:46:35,399 INFO [train.py:715] (7/8) Epoch 4, batch 8800, loss[loss=0.1385, simple_loss=0.2026, pruned_loss=0.03723, over 4879.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2261, pruned_loss=0.0434, over 972229.19 frames.], batch size: 16, lr: 4.63e-04 +2022-05-04 20:47:15,431 INFO [train.py:715] (7/8) Epoch 4, batch 8850, loss[loss=0.139, simple_loss=0.2145, pruned_loss=0.03181, over 4813.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2267, pruned_loss=0.04379, over 972633.42 frames.], batch size: 26, lr: 4.63e-04 +2022-05-04 20:47:55,128 INFO [train.py:715] (7/8) Epoch 4, batch 8900, loss[loss=0.1477, simple_loss=0.2217, pruned_loss=0.0368, over 4861.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2258, pruned_loss=0.043, over 973072.67 frames.], batch size: 20, lr: 4.63e-04 +2022-05-04 20:48:34,762 INFO [train.py:715] (7/8) Epoch 4, batch 8950, loss[loss=0.1581, simple_loss=0.2274, pruned_loss=0.04436, over 4942.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2263, pruned_loss=0.04343, over 973176.82 frames.], batch size: 29, lr: 4.63e-04 +2022-05-04 20:49:15,025 INFO [train.py:715] (7/8) Epoch 4, batch 9000, loss[loss=0.1692, simple_loss=0.2345, pruned_loss=0.05195, over 4925.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2257, pruned_loss=0.04333, over 973098.36 frames.], batch size: 29, lr: 4.63e-04 +2022-05-04 20:49:15,025 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 20:49:24,977 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1123, simple_loss=0.1979, pruned_loss=0.01336, over 914524.00 frames. 
+2022-05-04 20:50:05,305 INFO [train.py:715] (7/8) Epoch 4, batch 9050, loss[loss=0.1601, simple_loss=0.212, pruned_loss=0.05412, over 4985.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2254, pruned_loss=0.04318, over 973531.71 frames.], batch size: 14, lr: 4.63e-04 +2022-05-04 20:50:45,314 INFO [train.py:715] (7/8) Epoch 4, batch 9100, loss[loss=0.1584, simple_loss=0.2289, pruned_loss=0.04391, over 4760.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2262, pruned_loss=0.04362, over 973778.91 frames.], batch size: 19, lr: 4.63e-04 +2022-05-04 20:51:24,713 INFO [train.py:715] (7/8) Epoch 4, batch 9150, loss[loss=0.1468, simple_loss=0.2214, pruned_loss=0.03616, over 4815.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2264, pruned_loss=0.0436, over 972866.18 frames.], batch size: 13, lr: 4.63e-04 +2022-05-04 20:52:04,885 INFO [train.py:715] (7/8) Epoch 4, batch 9200, loss[loss=0.1354, simple_loss=0.2007, pruned_loss=0.03511, over 4940.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2261, pruned_loss=0.04356, over 972169.31 frames.], batch size: 21, lr: 4.63e-04 +2022-05-04 20:52:45,290 INFO [train.py:715] (7/8) Epoch 4, batch 9250, loss[loss=0.1725, simple_loss=0.2379, pruned_loss=0.05355, over 4843.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2272, pruned_loss=0.04391, over 971616.56 frames.], batch size: 30, lr: 4.63e-04 +2022-05-04 20:53:24,536 INFO [train.py:715] (7/8) Epoch 4, batch 9300, loss[loss=0.1592, simple_loss=0.2382, pruned_loss=0.04011, over 4915.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2268, pruned_loss=0.04381, over 972181.01 frames.], batch size: 17, lr: 4.63e-04 +2022-05-04 20:54:04,530 INFO [train.py:715] (7/8) Epoch 4, batch 9350, loss[loss=0.1206, simple_loss=0.1917, pruned_loss=0.02476, over 4864.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2271, pruned_loss=0.04403, over 972646.72 frames.], batch size: 20, lr: 4.63e-04 +2022-05-04 20:54:44,473 INFO [train.py:715] (7/8) Epoch 4, batch 9400, loss[loss=0.1901, simple_loss=0.2453, pruned_loss=0.06744, over 4741.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2265, pruned_loss=0.04365, over 972735.68 frames.], batch size: 16, lr: 4.63e-04 +2022-05-04 20:55:24,005 INFO [train.py:715] (7/8) Epoch 4, batch 9450, loss[loss=0.1579, simple_loss=0.2357, pruned_loss=0.04008, over 4952.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2261, pruned_loss=0.04356, over 971760.90 frames.], batch size: 29, lr: 4.62e-04 +2022-05-04 20:56:04,095 INFO [train.py:715] (7/8) Epoch 4, batch 9500, loss[loss=0.1981, simple_loss=0.2624, pruned_loss=0.06686, over 4930.00 frames.], tot_loss[loss=0.157, simple_loss=0.2262, pruned_loss=0.04388, over 972107.00 frames.], batch size: 18, lr: 4.62e-04 +2022-05-04 20:56:44,151 INFO [train.py:715] (7/8) Epoch 4, batch 9550, loss[loss=0.142, simple_loss=0.2299, pruned_loss=0.02708, over 4978.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.04374, over 972038.88 frames.], batch size: 14, lr: 4.62e-04 +2022-05-04 20:57:24,671 INFO [train.py:715] (7/8) Epoch 4, batch 9600, loss[loss=0.1485, simple_loss=0.226, pruned_loss=0.0355, over 4858.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2265, pruned_loss=0.04362, over 972647.70 frames.], batch size: 38, lr: 4.62e-04 +2022-05-04 20:58:04,095 INFO [train.py:715] (7/8) Epoch 4, batch 9650, loss[loss=0.184, simple_loss=0.2452, pruned_loss=0.06138, over 4923.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2273, pruned_loss=0.04398, over 972833.81 frames.], batch size: 39, lr: 4.62e-04 +2022-05-04 20:58:44,657 INFO 
[train.py:715] (7/8) Epoch 4, batch 9700, loss[loss=0.1659, simple_loss=0.2293, pruned_loss=0.0513, over 4798.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2275, pruned_loss=0.04457, over 972479.91 frames.], batch size: 14, lr: 4.62e-04 +2022-05-04 20:59:25,199 INFO [train.py:715] (7/8) Epoch 4, batch 9750, loss[loss=0.1661, simple_loss=0.2326, pruned_loss=0.04982, over 4909.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2274, pruned_loss=0.04491, over 972288.76 frames.], batch size: 18, lr: 4.62e-04 +2022-05-04 21:00:04,725 INFO [train.py:715] (7/8) Epoch 4, batch 9800, loss[loss=0.1511, simple_loss=0.2216, pruned_loss=0.04032, over 4966.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2276, pruned_loss=0.04488, over 972917.55 frames.], batch size: 21, lr: 4.62e-04 +2022-05-04 21:00:43,864 INFO [train.py:715] (7/8) Epoch 4, batch 9850, loss[loss=0.1759, simple_loss=0.2447, pruned_loss=0.05354, over 4751.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2273, pruned_loss=0.04455, over 972957.24 frames.], batch size: 19, lr: 4.62e-04 +2022-05-04 21:01:23,904 INFO [train.py:715] (7/8) Epoch 4, batch 9900, loss[loss=0.1673, simple_loss=0.2282, pruned_loss=0.05316, over 4846.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2277, pruned_loss=0.04469, over 973752.73 frames.], batch size: 32, lr: 4.62e-04 +2022-05-04 21:02:03,379 INFO [train.py:715] (7/8) Epoch 4, batch 9950, loss[loss=0.1436, simple_loss=0.2206, pruned_loss=0.03328, over 4936.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2276, pruned_loss=0.04531, over 973778.55 frames.], batch size: 23, lr: 4.62e-04 +2022-05-04 21:02:42,753 INFO [train.py:715] (7/8) Epoch 4, batch 10000, loss[loss=0.2013, simple_loss=0.2516, pruned_loss=0.0755, over 4962.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2276, pruned_loss=0.04507, over 973440.71 frames.], batch size: 35, lr: 4.62e-04 +2022-05-04 21:03:22,517 INFO [train.py:715] (7/8) Epoch 4, batch 10050, loss[loss=0.2144, simple_loss=0.2826, pruned_loss=0.07312, over 4895.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2282, pruned_loss=0.04508, over 973585.01 frames.], batch size: 16, lr: 4.62e-04 +2022-05-04 21:04:02,314 INFO [train.py:715] (7/8) Epoch 4, batch 10100, loss[loss=0.1749, simple_loss=0.2382, pruned_loss=0.0558, over 4840.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2278, pruned_loss=0.0446, over 972766.58 frames.], batch size: 30, lr: 4.61e-04 +2022-05-04 21:04:41,555 INFO [train.py:715] (7/8) Epoch 4, batch 10150, loss[loss=0.164, simple_loss=0.2309, pruned_loss=0.04859, over 4926.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2278, pruned_loss=0.04487, over 972487.17 frames.], batch size: 21, lr: 4.61e-04 +2022-05-04 21:05:21,475 INFO [train.py:715] (7/8) Epoch 4, batch 10200, loss[loss=0.1572, simple_loss=0.2234, pruned_loss=0.04545, over 4690.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2267, pruned_loss=0.04432, over 972165.35 frames.], batch size: 15, lr: 4.61e-04 +2022-05-04 21:06:02,061 INFO [train.py:715] (7/8) Epoch 4, batch 10250, loss[loss=0.1514, simple_loss=0.2159, pruned_loss=0.04343, over 4776.00 frames.], tot_loss[loss=0.1566, simple_loss=0.226, pruned_loss=0.04362, over 971963.86 frames.], batch size: 14, lr: 4.61e-04 +2022-05-04 21:06:41,845 INFO [train.py:715] (7/8) Epoch 4, batch 10300, loss[loss=0.1392, simple_loss=0.2037, pruned_loss=0.03733, over 4806.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2262, pruned_loss=0.04336, over 971209.78 frames.], batch size: 13, lr: 4.61e-04 +2022-05-04 21:07:21,503 INFO [train.py:715] (7/8) Epoch 4, 
batch 10350, loss[loss=0.1658, simple_loss=0.2404, pruned_loss=0.04562, over 4848.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.0431, over 971017.52 frames.], batch size: 32, lr: 4.61e-04 +2022-05-04 21:08:01,706 INFO [train.py:715] (7/8) Epoch 4, batch 10400, loss[loss=0.1447, simple_loss=0.2156, pruned_loss=0.0369, over 4937.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.04378, over 971808.48 frames.], batch size: 35, lr: 4.61e-04 +2022-05-04 21:08:42,282 INFO [train.py:715] (7/8) Epoch 4, batch 10450, loss[loss=0.1525, simple_loss=0.2357, pruned_loss=0.03466, over 4917.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2269, pruned_loss=0.04406, over 972404.02 frames.], batch size: 18, lr: 4.61e-04 +2022-05-04 21:09:21,888 INFO [train.py:715] (7/8) Epoch 4, batch 10500, loss[loss=0.1729, simple_loss=0.2445, pruned_loss=0.05069, over 4796.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2268, pruned_loss=0.04384, over 972827.90 frames.], batch size: 24, lr: 4.61e-04 +2022-05-04 21:10:02,144 INFO [train.py:715] (7/8) Epoch 4, batch 10550, loss[loss=0.1448, simple_loss=0.2243, pruned_loss=0.03267, over 4872.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2267, pruned_loss=0.04377, over 973192.29 frames.], batch size: 16, lr: 4.61e-04 +2022-05-04 21:10:42,494 INFO [train.py:715] (7/8) Epoch 4, batch 10600, loss[loss=0.1428, simple_loss=0.2247, pruned_loss=0.03046, over 4909.00 frames.], tot_loss[loss=0.1563, simple_loss=0.226, pruned_loss=0.04327, over 972872.51 frames.], batch size: 29, lr: 4.61e-04 +2022-05-04 21:11:22,292 INFO [train.py:715] (7/8) Epoch 4, batch 10650, loss[loss=0.135, simple_loss=0.2005, pruned_loss=0.03474, over 4773.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2256, pruned_loss=0.04351, over 972150.70 frames.], batch size: 14, lr: 4.61e-04 +2022-05-04 21:12:02,342 INFO [train.py:715] (7/8) Epoch 4, batch 10700, loss[loss=0.1898, simple_loss=0.2487, pruned_loss=0.06541, over 4815.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2272, pruned_loss=0.04399, over 971923.69 frames.], batch size: 25, lr: 4.61e-04 +2022-05-04 21:12:42,036 INFO [train.py:715] (7/8) Epoch 4, batch 10750, loss[loss=0.1841, simple_loss=0.2564, pruned_loss=0.05593, over 4900.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2277, pruned_loss=0.04442, over 971468.40 frames.], batch size: 22, lr: 4.60e-04 +2022-05-04 21:13:22,456 INFO [train.py:715] (7/8) Epoch 4, batch 10800, loss[loss=0.1678, simple_loss=0.2417, pruned_loss=0.04696, over 4821.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2288, pruned_loss=0.04531, over 971653.11 frames.], batch size: 25, lr: 4.60e-04 +2022-05-04 21:14:01,773 INFO [train.py:715] (7/8) Epoch 4, batch 10850, loss[loss=0.1721, simple_loss=0.2312, pruned_loss=0.05646, over 4773.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2278, pruned_loss=0.04498, over 972361.24 frames.], batch size: 17, lr: 4.60e-04 +2022-05-04 21:14:41,703 INFO [train.py:715] (7/8) Epoch 4, batch 10900, loss[loss=0.1579, simple_loss=0.2309, pruned_loss=0.04247, over 4759.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2274, pruned_loss=0.04475, over 972003.23 frames.], batch size: 19, lr: 4.60e-04 +2022-05-04 21:15:22,021 INFO [train.py:715] (7/8) Epoch 4, batch 10950, loss[loss=0.1435, simple_loss=0.2317, pruned_loss=0.02763, over 4973.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2273, pruned_loss=0.04427, over 972775.87 frames.], batch size: 28, lr: 4.60e-04 +2022-05-04 21:16:01,655 INFO [train.py:715] (7/8) Epoch 4, batch 11000, 
loss[loss=0.1497, simple_loss=0.2099, pruned_loss=0.04477, over 4852.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2274, pruned_loss=0.0444, over 972021.88 frames.], batch size: 15, lr: 4.60e-04 +2022-05-04 21:16:44,088 INFO [train.py:715] (7/8) Epoch 4, batch 11050, loss[loss=0.1458, simple_loss=0.2218, pruned_loss=0.03491, over 4752.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2272, pruned_loss=0.0439, over 971399.03 frames.], batch size: 19, lr: 4.60e-04 +2022-05-04 21:17:24,569 INFO [train.py:715] (7/8) Epoch 4, batch 11100, loss[loss=0.1438, simple_loss=0.2101, pruned_loss=0.03873, over 4749.00 frames.], tot_loss[loss=0.1566, simple_loss=0.226, pruned_loss=0.04356, over 970942.16 frames.], batch size: 16, lr: 4.60e-04 +2022-05-04 21:18:07,374 INFO [train.py:715] (7/8) Epoch 4, batch 11150, loss[loss=0.1425, simple_loss=0.2101, pruned_loss=0.03747, over 4973.00 frames.], tot_loss[loss=0.1563, simple_loss=0.226, pruned_loss=0.04325, over 971281.94 frames.], batch size: 15, lr: 4.60e-04 +2022-05-04 21:18:49,591 INFO [train.py:715] (7/8) Epoch 4, batch 11200, loss[loss=0.1553, simple_loss=0.2167, pruned_loss=0.04692, over 4777.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2261, pruned_loss=0.04319, over 970854.75 frames.], batch size: 18, lr: 4.60e-04 +2022-05-04 21:19:29,994 INFO [train.py:715] (7/8) Epoch 4, batch 11250, loss[loss=0.1423, simple_loss=0.2064, pruned_loss=0.0391, over 4865.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2268, pruned_loss=0.04332, over 971393.82 frames.], batch size: 30, lr: 4.60e-04 +2022-05-04 21:20:12,912 INFO [train.py:715] (7/8) Epoch 4, batch 11300, loss[loss=0.1852, simple_loss=0.2522, pruned_loss=0.05914, over 4643.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2274, pruned_loss=0.04391, over 971702.09 frames.], batch size: 13, lr: 4.60e-04 +2022-05-04 21:20:52,371 INFO [train.py:715] (7/8) Epoch 4, batch 11350, loss[loss=0.2142, simple_loss=0.2665, pruned_loss=0.08094, over 4778.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2268, pruned_loss=0.04402, over 972383.03 frames.], batch size: 18, lr: 4.60e-04 +2022-05-04 21:21:31,878 INFO [train.py:715] (7/8) Epoch 4, batch 11400, loss[loss=0.1469, simple_loss=0.2214, pruned_loss=0.0362, over 4926.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2264, pruned_loss=0.04345, over 972674.46 frames.], batch size: 29, lr: 4.59e-04 +2022-05-04 21:22:11,705 INFO [train.py:715] (7/8) Epoch 4, batch 11450, loss[loss=0.1594, simple_loss=0.2181, pruned_loss=0.05039, over 4833.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2274, pruned_loss=0.04437, over 972676.99 frames.], batch size: 13, lr: 4.59e-04 +2022-05-04 21:22:51,367 INFO [train.py:715] (7/8) Epoch 4, batch 11500, loss[loss=0.1651, simple_loss=0.2261, pruned_loss=0.05203, over 4936.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2278, pruned_loss=0.04405, over 973642.48 frames.], batch size: 21, lr: 4.59e-04 +2022-05-04 21:23:30,615 INFO [train.py:715] (7/8) Epoch 4, batch 11550, loss[loss=0.1711, simple_loss=0.2408, pruned_loss=0.05067, over 4949.00 frames.], tot_loss[loss=0.158, simple_loss=0.2276, pruned_loss=0.04423, over 973832.90 frames.], batch size: 29, lr: 4.59e-04 +2022-05-04 21:24:09,876 INFO [train.py:715] (7/8) Epoch 4, batch 11600, loss[loss=0.1466, simple_loss=0.2199, pruned_loss=0.03664, over 4927.00 frames.], tot_loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04402, over 973560.71 frames.], batch size: 29, lr: 4.59e-04 +2022-05-04 21:24:50,388 INFO [train.py:715] (7/8) Epoch 4, batch 11650, loss[loss=0.1649, 
simple_loss=0.2306, pruned_loss=0.04963, over 4975.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2261, pruned_loss=0.04324, over 973686.61 frames.], batch size: 24, lr: 4.59e-04 +2022-05-04 21:25:30,285 INFO [train.py:715] (7/8) Epoch 4, batch 11700, loss[loss=0.1495, simple_loss=0.2165, pruned_loss=0.04119, over 4843.00 frames.], tot_loss[loss=0.156, simple_loss=0.2258, pruned_loss=0.04311, over 974116.54 frames.], batch size: 32, lr: 4.59e-04 +2022-05-04 21:26:10,259 INFO [train.py:715] (7/8) Epoch 4, batch 11750, loss[loss=0.156, simple_loss=0.2346, pruned_loss=0.03866, over 4939.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2256, pruned_loss=0.0431, over 972679.00 frames.], batch size: 21, lr: 4.59e-04 +2022-05-04 21:26:50,008 INFO [train.py:715] (7/8) Epoch 4, batch 11800, loss[loss=0.1575, simple_loss=0.2254, pruned_loss=0.04484, over 4785.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2257, pruned_loss=0.04345, over 972485.97 frames.], batch size: 17, lr: 4.59e-04 +2022-05-04 21:27:30,278 INFO [train.py:715] (7/8) Epoch 4, batch 11850, loss[loss=0.1968, simple_loss=0.2621, pruned_loss=0.06571, over 4831.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2266, pruned_loss=0.04363, over 972332.09 frames.], batch size: 15, lr: 4.59e-04 +2022-05-04 21:28:09,533 INFO [train.py:715] (7/8) Epoch 4, batch 11900, loss[loss=0.1686, simple_loss=0.2331, pruned_loss=0.05204, over 4884.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2262, pruned_loss=0.0436, over 972502.05 frames.], batch size: 22, lr: 4.59e-04 +2022-05-04 21:28:49,309 INFO [train.py:715] (7/8) Epoch 4, batch 11950, loss[loss=0.1318, simple_loss=0.2038, pruned_loss=0.0299, over 4918.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2261, pruned_loss=0.04375, over 972276.71 frames.], batch size: 29, lr: 4.59e-04 +2022-05-04 21:29:29,767 INFO [train.py:715] (7/8) Epoch 4, batch 12000, loss[loss=0.1493, simple_loss=0.2179, pruned_loss=0.04031, over 4867.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04436, over 971868.18 frames.], batch size: 20, lr: 4.59e-04 +2022-05-04 21:29:29,767 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 21:29:49,526 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1122, simple_loss=0.198, pruned_loss=0.01324, over 914524.00 frames. 
+2022-05-04 21:30:30,068 INFO [train.py:715] (7/8) Epoch 4, batch 12050, loss[loss=0.1637, simple_loss=0.233, pruned_loss=0.0472, over 4938.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2265, pruned_loss=0.04417, over 972199.26 frames.], batch size: 29, lr: 4.58e-04 +2022-05-04 21:31:09,881 INFO [train.py:715] (7/8) Epoch 4, batch 12100, loss[loss=0.1718, simple_loss=0.2257, pruned_loss=0.0589, over 4862.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2272, pruned_loss=0.04446, over 972269.53 frames.], batch size: 16, lr: 4.58e-04 +2022-05-04 21:31:50,059 INFO [train.py:715] (7/8) Epoch 4, batch 12150, loss[loss=0.1681, simple_loss=0.2379, pruned_loss=0.04918, over 4931.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2271, pruned_loss=0.04483, over 972719.39 frames.], batch size: 21, lr: 4.58e-04 +2022-05-04 21:32:30,107 INFO [train.py:715] (7/8) Epoch 4, batch 12200, loss[loss=0.1222, simple_loss=0.1976, pruned_loss=0.02335, over 4799.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2265, pruned_loss=0.04416, over 971669.32 frames.], batch size: 21, lr: 4.58e-04 +2022-05-04 21:33:10,435 INFO [train.py:715] (7/8) Epoch 4, batch 12250, loss[loss=0.1404, simple_loss=0.2053, pruned_loss=0.03777, over 4860.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2265, pruned_loss=0.04367, over 971563.89 frames.], batch size: 13, lr: 4.58e-04 +2022-05-04 21:33:49,417 INFO [train.py:715] (7/8) Epoch 4, batch 12300, loss[loss=0.1672, simple_loss=0.2356, pruned_loss=0.0494, over 4782.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.04343, over 971550.62 frames.], batch size: 14, lr: 4.58e-04 +2022-05-04 21:34:29,436 INFO [train.py:715] (7/8) Epoch 4, batch 12350, loss[loss=0.1307, simple_loss=0.2095, pruned_loss=0.02593, over 4814.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04305, over 971568.05 frames.], batch size: 25, lr: 4.58e-04 +2022-05-04 21:35:10,025 INFO [train.py:715] (7/8) Epoch 4, batch 12400, loss[loss=0.1492, simple_loss=0.2207, pruned_loss=0.03888, over 4760.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2265, pruned_loss=0.04383, over 971912.89 frames.], batch size: 19, lr: 4.58e-04 +2022-05-04 21:35:49,237 INFO [train.py:715] (7/8) Epoch 4, batch 12450, loss[loss=0.1574, simple_loss=0.2303, pruned_loss=0.04228, over 4985.00 frames.], tot_loss[loss=0.1569, simple_loss=0.226, pruned_loss=0.04392, over 972768.93 frames.], batch size: 28, lr: 4.58e-04 +2022-05-04 21:36:29,201 INFO [train.py:715] (7/8) Epoch 4, batch 12500, loss[loss=0.174, simple_loss=0.2454, pruned_loss=0.05132, over 4765.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2266, pruned_loss=0.04412, over 972710.54 frames.], batch size: 14, lr: 4.58e-04 +2022-05-04 21:37:08,759 INFO [train.py:715] (7/8) Epoch 4, batch 12550, loss[loss=0.1859, simple_loss=0.2477, pruned_loss=0.06205, over 4827.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2271, pruned_loss=0.04435, over 973069.22 frames.], batch size: 15, lr: 4.58e-04 +2022-05-04 21:37:48,546 INFO [train.py:715] (7/8) Epoch 4, batch 12600, loss[loss=0.1575, simple_loss=0.2301, pruned_loss=0.04244, over 4856.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2276, pruned_loss=0.04435, over 972544.17 frames.], batch size: 20, lr: 4.58e-04 +2022-05-04 21:38:27,436 INFO [train.py:715] (7/8) Epoch 4, batch 12650, loss[loss=0.1404, simple_loss=0.2176, pruned_loss=0.03158, over 4826.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2265, pruned_loss=0.04395, over 971249.27 frames.], batch size: 12, lr: 4.58e-04 +2022-05-04 21:39:07,276 
INFO [train.py:715] (7/8) Epoch 4, batch 12700, loss[loss=0.1423, simple_loss=0.2234, pruned_loss=0.03057, over 4749.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2256, pruned_loss=0.04354, over 971283.33 frames.], batch size: 19, lr: 4.58e-04 +2022-05-04 21:39:47,347 INFO [train.py:715] (7/8) Epoch 4, batch 12750, loss[loss=0.1356, simple_loss=0.2019, pruned_loss=0.03464, over 4909.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2269, pruned_loss=0.04419, over 971917.90 frames.], batch size: 19, lr: 4.57e-04 +2022-05-04 21:40:29,600 INFO [train.py:715] (7/8) Epoch 4, batch 12800, loss[loss=0.1634, simple_loss=0.2308, pruned_loss=0.04795, over 4948.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04442, over 971652.10 frames.], batch size: 21, lr: 4.57e-04 +2022-05-04 21:41:08,995 INFO [train.py:715] (7/8) Epoch 4, batch 12850, loss[loss=0.1594, simple_loss=0.2421, pruned_loss=0.03837, over 4704.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2255, pruned_loss=0.04396, over 971696.34 frames.], batch size: 15, lr: 4.57e-04 +2022-05-04 21:41:49,126 INFO [train.py:715] (7/8) Epoch 4, batch 12900, loss[loss=0.1539, simple_loss=0.2254, pruned_loss=0.04119, over 4785.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2258, pruned_loss=0.04393, over 972175.81 frames.], batch size: 18, lr: 4.57e-04 +2022-05-04 21:42:29,050 INFO [train.py:715] (7/8) Epoch 4, batch 12950, loss[loss=0.1952, simple_loss=0.2568, pruned_loss=0.06681, over 4880.00 frames.], tot_loss[loss=0.157, simple_loss=0.2259, pruned_loss=0.04403, over 971779.75 frames.], batch size: 39, lr: 4.57e-04 +2022-05-04 21:43:07,918 INFO [train.py:715] (7/8) Epoch 4, batch 13000, loss[loss=0.1323, simple_loss=0.2032, pruned_loss=0.03071, over 4854.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2254, pruned_loss=0.04379, over 971778.99 frames.], batch size: 13, lr: 4.57e-04 +2022-05-04 21:43:47,508 INFO [train.py:715] (7/8) Epoch 4, batch 13050, loss[loss=0.145, simple_loss=0.2179, pruned_loss=0.03604, over 4783.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2267, pruned_loss=0.04437, over 971559.70 frames.], batch size: 18, lr: 4.57e-04 +2022-05-04 21:44:27,465 INFO [train.py:715] (7/8) Epoch 4, batch 13100, loss[loss=0.1401, simple_loss=0.2121, pruned_loss=0.03406, over 4931.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2276, pruned_loss=0.04441, over 971878.29 frames.], batch size: 21, lr: 4.57e-04 +2022-05-04 21:45:06,506 INFO [train.py:715] (7/8) Epoch 4, batch 13150, loss[loss=0.1629, simple_loss=0.2347, pruned_loss=0.04549, over 4830.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2278, pruned_loss=0.04427, over 971950.25 frames.], batch size: 15, lr: 4.57e-04 +2022-05-04 21:45:46,247 INFO [train.py:715] (7/8) Epoch 4, batch 13200, loss[loss=0.1954, simple_loss=0.2676, pruned_loss=0.06157, over 4817.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2269, pruned_loss=0.04438, over 972320.54 frames.], batch size: 15, lr: 4.57e-04 +2022-05-04 21:46:26,567 INFO [train.py:715] (7/8) Epoch 4, batch 13250, loss[loss=0.1721, simple_loss=0.2343, pruned_loss=0.05496, over 4935.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2273, pruned_loss=0.04488, over 973323.16 frames.], batch size: 18, lr: 4.57e-04 +2022-05-04 21:47:06,175 INFO [train.py:715] (7/8) Epoch 4, batch 13300, loss[loss=0.1674, simple_loss=0.233, pruned_loss=0.05093, over 4774.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2263, pruned_loss=0.0444, over 973653.53 frames.], batch size: 17, lr: 4.57e-04 +2022-05-04 21:47:45,760 INFO [train.py:715] 
(7/8) Epoch 4, batch 13350, loss[loss=0.1604, simple_loss=0.2302, pruned_loss=0.04526, over 4809.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2268, pruned_loss=0.04436, over 973144.12 frames.], batch size: 21, lr: 4.57e-04 +2022-05-04 21:48:25,401 INFO [train.py:715] (7/8) Epoch 4, batch 13400, loss[loss=0.131, simple_loss=0.2024, pruned_loss=0.02983, over 4939.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2267, pruned_loss=0.04446, over 972384.07 frames.], batch size: 21, lr: 4.56e-04 +2022-05-04 21:49:05,431 INFO [train.py:715] (7/8) Epoch 4, batch 13450, loss[loss=0.1551, simple_loss=0.2278, pruned_loss=0.04116, over 4863.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2254, pruned_loss=0.04352, over 972406.15 frames.], batch size: 20, lr: 4.56e-04 +2022-05-04 21:49:45,241 INFO [train.py:715] (7/8) Epoch 4, batch 13500, loss[loss=0.1316, simple_loss=0.1997, pruned_loss=0.03177, over 4716.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2255, pruned_loss=0.04347, over 973015.58 frames.], batch size: 15, lr: 4.56e-04 +2022-05-04 21:50:27,086 INFO [train.py:715] (7/8) Epoch 4, batch 13550, loss[loss=0.1202, simple_loss=0.2053, pruned_loss=0.01761, over 4884.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04302, over 973087.97 frames.], batch size: 22, lr: 4.56e-04 +2022-05-04 21:51:07,664 INFO [train.py:715] (7/8) Epoch 4, batch 13600, loss[loss=0.1507, simple_loss=0.2155, pruned_loss=0.04299, over 4818.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2239, pruned_loss=0.04218, over 972772.87 frames.], batch size: 26, lr: 4.56e-04 +2022-05-04 21:51:47,214 INFO [train.py:715] (7/8) Epoch 4, batch 13650, loss[loss=0.1576, simple_loss=0.2333, pruned_loss=0.04097, over 4968.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2245, pruned_loss=0.04241, over 973104.88 frames.], batch size: 15, lr: 4.56e-04 +2022-05-04 21:52:26,527 INFO [train.py:715] (7/8) Epoch 4, batch 13700, loss[loss=0.1627, simple_loss=0.2228, pruned_loss=0.05129, over 4838.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04296, over 972301.86 frames.], batch size: 13, lr: 4.56e-04 +2022-05-04 21:53:06,454 INFO [train.py:715] (7/8) Epoch 4, batch 13750, loss[loss=0.1716, simple_loss=0.2444, pruned_loss=0.04935, over 4908.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2259, pruned_loss=0.04349, over 972770.10 frames.], batch size: 19, lr: 4.56e-04 +2022-05-04 21:53:48,116 INFO [train.py:715] (7/8) Epoch 4, batch 13800, loss[loss=0.155, simple_loss=0.2368, pruned_loss=0.03664, over 4870.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2266, pruned_loss=0.04359, over 972347.83 frames.], batch size: 16, lr: 4.56e-04 +2022-05-04 21:54:29,039 INFO [train.py:715] (7/8) Epoch 4, batch 13850, loss[loss=0.1489, simple_loss=0.2182, pruned_loss=0.03976, over 4865.00 frames.], tot_loss[loss=0.1561, simple_loss=0.226, pruned_loss=0.04304, over 972896.94 frames.], batch size: 20, lr: 4.56e-04 +2022-05-04 21:55:10,925 INFO [train.py:715] (7/8) Epoch 4, batch 13900, loss[loss=0.1323, simple_loss=0.211, pruned_loss=0.02682, over 4836.00 frames.], tot_loss[loss=0.1554, simple_loss=0.225, pruned_loss=0.04287, over 973306.51 frames.], batch size: 15, lr: 4.56e-04 +2022-05-04 21:55:52,334 INFO [train.py:715] (7/8) Epoch 4, batch 13950, loss[loss=0.1632, simple_loss=0.23, pruned_loss=0.04816, over 4908.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2249, pruned_loss=0.04306, over 972896.95 frames.], batch size: 17, lr: 4.56e-04 +2022-05-04 21:56:31,849 INFO [train.py:715] (7/8) Epoch 4, batch 
14000, loss[loss=0.1566, simple_loss=0.2338, pruned_loss=0.03968, over 4661.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2253, pruned_loss=0.04283, over 972384.89 frames.], batch size: 13, lr: 4.56e-04 +2022-05-04 21:57:12,906 INFO [train.py:715] (7/8) Epoch 4, batch 14050, loss[loss=0.1849, simple_loss=0.2556, pruned_loss=0.05708, over 4847.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2253, pruned_loss=0.04275, over 972421.85 frames.], batch size: 30, lr: 4.55e-04 +2022-05-04 21:57:52,566 INFO [train.py:715] (7/8) Epoch 4, batch 14100, loss[loss=0.1671, simple_loss=0.2324, pruned_loss=0.05096, over 4754.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2262, pruned_loss=0.04298, over 972977.46 frames.], batch size: 16, lr: 4.55e-04 +2022-05-04 21:58:32,931 INFO [train.py:715] (7/8) Epoch 4, batch 14150, loss[loss=0.1402, simple_loss=0.2121, pruned_loss=0.03412, over 4839.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2269, pruned_loss=0.0434, over 972906.77 frames.], batch size: 30, lr: 4.55e-04 +2022-05-04 21:59:12,288 INFO [train.py:715] (7/8) Epoch 4, batch 14200, loss[loss=0.1582, simple_loss=0.2333, pruned_loss=0.04154, over 4940.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2265, pruned_loss=0.04315, over 972822.97 frames.], batch size: 23, lr: 4.55e-04 +2022-05-04 21:59:51,980 INFO [train.py:715] (7/8) Epoch 4, batch 14250, loss[loss=0.1347, simple_loss=0.2012, pruned_loss=0.03408, over 4960.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2254, pruned_loss=0.04265, over 973097.86 frames.], batch size: 15, lr: 4.55e-04 +2022-05-04 22:00:32,133 INFO [train.py:715] (7/8) Epoch 4, batch 14300, loss[loss=0.1519, simple_loss=0.2245, pruned_loss=0.03964, over 4753.00 frames.], tot_loss[loss=0.1554, simple_loss=0.225, pruned_loss=0.04291, over 972985.98 frames.], batch size: 16, lr: 4.55e-04 +2022-05-04 22:01:10,605 INFO [train.py:715] (7/8) Epoch 4, batch 14350, loss[loss=0.1583, simple_loss=0.2303, pruned_loss=0.04313, over 4972.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2253, pruned_loss=0.0434, over 972555.12 frames.], batch size: 35, lr: 4.55e-04 +2022-05-04 22:01:50,874 INFO [train.py:715] (7/8) Epoch 4, batch 14400, loss[loss=0.185, simple_loss=0.249, pruned_loss=0.06052, over 4833.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2258, pruned_loss=0.04332, over 972649.30 frames.], batch size: 15, lr: 4.55e-04 +2022-05-04 22:02:30,298 INFO [train.py:715] (7/8) Epoch 4, batch 14450, loss[loss=0.1712, simple_loss=0.2497, pruned_loss=0.04635, over 4861.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2263, pruned_loss=0.0434, over 972692.15 frames.], batch size: 20, lr: 4.55e-04 +2022-05-04 22:03:09,289 INFO [train.py:715] (7/8) Epoch 4, batch 14500, loss[loss=0.1185, simple_loss=0.1979, pruned_loss=0.01957, over 4803.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2279, pruned_loss=0.04436, over 972374.39 frames.], batch size: 21, lr: 4.55e-04 +2022-05-04 22:03:48,141 INFO [train.py:715] (7/8) Epoch 4, batch 14550, loss[loss=0.1786, simple_loss=0.255, pruned_loss=0.05114, over 4907.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2292, pruned_loss=0.04481, over 972729.22 frames.], batch size: 19, lr: 4.55e-04 +2022-05-04 22:04:27,644 INFO [train.py:715] (7/8) Epoch 4, batch 14600, loss[loss=0.1749, simple_loss=0.2309, pruned_loss=0.05942, over 4850.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2289, pruned_loss=0.04536, over 971817.64 frames.], batch size: 32, lr: 4.55e-04 +2022-05-04 22:05:07,543 INFO [train.py:715] (7/8) Epoch 4, batch 14650, loss[loss=0.137, 
simple_loss=0.2136, pruned_loss=0.03025, over 4942.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2281, pruned_loss=0.045, over 972161.95 frames.], batch size: 29, lr: 4.55e-04 +2022-05-04 22:05:46,292 INFO [train.py:715] (7/8) Epoch 4, batch 14700, loss[loss=0.1588, simple_loss=0.2262, pruned_loss=0.04576, over 4975.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2278, pruned_loss=0.04492, over 972443.21 frames.], batch size: 25, lr: 4.55e-04 +2022-05-04 22:06:26,149 INFO [train.py:715] (7/8) Epoch 4, batch 14750, loss[loss=0.1761, simple_loss=0.2482, pruned_loss=0.05195, over 4856.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2275, pruned_loss=0.04462, over 972412.65 frames.], batch size: 30, lr: 4.54e-04 +2022-05-04 22:07:06,149 INFO [train.py:715] (7/8) Epoch 4, batch 14800, loss[loss=0.1609, simple_loss=0.2245, pruned_loss=0.04858, over 4683.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2275, pruned_loss=0.04487, over 971369.87 frames.], batch size: 15, lr: 4.54e-04 +2022-05-04 22:07:51,027 INFO [train.py:715] (7/8) Epoch 4, batch 14850, loss[loss=0.1514, simple_loss=0.2145, pruned_loss=0.04416, over 4983.00 frames.], tot_loss[loss=0.158, simple_loss=0.2274, pruned_loss=0.04426, over 971264.71 frames.], batch size: 14, lr: 4.54e-04 +2022-05-04 22:08:31,251 INFO [train.py:715] (7/8) Epoch 4, batch 14900, loss[loss=0.1469, simple_loss=0.2105, pruned_loss=0.0416, over 4827.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2273, pruned_loss=0.0444, over 971182.76 frames.], batch size: 15, lr: 4.54e-04 +2022-05-04 22:09:11,333 INFO [train.py:715] (7/8) Epoch 4, batch 14950, loss[loss=0.1646, simple_loss=0.2291, pruned_loss=0.05004, over 4928.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.04433, over 971014.17 frames.], batch size: 35, lr: 4.54e-04 +2022-05-04 22:09:51,668 INFO [train.py:715] (7/8) Epoch 4, batch 15000, loss[loss=0.1365, simple_loss=0.2169, pruned_loss=0.02805, over 4881.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2269, pruned_loss=0.04398, over 970984.56 frames.], batch size: 22, lr: 4.54e-04 +2022-05-04 22:09:51,669 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 22:10:32,004 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1122, simple_loss=0.1978, pruned_loss=0.01336, over 914524.00 frames. 
+2022-05-04 22:11:12,734 INFO [train.py:715] (7/8) Epoch 4, batch 15050, loss[loss=0.132, simple_loss=0.2042, pruned_loss=0.02987, over 4781.00 frames.], tot_loss[loss=0.1564, simple_loss=0.226, pruned_loss=0.04335, over 971651.98 frames.], batch size: 14, lr: 4.54e-04 +2022-05-04 22:11:52,175 INFO [train.py:715] (7/8) Epoch 4, batch 15100, loss[loss=0.1622, simple_loss=0.2254, pruned_loss=0.04948, over 4947.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2254, pruned_loss=0.04313, over 972234.53 frames.], batch size: 35, lr: 4.54e-04 +2022-05-04 22:12:32,073 INFO [train.py:715] (7/8) Epoch 4, batch 15150, loss[loss=0.1517, simple_loss=0.2271, pruned_loss=0.03818, over 4849.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2258, pruned_loss=0.04289, over 972141.95 frames.], batch size: 20, lr: 4.54e-04 +2022-05-04 22:13:12,025 INFO [train.py:715] (7/8) Epoch 4, batch 15200, loss[loss=0.1498, simple_loss=0.222, pruned_loss=0.03881, over 4705.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04326, over 972248.32 frames.], batch size: 15, lr: 4.54e-04 +2022-05-04 22:13:51,747 INFO [train.py:715] (7/8) Epoch 4, batch 15250, loss[loss=0.1792, simple_loss=0.2477, pruned_loss=0.05537, over 4987.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2266, pruned_loss=0.04362, over 972690.89 frames.], batch size: 14, lr: 4.54e-04 +2022-05-04 22:14:31,957 INFO [train.py:715] (7/8) Epoch 4, batch 15300, loss[loss=0.1654, simple_loss=0.2281, pruned_loss=0.05138, over 4947.00 frames.], tot_loss[loss=0.157, simple_loss=0.2264, pruned_loss=0.04384, over 972224.98 frames.], batch size: 39, lr: 4.54e-04 +2022-05-04 22:15:12,423 INFO [train.py:715] (7/8) Epoch 4, batch 15350, loss[loss=0.1439, simple_loss=0.2247, pruned_loss=0.0315, over 4744.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2268, pruned_loss=0.04412, over 972150.61 frames.], batch size: 16, lr: 4.54e-04 +2022-05-04 22:15:52,256 INFO [train.py:715] (7/8) Epoch 4, batch 15400, loss[loss=0.1413, simple_loss=0.213, pruned_loss=0.03476, over 4952.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2258, pruned_loss=0.04365, over 972432.97 frames.], batch size: 23, lr: 4.53e-04 +2022-05-04 22:16:32,479 INFO [train.py:715] (7/8) Epoch 4, batch 15450, loss[loss=0.1632, simple_loss=0.2366, pruned_loss=0.04493, over 4814.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2252, pruned_loss=0.04333, over 973333.95 frames.], batch size: 26, lr: 4.53e-04 +2022-05-04 22:17:12,932 INFO [train.py:715] (7/8) Epoch 4, batch 15500, loss[loss=0.1791, simple_loss=0.2377, pruned_loss=0.06031, over 4957.00 frames.], tot_loss[loss=0.157, simple_loss=0.2261, pruned_loss=0.04399, over 973018.83 frames.], batch size: 35, lr: 4.53e-04 +2022-05-04 22:17:53,283 INFO [train.py:715] (7/8) Epoch 4, batch 15550, loss[loss=0.1777, simple_loss=0.2412, pruned_loss=0.05708, over 4771.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2272, pruned_loss=0.04454, over 973046.59 frames.], batch size: 18, lr: 4.53e-04 +2022-05-04 22:18:32,667 INFO [train.py:715] (7/8) Epoch 4, batch 15600, loss[loss=0.1587, simple_loss=0.2321, pruned_loss=0.04265, over 4877.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2277, pruned_loss=0.04488, over 973272.90 frames.], batch size: 22, lr: 4.53e-04 +2022-05-04 22:19:13,497 INFO [train.py:715] (7/8) Epoch 4, batch 15650, loss[loss=0.1868, simple_loss=0.2548, pruned_loss=0.05942, over 4881.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2278, pruned_loss=0.04457, over 973223.34 frames.], batch size: 22, lr: 4.53e-04 +2022-05-04 22:19:53,086 
INFO [train.py:715] (7/8) Epoch 4, batch 15700, loss[loss=0.1461, simple_loss=0.2155, pruned_loss=0.03838, over 4990.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.04437, over 974202.58 frames.], batch size: 28, lr: 4.53e-04 +2022-05-04 22:20:33,263 INFO [train.py:715] (7/8) Epoch 4, batch 15750, loss[loss=0.1455, simple_loss=0.2346, pruned_loss=0.02824, over 4955.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2276, pruned_loss=0.04444, over 973769.94 frames.], batch size: 21, lr: 4.53e-04 +2022-05-04 22:21:12,810 INFO [train.py:715] (7/8) Epoch 4, batch 15800, loss[loss=0.1372, simple_loss=0.1966, pruned_loss=0.03888, over 4753.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2273, pruned_loss=0.04416, over 973495.00 frames.], batch size: 12, lr: 4.53e-04 +2022-05-04 22:21:53,777 INFO [train.py:715] (7/8) Epoch 4, batch 15850, loss[loss=0.1348, simple_loss=0.2044, pruned_loss=0.0326, over 4974.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2275, pruned_loss=0.04447, over 972627.00 frames.], batch size: 25, lr: 4.53e-04 +2022-05-04 22:22:34,972 INFO [train.py:715] (7/8) Epoch 4, batch 15900, loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04096, over 4990.00 frames.], tot_loss[loss=0.157, simple_loss=0.2262, pruned_loss=0.04387, over 972237.34 frames.], batch size: 14, lr: 4.53e-04 +2022-05-04 22:23:14,297 INFO [train.py:715] (7/8) Epoch 4, batch 15950, loss[loss=0.2111, simple_loss=0.258, pruned_loss=0.08207, over 4655.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2268, pruned_loss=0.04406, over 972854.43 frames.], batch size: 13, lr: 4.53e-04 +2022-05-04 22:23:54,445 INFO [train.py:715] (7/8) Epoch 4, batch 16000, loss[loss=0.1954, simple_loss=0.2463, pruned_loss=0.07228, over 4865.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2265, pruned_loss=0.04385, over 972695.95 frames.], batch size: 32, lr: 4.53e-04 +2022-05-04 22:24:34,929 INFO [train.py:715] (7/8) Epoch 4, batch 16050, loss[loss=0.1659, simple_loss=0.2386, pruned_loss=0.04658, over 4973.00 frames.], tot_loss[loss=0.1573, simple_loss=0.227, pruned_loss=0.04382, over 972289.14 frames.], batch size: 33, lr: 4.53e-04 +2022-05-04 22:25:14,768 INFO [train.py:715] (7/8) Epoch 4, batch 16100, loss[loss=0.1349, simple_loss=0.2138, pruned_loss=0.02801, over 4983.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2269, pruned_loss=0.04359, over 972094.02 frames.], batch size: 24, lr: 4.52e-04 +2022-05-04 22:25:54,144 INFO [train.py:715] (7/8) Epoch 4, batch 16150, loss[loss=0.1637, simple_loss=0.2329, pruned_loss=0.04719, over 4832.00 frames.], tot_loss[loss=0.157, simple_loss=0.2269, pruned_loss=0.04355, over 972257.77 frames.], batch size: 15, lr: 4.52e-04 +2022-05-04 22:26:34,756 INFO [train.py:715] (7/8) Epoch 4, batch 16200, loss[loss=0.141, simple_loss=0.2001, pruned_loss=0.04094, over 4782.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2264, pruned_loss=0.04356, over 971960.09 frames.], batch size: 18, lr: 4.52e-04 +2022-05-04 22:27:15,074 INFO [train.py:715] (7/8) Epoch 4, batch 16250, loss[loss=0.1658, simple_loss=0.2321, pruned_loss=0.04978, over 4865.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04323, over 971997.76 frames.], batch size: 20, lr: 4.52e-04 +2022-05-04 22:27:54,411 INFO [train.py:715] (7/8) Epoch 4, batch 16300, loss[loss=0.1545, simple_loss=0.2376, pruned_loss=0.03573, over 4689.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2262, pruned_loss=0.0432, over 971662.36 frames.], batch size: 15, lr: 4.52e-04 +2022-05-04 22:28:34,989 INFO [train.py:715] (7/8) 
Epoch 4, batch 16350, loss[loss=0.1569, simple_loss=0.2152, pruned_loss=0.04928, over 4857.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2262, pruned_loss=0.04361, over 972004.45 frames.], batch size: 20, lr: 4.52e-04 +2022-05-04 22:29:15,662 INFO [train.py:715] (7/8) Epoch 4, batch 16400, loss[loss=0.161, simple_loss=0.2255, pruned_loss=0.04826, over 4745.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2265, pruned_loss=0.04367, over 972277.31 frames.], batch size: 16, lr: 4.52e-04 +2022-05-04 22:29:56,020 INFO [train.py:715] (7/8) Epoch 4, batch 16450, loss[loss=0.1609, simple_loss=0.2422, pruned_loss=0.03985, over 4775.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04328, over 972902.69 frames.], batch size: 16, lr: 4.52e-04 +2022-05-04 22:30:35,456 INFO [train.py:715] (7/8) Epoch 4, batch 16500, loss[loss=0.1577, simple_loss=0.2241, pruned_loss=0.04566, over 4889.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2253, pruned_loss=0.04321, over 972502.51 frames.], batch size: 22, lr: 4.52e-04 +2022-05-04 22:31:15,349 INFO [train.py:715] (7/8) Epoch 4, batch 16550, loss[loss=0.169, simple_loss=0.2441, pruned_loss=0.04699, over 4968.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2255, pruned_loss=0.0429, over 973415.27 frames.], batch size: 28, lr: 4.52e-04 +2022-05-04 22:31:55,172 INFO [train.py:715] (7/8) Epoch 4, batch 16600, loss[loss=0.1552, simple_loss=0.2285, pruned_loss=0.04098, over 4915.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2255, pruned_loss=0.04264, over 972469.56 frames.], batch size: 39, lr: 4.52e-04 +2022-05-04 22:32:33,993 INFO [train.py:715] (7/8) Epoch 4, batch 16650, loss[loss=0.131, simple_loss=0.198, pruned_loss=0.03194, over 4882.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2252, pruned_loss=0.04296, over 971878.34 frames.], batch size: 22, lr: 4.52e-04 +2022-05-04 22:33:12,849 INFO [train.py:715] (7/8) Epoch 4, batch 16700, loss[loss=0.1667, simple_loss=0.2415, pruned_loss=0.04589, over 4824.00 frames.], tot_loss[loss=0.1553, simple_loss=0.225, pruned_loss=0.04278, over 971817.30 frames.], batch size: 25, lr: 4.52e-04 +2022-05-04 22:33:52,194 INFO [train.py:715] (7/8) Epoch 4, batch 16750, loss[loss=0.1894, simple_loss=0.2487, pruned_loss=0.06511, over 4857.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2254, pruned_loss=0.04346, over 971984.50 frames.], batch size: 20, lr: 4.52e-04 +2022-05-04 22:34:31,639 INFO [train.py:715] (7/8) Epoch 4, batch 16800, loss[loss=0.1309, simple_loss=0.2046, pruned_loss=0.02857, over 4823.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2244, pruned_loss=0.04284, over 971994.54 frames.], batch size: 27, lr: 4.51e-04 +2022-05-04 22:35:10,399 INFO [train.py:715] (7/8) Epoch 4, batch 16850, loss[loss=0.151, simple_loss=0.223, pruned_loss=0.03953, over 4818.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2255, pruned_loss=0.0429, over 971474.34 frames.], batch size: 13, lr: 4.51e-04 +2022-05-04 22:35:50,738 INFO [train.py:715] (7/8) Epoch 4, batch 16900, loss[loss=0.1477, simple_loss=0.2283, pruned_loss=0.03353, over 4990.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2261, pruned_loss=0.04337, over 971303.94 frames.], batch size: 25, lr: 4.51e-04 +2022-05-04 22:36:31,088 INFO [train.py:715] (7/8) Epoch 4, batch 16950, loss[loss=0.1567, simple_loss=0.2421, pruned_loss=0.03568, over 4813.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2272, pruned_loss=0.04393, over 972032.76 frames.], batch size: 25, lr: 4.51e-04 +2022-05-04 22:37:10,625 INFO [train.py:715] (7/8) Epoch 4, batch 17000, 
loss[loss=0.1337, simple_loss=0.2034, pruned_loss=0.03199, over 4795.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2256, pruned_loss=0.04286, over 971475.00 frames.], batch size: 14, lr: 4.51e-04 +2022-05-04 22:37:50,453 INFO [train.py:715] (7/8) Epoch 4, batch 17050, loss[loss=0.1946, simple_loss=0.257, pruned_loss=0.06607, over 4834.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2268, pruned_loss=0.0438, over 971226.33 frames.], batch size: 15, lr: 4.51e-04 +2022-05-04 22:38:30,858 INFO [train.py:715] (7/8) Epoch 4, batch 17100, loss[loss=0.1411, simple_loss=0.2018, pruned_loss=0.04021, over 4639.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2259, pruned_loss=0.04344, over 971302.17 frames.], batch size: 13, lr: 4.51e-04 +2022-05-04 22:39:10,963 INFO [train.py:715] (7/8) Epoch 4, batch 17150, loss[loss=0.1675, simple_loss=0.2316, pruned_loss=0.05169, over 4740.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2265, pruned_loss=0.04387, over 972012.66 frames.], batch size: 16, lr: 4.51e-04 +2022-05-04 22:39:50,100 INFO [train.py:715] (7/8) Epoch 4, batch 17200, loss[loss=0.175, simple_loss=0.2424, pruned_loss=0.0538, over 4701.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2257, pruned_loss=0.04323, over 971245.21 frames.], batch size: 15, lr: 4.51e-04 +2022-05-04 22:40:30,249 INFO [train.py:715] (7/8) Epoch 4, batch 17250, loss[loss=0.1521, simple_loss=0.2255, pruned_loss=0.03934, over 4884.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2265, pruned_loss=0.04355, over 971955.15 frames.], batch size: 19, lr: 4.51e-04 +2022-05-04 22:41:10,196 INFO [train.py:715] (7/8) Epoch 4, batch 17300, loss[loss=0.181, simple_loss=0.2456, pruned_loss=0.05823, over 4955.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2257, pruned_loss=0.0431, over 972158.16 frames.], batch size: 23, lr: 4.51e-04 +2022-05-04 22:41:49,926 INFO [train.py:715] (7/8) Epoch 4, batch 17350, loss[loss=0.1544, simple_loss=0.2315, pruned_loss=0.03861, over 4929.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2265, pruned_loss=0.0435, over 972700.08 frames.], batch size: 39, lr: 4.51e-04 +2022-05-04 22:42:29,444 INFO [train.py:715] (7/8) Epoch 4, batch 17400, loss[loss=0.142, simple_loss=0.2032, pruned_loss=0.04039, over 4817.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2259, pruned_loss=0.04315, over 972424.28 frames.], batch size: 13, lr: 4.51e-04 +2022-05-04 22:43:09,754 INFO [train.py:715] (7/8) Epoch 4, batch 17450, loss[loss=0.1654, simple_loss=0.2294, pruned_loss=0.05067, over 4969.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2257, pruned_loss=0.04308, over 972284.81 frames.], batch size: 40, lr: 4.51e-04 +2022-05-04 22:43:50,028 INFO [train.py:715] (7/8) Epoch 4, batch 17500, loss[loss=0.1587, simple_loss=0.2217, pruned_loss=0.04778, over 4858.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2263, pruned_loss=0.0435, over 971925.18 frames.], batch size: 34, lr: 4.50e-04 +2022-05-04 22:44:29,245 INFO [train.py:715] (7/8) Epoch 4, batch 17550, loss[loss=0.1841, simple_loss=0.2583, pruned_loss=0.05496, over 4748.00 frames.], tot_loss[loss=0.156, simple_loss=0.2262, pruned_loss=0.0429, over 971465.78 frames.], batch size: 16, lr: 4.50e-04 +2022-05-04 22:45:09,117 INFO [train.py:715] (7/8) Epoch 4, batch 17600, loss[loss=0.1497, simple_loss=0.2253, pruned_loss=0.03707, over 4839.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2269, pruned_loss=0.04347, over 971508.61 frames.], batch size: 30, lr: 4.50e-04 +2022-05-04 22:45:49,512 INFO [train.py:715] (7/8) Epoch 4, batch 17650, loss[loss=0.1407, 
simple_loss=0.2114, pruned_loss=0.03495, over 4907.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04289, over 971247.29 frames.], batch size: 19, lr: 4.50e-04 +2022-05-04 22:46:29,572 INFO [train.py:715] (7/8) Epoch 4, batch 17700, loss[loss=0.108, simple_loss=0.174, pruned_loss=0.021, over 4794.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2262, pruned_loss=0.0436, over 971363.12 frames.], batch size: 12, lr: 4.50e-04 +2022-05-04 22:47:09,161 INFO [train.py:715] (7/8) Epoch 4, batch 17750, loss[loss=0.1253, simple_loss=0.1947, pruned_loss=0.02796, over 4763.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2251, pruned_loss=0.04301, over 969731.00 frames.], batch size: 19, lr: 4.50e-04 +2022-05-04 22:47:49,260 INFO [train.py:715] (7/8) Epoch 4, batch 17800, loss[loss=0.1477, simple_loss=0.2215, pruned_loss=0.03697, over 4874.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2258, pruned_loss=0.04391, over 970099.29 frames.], batch size: 22, lr: 4.50e-04 +2022-05-04 22:48:29,928 INFO [train.py:715] (7/8) Epoch 4, batch 17850, loss[loss=0.1409, simple_loss=0.2082, pruned_loss=0.03682, over 4806.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2252, pruned_loss=0.04333, over 970260.48 frames.], batch size: 25, lr: 4.50e-04 +2022-05-04 22:49:09,026 INFO [train.py:715] (7/8) Epoch 4, batch 17900, loss[loss=0.1461, simple_loss=0.2205, pruned_loss=0.03584, over 4986.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2257, pruned_loss=0.04353, over 971575.37 frames.], batch size: 26, lr: 4.50e-04 +2022-05-04 22:49:49,024 INFO [train.py:715] (7/8) Epoch 4, batch 17950, loss[loss=0.136, simple_loss=0.2048, pruned_loss=0.03354, over 4910.00 frames.], tot_loss[loss=0.1555, simple_loss=0.225, pruned_loss=0.04299, over 972486.66 frames.], batch size: 29, lr: 4.50e-04 +2022-05-04 22:50:29,183 INFO [train.py:715] (7/8) Epoch 4, batch 18000, loss[loss=0.1765, simple_loss=0.2317, pruned_loss=0.06063, over 4767.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2262, pruned_loss=0.04375, over 972445.58 frames.], batch size: 12, lr: 4.50e-04 +2022-05-04 22:50:29,184 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 22:50:38,824 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1119, simple_loss=0.1976, pruned_loss=0.01313, over 914524.00 frames. 
+2022-05-04 22:51:19,285 INFO [train.py:715] (7/8) Epoch 4, batch 18050, loss[loss=0.1394, simple_loss=0.2165, pruned_loss=0.03111, over 4893.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2257, pruned_loss=0.04373, over 971996.28 frames.], batch size: 19, lr: 4.50e-04 +2022-05-04 22:51:59,527 INFO [train.py:715] (7/8) Epoch 4, batch 18100, loss[loss=0.2039, simple_loss=0.2431, pruned_loss=0.08237, over 4852.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2252, pruned_loss=0.04335, over 972458.32 frames.], batch size: 12, lr: 4.50e-04 +2022-05-04 22:52:39,108 INFO [train.py:715] (7/8) Epoch 4, batch 18150, loss[loss=0.194, simple_loss=0.2661, pruned_loss=0.06096, over 4788.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2259, pruned_loss=0.04357, over 972262.53 frames.], batch size: 18, lr: 4.50e-04 +2022-05-04 22:53:19,406 INFO [train.py:715] (7/8) Epoch 4, batch 18200, loss[loss=0.1377, simple_loss=0.2166, pruned_loss=0.02936, over 4901.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2258, pruned_loss=0.04351, over 972623.88 frames.], batch size: 19, lr: 4.49e-04 +2022-05-04 22:53:59,870 INFO [train.py:715] (7/8) Epoch 4, batch 18250, loss[loss=0.1493, simple_loss=0.2126, pruned_loss=0.04304, over 4966.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2257, pruned_loss=0.0436, over 973110.01 frames.], batch size: 15, lr: 4.49e-04 +2022-05-04 22:54:39,570 INFO [train.py:715] (7/8) Epoch 4, batch 18300, loss[loss=0.1586, simple_loss=0.2303, pruned_loss=0.04344, over 4898.00 frames.], tot_loss[loss=0.156, simple_loss=0.2254, pruned_loss=0.0433, over 973748.88 frames.], batch size: 17, lr: 4.49e-04 +2022-05-04 22:55:19,280 INFO [train.py:715] (7/8) Epoch 4, batch 18350, loss[loss=0.1463, simple_loss=0.2177, pruned_loss=0.03746, over 4921.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.04357, over 973667.37 frames.], batch size: 23, lr: 4.49e-04 +2022-05-04 22:56:00,394 INFO [train.py:715] (7/8) Epoch 4, batch 18400, loss[loss=0.1664, simple_loss=0.2435, pruned_loss=0.04465, over 4745.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2269, pruned_loss=0.04394, over 973080.03 frames.], batch size: 19, lr: 4.49e-04 +2022-05-04 22:56:40,808 INFO [train.py:715] (7/8) Epoch 4, batch 18450, loss[loss=0.1692, simple_loss=0.2335, pruned_loss=0.05245, over 4865.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2284, pruned_loss=0.04503, over 973071.88 frames.], batch size: 34, lr: 4.49e-04 +2022-05-04 22:57:20,898 INFO [train.py:715] (7/8) Epoch 4, batch 18500, loss[loss=0.1468, simple_loss=0.2293, pruned_loss=0.03221, over 4971.00 frames.], tot_loss[loss=0.158, simple_loss=0.2276, pruned_loss=0.04421, over 972916.90 frames.], batch size: 14, lr: 4.49e-04 +2022-05-04 22:58:01,188 INFO [train.py:715] (7/8) Epoch 4, batch 18550, loss[loss=0.1198, simple_loss=0.1921, pruned_loss=0.02371, over 4740.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2275, pruned_loss=0.04393, over 972966.07 frames.], batch size: 12, lr: 4.49e-04 +2022-05-04 22:58:41,892 INFO [train.py:715] (7/8) Epoch 4, batch 18600, loss[loss=0.1379, simple_loss=0.2103, pruned_loss=0.03271, over 4838.00 frames.], tot_loss[loss=0.1561, simple_loss=0.226, pruned_loss=0.04312, over 972773.74 frames.], batch size: 15, lr: 4.49e-04 +2022-05-04 22:59:21,459 INFO [train.py:715] (7/8) Epoch 4, batch 18650, loss[loss=0.166, simple_loss=0.2164, pruned_loss=0.05782, over 4699.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2267, pruned_loss=0.0438, over 972519.14 frames.], batch size: 15, lr: 4.49e-04 +2022-05-04 23:00:01,607 
INFO [train.py:715] (7/8) Epoch 4, batch 18700, loss[loss=0.1597, simple_loss=0.228, pruned_loss=0.04572, over 4813.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2273, pruned_loss=0.04402, over 972498.72 frames.], batch size: 27, lr: 4.49e-04 +2022-05-04 23:00:42,466 INFO [train.py:715] (7/8) Epoch 4, batch 18750, loss[loss=0.1827, simple_loss=0.2476, pruned_loss=0.05892, over 4842.00 frames.], tot_loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.04427, over 971919.72 frames.], batch size: 34, lr: 4.49e-04 +2022-05-04 23:01:21,943 INFO [train.py:715] (7/8) Epoch 4, batch 18800, loss[loss=0.1627, simple_loss=0.236, pruned_loss=0.04473, over 4796.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2271, pruned_loss=0.04422, over 972038.74 frames.], batch size: 21, lr: 4.49e-04 +2022-05-04 23:02:02,023 INFO [train.py:715] (7/8) Epoch 4, batch 18850, loss[loss=0.1428, simple_loss=0.22, pruned_loss=0.03286, over 4750.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2275, pruned_loss=0.04442, over 972073.30 frames.], batch size: 16, lr: 4.49e-04 +2022-05-04 23:02:42,439 INFO [train.py:715] (7/8) Epoch 4, batch 18900, loss[loss=0.1914, simple_loss=0.2607, pruned_loss=0.06108, over 4924.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2262, pruned_loss=0.04396, over 971835.51 frames.], batch size: 18, lr: 4.48e-04 +2022-05-04 23:03:22,746 INFO [train.py:715] (7/8) Epoch 4, batch 18950, loss[loss=0.1371, simple_loss=0.2113, pruned_loss=0.0314, over 4706.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2253, pruned_loss=0.04349, over 972065.87 frames.], batch size: 15, lr: 4.48e-04 +2022-05-04 23:04:01,995 INFO [train.py:715] (7/8) Epoch 4, batch 19000, loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03042, over 4965.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2255, pruned_loss=0.04328, over 971416.50 frames.], batch size: 15, lr: 4.48e-04 +2022-05-04 23:04:42,499 INFO [train.py:715] (7/8) Epoch 4, batch 19050, loss[loss=0.1828, simple_loss=0.2451, pruned_loss=0.06024, over 4766.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2262, pruned_loss=0.0438, over 971564.95 frames.], batch size: 19, lr: 4.48e-04 +2022-05-04 23:05:23,219 INFO [train.py:715] (7/8) Epoch 4, batch 19100, loss[loss=0.159, simple_loss=0.2384, pruned_loss=0.0398, over 4810.00 frames.], tot_loss[loss=0.156, simple_loss=0.2256, pruned_loss=0.04324, over 972633.92 frames.], batch size: 24, lr: 4.48e-04 +2022-05-04 23:06:03,172 INFO [train.py:715] (7/8) Epoch 4, batch 19150, loss[loss=0.1563, simple_loss=0.2358, pruned_loss=0.03842, over 4696.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2257, pruned_loss=0.04298, over 972284.11 frames.], batch size: 15, lr: 4.48e-04 +2022-05-04 23:06:43,533 INFO [train.py:715] (7/8) Epoch 4, batch 19200, loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03991, over 4808.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2249, pruned_loss=0.0422, over 973396.38 frames.], batch size: 21, lr: 4.48e-04 +2022-05-04 23:07:24,307 INFO [train.py:715] (7/8) Epoch 4, batch 19250, loss[loss=0.129, simple_loss=0.2017, pruned_loss=0.02811, over 4939.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2248, pruned_loss=0.04246, over 973716.88 frames.], batch size: 18, lr: 4.48e-04 +2022-05-04 23:08:04,909 INFO [train.py:715] (7/8) Epoch 4, batch 19300, loss[loss=0.1131, simple_loss=0.1831, pruned_loss=0.02161, over 4738.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2243, pruned_loss=0.04224, over 973401.40 frames.], batch size: 16, lr: 4.48e-04 +2022-05-04 23:08:44,083 INFO [train.py:715] (7/8) 
Epoch 4, batch 19350, loss[loss=0.1766, simple_loss=0.2338, pruned_loss=0.05967, over 4700.00 frames.], tot_loss[loss=0.1554, simple_loss=0.225, pruned_loss=0.04292, over 972927.47 frames.], batch size: 15, lr: 4.48e-04 +2022-05-04 23:09:24,775 INFO [train.py:715] (7/8) Epoch 4, batch 19400, loss[loss=0.1223, simple_loss=0.1828, pruned_loss=0.03091, over 4840.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2251, pruned_loss=0.04264, over 972438.95 frames.], batch size: 13, lr: 4.48e-04 +2022-05-04 23:10:06,275 INFO [train.py:715] (7/8) Epoch 4, batch 19450, loss[loss=0.1365, simple_loss=0.2069, pruned_loss=0.03307, over 4784.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2249, pruned_loss=0.04279, over 972744.92 frames.], batch size: 14, lr: 4.48e-04 +2022-05-04 23:10:47,438 INFO [train.py:715] (7/8) Epoch 4, batch 19500, loss[loss=0.136, simple_loss=0.2076, pruned_loss=0.03216, over 4836.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2252, pruned_loss=0.04292, over 972045.44 frames.], batch size: 20, lr: 4.48e-04 +2022-05-04 23:11:27,085 INFO [train.py:715] (7/8) Epoch 4, batch 19550, loss[loss=0.1457, simple_loss=0.2125, pruned_loss=0.03947, over 4759.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2256, pruned_loss=0.04282, over 972347.85 frames.], batch size: 14, lr: 4.48e-04 +2022-05-04 23:12:07,483 INFO [train.py:715] (7/8) Epoch 4, batch 19600, loss[loss=0.1526, simple_loss=0.2242, pruned_loss=0.04057, over 4953.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2256, pruned_loss=0.04302, over 972263.73 frames.], batch size: 39, lr: 4.47e-04 +2022-05-04 23:12:47,697 INFO [train.py:715] (7/8) Epoch 4, batch 19650, loss[loss=0.1574, simple_loss=0.228, pruned_loss=0.0434, over 4741.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2251, pruned_loss=0.0428, over 971977.11 frames.], batch size: 19, lr: 4.47e-04 +2022-05-04 23:13:26,466 INFO [train.py:715] (7/8) Epoch 4, batch 19700, loss[loss=0.1419, simple_loss=0.205, pruned_loss=0.03938, over 4743.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2255, pruned_loss=0.04283, over 972336.50 frames.], batch size: 16, lr: 4.47e-04 +2022-05-04 23:14:07,146 INFO [train.py:715] (7/8) Epoch 4, batch 19750, loss[loss=0.1721, simple_loss=0.2358, pruned_loss=0.05415, over 4985.00 frames.], tot_loss[loss=0.1562, simple_loss=0.226, pruned_loss=0.04314, over 972795.06 frames.], batch size: 15, lr: 4.47e-04 +2022-05-04 23:14:47,971 INFO [train.py:715] (7/8) Epoch 4, batch 19800, loss[loss=0.1778, simple_loss=0.2392, pruned_loss=0.05818, over 4912.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2257, pruned_loss=0.04288, over 972857.10 frames.], batch size: 19, lr: 4.47e-04 +2022-05-04 23:15:27,712 INFO [train.py:715] (7/8) Epoch 4, batch 19850, loss[loss=0.1827, simple_loss=0.2533, pruned_loss=0.05603, over 4788.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2267, pruned_loss=0.04306, over 973274.94 frames.], batch size: 24, lr: 4.47e-04 +2022-05-04 23:16:07,786 INFO [train.py:715] (7/8) Epoch 4, batch 19900, loss[loss=0.1524, simple_loss=0.2187, pruned_loss=0.04305, over 4972.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2264, pruned_loss=0.04322, over 972410.03 frames.], batch size: 15, lr: 4.47e-04 +2022-05-04 23:16:47,903 INFO [train.py:715] (7/8) Epoch 4, batch 19950, loss[loss=0.1672, simple_loss=0.2385, pruned_loss=0.0479, over 4776.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2269, pruned_loss=0.0432, over 972247.85 frames.], batch size: 18, lr: 4.47e-04 +2022-05-04 23:17:28,067 INFO [train.py:715] (7/8) Epoch 4, batch 20000, 
loss[loss=0.162, simple_loss=0.2292, pruned_loss=0.0474, over 4890.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2275, pruned_loss=0.04376, over 972783.80 frames.], batch size: 17, lr: 4.47e-04 +2022-05-04 23:18:06,771 INFO [train.py:715] (7/8) Epoch 4, batch 20050, loss[loss=0.1437, simple_loss=0.2171, pruned_loss=0.03515, over 4805.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2267, pruned_loss=0.04356, over 972709.14 frames.], batch size: 21, lr: 4.47e-04 +2022-05-04 23:18:46,565 INFO [train.py:715] (7/8) Epoch 4, batch 20100, loss[loss=0.1576, simple_loss=0.2422, pruned_loss=0.03652, over 4965.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2263, pruned_loss=0.04333, over 972393.48 frames.], batch size: 15, lr: 4.47e-04 +2022-05-04 23:19:26,630 INFO [train.py:715] (7/8) Epoch 4, batch 20150, loss[loss=0.1643, simple_loss=0.239, pruned_loss=0.04483, over 4941.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2254, pruned_loss=0.04253, over 972211.73 frames.], batch size: 29, lr: 4.47e-04 +2022-05-04 23:20:06,053 INFO [train.py:715] (7/8) Epoch 4, batch 20200, loss[loss=0.1545, simple_loss=0.2237, pruned_loss=0.04262, over 4865.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2246, pruned_loss=0.04215, over 972481.85 frames.], batch size: 32, lr: 4.47e-04 +2022-05-04 23:20:45,795 INFO [train.py:715] (7/8) Epoch 4, batch 20250, loss[loss=0.142, simple_loss=0.2123, pruned_loss=0.03586, over 4901.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2251, pruned_loss=0.04238, over 972670.13 frames.], batch size: 19, lr: 4.47e-04 +2022-05-04 23:21:26,114 INFO [train.py:715] (7/8) Epoch 4, batch 20300, loss[loss=0.118, simple_loss=0.1893, pruned_loss=0.02338, over 4766.00 frames.], tot_loss[loss=0.1545, simple_loss=0.225, pruned_loss=0.04201, over 972993.37 frames.], batch size: 18, lr: 4.46e-04 +2022-05-04 23:22:06,210 INFO [train.py:715] (7/8) Epoch 4, batch 20350, loss[loss=0.1544, simple_loss=0.2288, pruned_loss=0.03996, over 4795.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2251, pruned_loss=0.04251, over 972682.31 frames.], batch size: 24, lr: 4.46e-04 +2022-05-04 23:22:45,052 INFO [train.py:715] (7/8) Epoch 4, batch 20400, loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03822, over 4979.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2233, pruned_loss=0.04191, over 972689.70 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:23:25,039 INFO [train.py:715] (7/8) Epoch 4, batch 20450, loss[loss=0.1418, simple_loss=0.2186, pruned_loss=0.03244, over 4836.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2242, pruned_loss=0.04265, over 973288.40 frames.], batch size: 30, lr: 4.46e-04 +2022-05-04 23:24:04,958 INFO [train.py:715] (7/8) Epoch 4, batch 20500, loss[loss=0.1469, simple_loss=0.2255, pruned_loss=0.0342, over 4771.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2245, pruned_loss=0.04254, over 971795.44 frames.], batch size: 18, lr: 4.46e-04 +2022-05-04 23:24:44,750 INFO [train.py:715] (7/8) Epoch 4, batch 20550, loss[loss=0.15, simple_loss=0.2227, pruned_loss=0.03867, over 4856.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2246, pruned_loss=0.04258, over 972659.43 frames.], batch size: 20, lr: 4.46e-04 +2022-05-04 23:25:23,722 INFO [train.py:715] (7/8) Epoch 4, batch 20600, loss[loss=0.1608, simple_loss=0.2315, pruned_loss=0.04506, over 4826.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2247, pruned_loss=0.04292, over 971961.75 frames.], batch size: 27, lr: 4.46e-04 +2022-05-04 23:26:03,658 INFO [train.py:715] (7/8) Epoch 4, batch 20650, loss[loss=0.1525, 
simple_loss=0.2314, pruned_loss=0.03676, over 4772.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2248, pruned_loss=0.04295, over 972484.22 frames.], batch size: 18, lr: 4.46e-04 +2022-05-04 23:26:44,154 INFO [train.py:715] (7/8) Epoch 4, batch 20700, loss[loss=0.1807, simple_loss=0.2481, pruned_loss=0.05662, over 4701.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2251, pruned_loss=0.0429, over 972206.90 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:27:22,813 INFO [train.py:715] (7/8) Epoch 4, batch 20750, loss[loss=0.1381, simple_loss=0.1914, pruned_loss=0.04241, over 4787.00 frames.], tot_loss[loss=0.1552, simple_loss=0.225, pruned_loss=0.0427, over 972153.21 frames.], batch size: 12, lr: 4.46e-04 +2022-05-04 23:28:04,817 INFO [train.py:715] (7/8) Epoch 4, batch 20800, loss[loss=0.1422, simple_loss=0.2074, pruned_loss=0.03847, over 4981.00 frames.], tot_loss[loss=0.1551, simple_loss=0.225, pruned_loss=0.04264, over 972752.59 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:28:44,599 INFO [train.py:715] (7/8) Epoch 4, batch 20850, loss[loss=0.138, simple_loss=0.2176, pruned_loss=0.02916, over 4909.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2241, pruned_loss=0.04211, over 972101.93 frames.], batch size: 19, lr: 4.46e-04 +2022-05-04 23:29:24,438 INFO [train.py:715] (7/8) Epoch 4, batch 20900, loss[loss=0.1942, simple_loss=0.248, pruned_loss=0.07025, over 4824.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2246, pruned_loss=0.04256, over 971959.74 frames.], batch size: 15, lr: 4.46e-04 +2022-05-04 23:30:03,475 INFO [train.py:715] (7/8) Epoch 4, batch 20950, loss[loss=0.1797, simple_loss=0.2454, pruned_loss=0.05699, over 4977.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2252, pruned_loss=0.04301, over 971731.73 frames.], batch size: 24, lr: 4.46e-04 +2022-05-04 23:30:43,450 INFO [train.py:715] (7/8) Epoch 4, batch 21000, loss[loss=0.1732, simple_loss=0.2377, pruned_loss=0.05437, over 4872.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2248, pruned_loss=0.04269, over 971844.86 frames.], batch size: 32, lr: 4.46e-04 +2022-05-04 23:30:43,451 INFO [train.py:733] (7/8) Computing validation loss +2022-05-04 23:30:52,895 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1116, simple_loss=0.1973, pruned_loss=0.01293, over 914524.00 frames. 
+2022-05-04 23:31:33,185 INFO [train.py:715] (7/8) Epoch 4, batch 21050, loss[loss=0.1453, simple_loss=0.2163, pruned_loss=0.03717, over 4831.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2252, pruned_loss=0.04298, over 971904.11 frames.], batch size: 15, lr: 4.45e-04 +2022-05-04 23:32:12,987 INFO [train.py:715] (7/8) Epoch 4, batch 21100, loss[loss=0.1401, simple_loss=0.2135, pruned_loss=0.03336, over 4865.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2253, pruned_loss=0.04277, over 972360.89 frames.], batch size: 32, lr: 4.45e-04 +2022-05-04 23:32:52,574 INFO [train.py:715] (7/8) Epoch 4, batch 21150, loss[loss=0.1674, simple_loss=0.2283, pruned_loss=0.05326, over 4872.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2241, pruned_loss=0.04207, over 972664.33 frames.], batch size: 16, lr: 4.45e-04 +2022-05-04 23:33:32,148 INFO [train.py:715] (7/8) Epoch 4, batch 21200, loss[loss=0.1246, simple_loss=0.1984, pruned_loss=0.02539, over 4693.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2242, pruned_loss=0.04205, over 972878.15 frames.], batch size: 15, lr: 4.45e-04 +2022-05-04 23:34:12,365 INFO [train.py:715] (7/8) Epoch 4, batch 21250, loss[loss=0.1834, simple_loss=0.2439, pruned_loss=0.06143, over 4797.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2242, pruned_loss=0.04215, over 972841.90 frames.], batch size: 14, lr: 4.45e-04 +2022-05-04 23:34:51,184 INFO [train.py:715] (7/8) Epoch 4, batch 21300, loss[loss=0.151, simple_loss=0.2283, pruned_loss=0.03684, over 4801.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2255, pruned_loss=0.04279, over 972874.15 frames.], batch size: 21, lr: 4.45e-04 +2022-05-04 23:35:30,243 INFO [train.py:715] (7/8) Epoch 4, batch 21350, loss[loss=0.1613, simple_loss=0.2367, pruned_loss=0.04296, over 4693.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2257, pruned_loss=0.04288, over 972913.57 frames.], batch size: 15, lr: 4.45e-04 +2022-05-04 23:36:09,891 INFO [train.py:715] (7/8) Epoch 4, batch 21400, loss[loss=0.1255, simple_loss=0.193, pruned_loss=0.02898, over 4903.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2253, pruned_loss=0.0429, over 973622.50 frames.], batch size: 18, lr: 4.45e-04 +2022-05-04 23:36:49,453 INFO [train.py:715] (7/8) Epoch 4, batch 21450, loss[loss=0.1644, simple_loss=0.2425, pruned_loss=0.04316, over 4926.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2249, pruned_loss=0.04265, over 973501.09 frames.], batch size: 18, lr: 4.45e-04 +2022-05-04 23:37:28,644 INFO [train.py:715] (7/8) Epoch 4, batch 21500, loss[loss=0.1723, simple_loss=0.2384, pruned_loss=0.0531, over 4930.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2254, pruned_loss=0.0431, over 972494.76 frames.], batch size: 39, lr: 4.45e-04 +2022-05-04 23:38:08,473 INFO [train.py:715] (7/8) Epoch 4, batch 21550, loss[loss=0.1917, simple_loss=0.2635, pruned_loss=0.05995, over 4882.00 frames.], tot_loss[loss=0.157, simple_loss=0.2266, pruned_loss=0.04374, over 972707.05 frames.], batch size: 22, lr: 4.45e-04 +2022-05-04 23:38:48,846 INFO [train.py:715] (7/8) Epoch 4, batch 21600, loss[loss=0.1218, simple_loss=0.195, pruned_loss=0.02434, over 4844.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2254, pruned_loss=0.04284, over 972727.80 frames.], batch size: 13, lr: 4.45e-04 +2022-05-04 23:39:28,100 INFO [train.py:715] (7/8) Epoch 4, batch 21650, loss[loss=0.1951, simple_loss=0.2637, pruned_loss=0.06322, over 4806.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2256, pruned_loss=0.04302, over 972217.20 frames.], batch size: 25, lr: 4.45e-04 +2022-05-04 23:40:08,350 
INFO [train.py:715] (7/8) Epoch 4, batch 21700, loss[loss=0.15, simple_loss=0.2147, pruned_loss=0.04262, over 4877.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04294, over 972584.10 frames.], batch size: 22, lr: 4.45e-04 +2022-05-04 23:40:49,360 INFO [train.py:715] (7/8) Epoch 4, batch 21750, loss[loss=0.1347, simple_loss=0.2058, pruned_loss=0.03182, over 4787.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2251, pruned_loss=0.04309, over 972866.88 frames.], batch size: 17, lr: 4.44e-04 +2022-05-04 23:41:29,015 INFO [train.py:715] (7/8) Epoch 4, batch 21800, loss[loss=0.153, simple_loss=0.2233, pruned_loss=0.04129, over 4801.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2259, pruned_loss=0.04344, over 972973.98 frames.], batch size: 21, lr: 4.44e-04 +2022-05-04 23:42:08,602 INFO [train.py:715] (7/8) Epoch 4, batch 21850, loss[loss=0.1591, simple_loss=0.2265, pruned_loss=0.04583, over 4990.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2265, pruned_loss=0.04389, over 973227.44 frames.], batch size: 26, lr: 4.44e-04 +2022-05-04 23:42:48,651 INFO [train.py:715] (7/8) Epoch 4, batch 21900, loss[loss=0.1696, simple_loss=0.2499, pruned_loss=0.04466, over 4788.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2265, pruned_loss=0.04399, over 973190.34 frames.], batch size: 17, lr: 4.44e-04 +2022-05-04 23:43:29,091 INFO [train.py:715] (7/8) Epoch 4, batch 21950, loss[loss=0.1645, simple_loss=0.2383, pruned_loss=0.0453, over 4898.00 frames.], tot_loss[loss=0.157, simple_loss=0.2263, pruned_loss=0.04388, over 972463.68 frames.], batch size: 19, lr: 4.44e-04 +2022-05-04 23:44:08,291 INFO [train.py:715] (7/8) Epoch 4, batch 22000, loss[loss=0.1747, simple_loss=0.2452, pruned_loss=0.05208, over 4973.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2264, pruned_loss=0.04364, over 971932.10 frames.], batch size: 15, lr: 4.44e-04 +2022-05-04 23:44:48,080 INFO [train.py:715] (7/8) Epoch 4, batch 22050, loss[loss=0.1432, simple_loss=0.2274, pruned_loss=0.02949, over 4887.00 frames.], tot_loss[loss=0.156, simple_loss=0.2255, pruned_loss=0.04323, over 972252.19 frames.], batch size: 22, lr: 4.44e-04 +2022-05-04 23:45:28,541 INFO [train.py:715] (7/8) Epoch 4, batch 22100, loss[loss=0.1264, simple_loss=0.2068, pruned_loss=0.02299, over 4975.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2268, pruned_loss=0.04424, over 971618.97 frames.], batch size: 28, lr: 4.44e-04 +2022-05-04 23:46:08,388 INFO [train.py:715] (7/8) Epoch 4, batch 22150, loss[loss=0.1476, simple_loss=0.2293, pruned_loss=0.03292, over 4873.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2268, pruned_loss=0.04395, over 971895.46 frames.], batch size: 20, lr: 4.44e-04 +2022-05-04 23:46:47,295 INFO [train.py:715] (7/8) Epoch 4, batch 22200, loss[loss=0.1434, simple_loss=0.2191, pruned_loss=0.0338, over 4934.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.0435, over 971560.30 frames.], batch size: 23, lr: 4.44e-04 +2022-05-04 23:47:27,363 INFO [train.py:715] (7/8) Epoch 4, batch 22250, loss[loss=0.1621, simple_loss=0.2188, pruned_loss=0.05273, over 4777.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2261, pruned_loss=0.04343, over 972154.44 frames.], batch size: 17, lr: 4.44e-04 +2022-05-04 23:48:07,759 INFO [train.py:715] (7/8) Epoch 4, batch 22300, loss[loss=0.1484, simple_loss=0.2188, pruned_loss=0.03897, over 4882.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2257, pruned_loss=0.0433, over 971890.32 frames.], batch size: 22, lr: 4.44e-04 +2022-05-04 23:48:46,515 INFO [train.py:715] (7/8) 
Epoch 4, batch 22350, loss[loss=0.1253, simple_loss=0.2054, pruned_loss=0.02261, over 4781.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2258, pruned_loss=0.04372, over 972469.68 frames.], batch size: 18, lr: 4.44e-04 +2022-05-04 23:49:25,542 INFO [train.py:715] (7/8) Epoch 4, batch 22400, loss[loss=0.1653, simple_loss=0.2341, pruned_loss=0.04827, over 4860.00 frames.], tot_loss[loss=0.1565, simple_loss=0.226, pruned_loss=0.04355, over 972989.25 frames.], batch size: 20, lr: 4.44e-04 +2022-05-04 23:50:06,131 INFO [train.py:715] (7/8) Epoch 4, batch 22450, loss[loss=0.1877, simple_loss=0.2571, pruned_loss=0.05914, over 4888.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2271, pruned_loss=0.04422, over 972249.52 frames.], batch size: 16, lr: 4.44e-04 +2022-05-04 23:50:45,315 INFO [train.py:715] (7/8) Epoch 4, batch 22500, loss[loss=0.1527, simple_loss=0.2281, pruned_loss=0.0387, over 4785.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2264, pruned_loss=0.04366, over 971992.57 frames.], batch size: 17, lr: 4.43e-04 +2022-05-04 23:51:24,260 INFO [train.py:715] (7/8) Epoch 4, batch 22550, loss[loss=0.1691, simple_loss=0.2377, pruned_loss=0.05025, over 4845.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2266, pruned_loss=0.044, over 972293.92 frames.], batch size: 15, lr: 4.43e-04 +2022-05-04 23:52:04,196 INFO [train.py:715] (7/8) Epoch 4, batch 22600, loss[loss=0.1684, simple_loss=0.2203, pruned_loss=0.05828, over 4777.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04303, over 971787.34 frames.], batch size: 17, lr: 4.43e-04 +2022-05-04 23:52:44,022 INFO [train.py:715] (7/8) Epoch 4, batch 22650, loss[loss=0.1355, simple_loss=0.216, pruned_loss=0.0275, over 4909.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2258, pruned_loss=0.04301, over 972333.40 frames.], batch size: 17, lr: 4.43e-04 +2022-05-04 23:53:22,947 INFO [train.py:715] (7/8) Epoch 4, batch 22700, loss[loss=0.1811, simple_loss=0.2432, pruned_loss=0.05948, over 4829.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2252, pruned_loss=0.04248, over 972456.69 frames.], batch size: 27, lr: 4.43e-04 +2022-05-04 23:54:02,345 INFO [train.py:715] (7/8) Epoch 4, batch 22750, loss[loss=0.159, simple_loss=0.2278, pruned_loss=0.04504, over 4896.00 frames.], tot_loss[loss=0.1549, simple_loss=0.225, pruned_loss=0.04244, over 972751.33 frames.], batch size: 17, lr: 4.43e-04 +2022-05-04 23:54:42,046 INFO [train.py:715] (7/8) Epoch 4, batch 22800, loss[loss=0.2094, simple_loss=0.2722, pruned_loss=0.07329, over 4936.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2247, pruned_loss=0.04188, over 972658.25 frames.], batch size: 21, lr: 4.43e-04 +2022-05-04 23:55:21,168 INFO [train.py:715] (7/8) Epoch 4, batch 22850, loss[loss=0.1884, simple_loss=0.2594, pruned_loss=0.05874, over 4906.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2253, pruned_loss=0.04226, over 973461.74 frames.], batch size: 39, lr: 4.43e-04 +2022-05-04 23:55:59,900 INFO [train.py:715] (7/8) Epoch 4, batch 22900, loss[loss=0.1646, simple_loss=0.231, pruned_loss=0.04907, over 4795.00 frames.], tot_loss[loss=0.1549, simple_loss=0.225, pruned_loss=0.04239, over 973075.77 frames.], batch size: 24, lr: 4.43e-04 +2022-05-04 23:56:39,567 INFO [train.py:715] (7/8) Epoch 4, batch 22950, loss[loss=0.1907, simple_loss=0.242, pruned_loss=0.06975, over 4843.00 frames.], tot_loss[loss=0.156, simple_loss=0.226, pruned_loss=0.043, over 972616.97 frames.], batch size: 32, lr: 4.43e-04 +2022-05-04 23:57:19,677 INFO [train.py:715] (7/8) Epoch 4, batch 23000, 
loss[loss=0.1282, simple_loss=0.2033, pruned_loss=0.02658, over 4847.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04307, over 972945.18 frames.], batch size: 20, lr: 4.43e-04 +2022-05-04 23:57:58,017 INFO [train.py:715] (7/8) Epoch 4, batch 23050, loss[loss=0.1893, simple_loss=0.2527, pruned_loss=0.06298, over 4941.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2269, pruned_loss=0.04363, over 972830.27 frames.], batch size: 23, lr: 4.43e-04 +2022-05-04 23:58:37,637 INFO [train.py:715] (7/8) Epoch 4, batch 23100, loss[loss=0.1524, simple_loss=0.2277, pruned_loss=0.0385, over 4992.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2275, pruned_loss=0.04379, over 973263.26 frames.], batch size: 14, lr: 4.43e-04 +2022-05-04 23:59:18,005 INFO [train.py:715] (7/8) Epoch 4, batch 23150, loss[loss=0.1559, simple_loss=0.2158, pruned_loss=0.04795, over 4793.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2273, pruned_loss=0.04399, over 973503.34 frames.], batch size: 14, lr: 4.43e-04 +2022-05-04 23:59:57,816 INFO [train.py:715] (7/8) Epoch 4, batch 23200, loss[loss=0.1712, simple_loss=0.2324, pruned_loss=0.05495, over 4857.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2278, pruned_loss=0.04427, over 973203.25 frames.], batch size: 30, lr: 4.42e-04 +2022-05-05 00:00:36,523 INFO [train.py:715] (7/8) Epoch 4, batch 23250, loss[loss=0.1764, simple_loss=0.2457, pruned_loss=0.0535, over 4948.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2282, pruned_loss=0.04469, over 972715.16 frames.], batch size: 21, lr: 4.42e-04 +2022-05-05 00:01:16,400 INFO [train.py:715] (7/8) Epoch 4, batch 23300, loss[loss=0.132, simple_loss=0.21, pruned_loss=0.02699, over 4897.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2276, pruned_loss=0.04455, over 972699.12 frames.], batch size: 22, lr: 4.42e-04 +2022-05-05 00:01:56,692 INFO [train.py:715] (7/8) Epoch 4, batch 23350, loss[loss=0.1421, simple_loss=0.2116, pruned_loss=0.03634, over 4904.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2271, pruned_loss=0.04417, over 972930.28 frames.], batch size: 19, lr: 4.42e-04 +2022-05-05 00:02:35,078 INFO [train.py:715] (7/8) Epoch 4, batch 23400, loss[loss=0.1716, simple_loss=0.2373, pruned_loss=0.0529, over 4955.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2282, pruned_loss=0.04451, over 972738.07 frames.], batch size: 29, lr: 4.42e-04 +2022-05-05 00:03:14,428 INFO [train.py:715] (7/8) Epoch 4, batch 23450, loss[loss=0.152, simple_loss=0.2153, pruned_loss=0.04436, over 4884.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2277, pruned_loss=0.04431, over 971518.22 frames.], batch size: 32, lr: 4.42e-04 +2022-05-05 00:03:55,002 INFO [train.py:715] (7/8) Epoch 4, batch 23500, loss[loss=0.1701, simple_loss=0.2514, pruned_loss=0.04436, over 4894.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2258, pruned_loss=0.04354, over 971974.47 frames.], batch size: 22, lr: 4.42e-04 +2022-05-05 00:04:33,371 INFO [train.py:715] (7/8) Epoch 4, batch 23550, loss[loss=0.1564, simple_loss=0.2287, pruned_loss=0.04208, over 4800.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2253, pruned_loss=0.04324, over 972276.27 frames.], batch size: 21, lr: 4.42e-04 +2022-05-05 00:05:12,659 INFO [train.py:715] (7/8) Epoch 4, batch 23600, loss[loss=0.1621, simple_loss=0.24, pruned_loss=0.04212, over 4816.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2256, pruned_loss=0.04348, over 971561.97 frames.], batch size: 13, lr: 4.42e-04 +2022-05-05 00:05:53,469 INFO [train.py:715] (7/8) Epoch 4, batch 23650, loss[loss=0.1768, 
simple_loss=0.2465, pruned_loss=0.0536, over 4766.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2265, pruned_loss=0.04425, over 971809.17 frames.], batch size: 19, lr: 4.42e-04 +2022-05-05 00:06:34,863 INFO [train.py:715] (7/8) Epoch 4, batch 23700, loss[loss=0.1627, simple_loss=0.2332, pruned_loss=0.0461, over 4941.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2268, pruned_loss=0.04419, over 971526.15 frames.], batch size: 21, lr: 4.42e-04 +2022-05-05 00:07:14,377 INFO [train.py:715] (7/8) Epoch 4, batch 23750, loss[loss=0.167, simple_loss=0.2347, pruned_loss=0.04966, over 4698.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2279, pruned_loss=0.04459, over 972252.50 frames.], batch size: 15, lr: 4.42e-04 +2022-05-05 00:07:53,778 INFO [train.py:715] (7/8) Epoch 4, batch 23800, loss[loss=0.1887, simple_loss=0.2597, pruned_loss=0.05882, over 4978.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2277, pruned_loss=0.04502, over 972052.50 frames.], batch size: 24, lr: 4.42e-04 +2022-05-05 00:08:34,381 INFO [train.py:715] (7/8) Epoch 4, batch 23850, loss[loss=0.1363, simple_loss=0.199, pruned_loss=0.03681, over 4848.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2273, pruned_loss=0.04469, over 972077.40 frames.], batch size: 13, lr: 4.42e-04 +2022-05-05 00:09:13,908 INFO [train.py:715] (7/8) Epoch 4, batch 23900, loss[loss=0.1597, simple_loss=0.2204, pruned_loss=0.04952, over 4765.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2268, pruned_loss=0.04392, over 972044.44 frames.], batch size: 17, lr: 4.42e-04 +2022-05-05 00:09:53,721 INFO [train.py:715] (7/8) Epoch 4, batch 23950, loss[loss=0.1549, simple_loss=0.2303, pruned_loss=0.0397, over 4845.00 frames.], tot_loss[loss=0.157, simple_loss=0.2263, pruned_loss=0.0438, over 972591.12 frames.], batch size: 13, lr: 4.41e-04 +2022-05-05 00:10:34,505 INFO [train.py:715] (7/8) Epoch 4, batch 24000, loss[loss=0.1372, simple_loss=0.1983, pruned_loss=0.03799, over 4866.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2265, pruned_loss=0.04357, over 971709.52 frames.], batch size: 20, lr: 4.41e-04 +2022-05-05 00:10:34,506 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 00:10:44,332 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1115, simple_loss=0.1974, pruned_loss=0.01276, over 914524.00 frames. 
+2022-05-05 00:11:25,475 INFO [train.py:715] (7/8) Epoch 4, batch 24050, loss[loss=0.1551, simple_loss=0.2231, pruned_loss=0.04359, over 4834.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2269, pruned_loss=0.04383, over 971293.94 frames.], batch size: 26, lr: 4.41e-04 +2022-05-05 00:12:06,058 INFO [train.py:715] (7/8) Epoch 4, batch 24100, loss[loss=0.146, simple_loss=0.2253, pruned_loss=0.03338, over 4974.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2275, pruned_loss=0.04386, over 972504.36 frames.], batch size: 28, lr: 4.41e-04 +2022-05-05 00:12:45,927 INFO [train.py:715] (7/8) Epoch 4, batch 24150, loss[loss=0.1719, simple_loss=0.2296, pruned_loss=0.05706, over 4844.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2264, pruned_loss=0.04314, over 972434.99 frames.], batch size: 12, lr: 4.41e-04 +2022-05-05 00:13:25,911 INFO [train.py:715] (7/8) Epoch 4, batch 24200, loss[loss=0.1255, simple_loss=0.1963, pruned_loss=0.02735, over 4903.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2259, pruned_loss=0.04317, over 973162.75 frames.], batch size: 17, lr: 4.41e-04 +2022-05-05 00:14:07,341 INFO [train.py:715] (7/8) Epoch 4, batch 24250, loss[loss=0.1845, simple_loss=0.2464, pruned_loss=0.0613, over 4868.00 frames.], tot_loss[loss=0.156, simple_loss=0.2261, pruned_loss=0.04296, over 973189.11 frames.], batch size: 20, lr: 4.41e-04 +2022-05-05 00:14:46,254 INFO [train.py:715] (7/8) Epoch 4, batch 24300, loss[loss=0.1676, simple_loss=0.239, pruned_loss=0.04808, over 4783.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2264, pruned_loss=0.04294, over 973042.37 frames.], batch size: 14, lr: 4.41e-04 +2022-05-05 00:15:26,723 INFO [train.py:715] (7/8) Epoch 4, batch 24350, loss[loss=0.1724, simple_loss=0.2515, pruned_loss=0.04661, over 4990.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2265, pruned_loss=0.04312, over 973379.06 frames.], batch size: 25, lr: 4.41e-04 +2022-05-05 00:16:07,667 INFO [train.py:715] (7/8) Epoch 4, batch 24400, loss[loss=0.1639, simple_loss=0.243, pruned_loss=0.04239, over 4789.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2263, pruned_loss=0.04292, over 973728.11 frames.], batch size: 17, lr: 4.41e-04 +2022-05-05 00:16:47,248 INFO [train.py:715] (7/8) Epoch 4, batch 24450, loss[loss=0.1545, simple_loss=0.2268, pruned_loss=0.04112, over 4902.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2261, pruned_loss=0.04266, over 973051.46 frames.], batch size: 17, lr: 4.41e-04 +2022-05-05 00:17:27,014 INFO [train.py:715] (7/8) Epoch 4, batch 24500, loss[loss=0.1439, simple_loss=0.211, pruned_loss=0.03841, over 4921.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2255, pruned_loss=0.04235, over 973190.30 frames.], batch size: 18, lr: 4.41e-04 +2022-05-05 00:18:06,877 INFO [train.py:715] (7/8) Epoch 4, batch 24550, loss[loss=0.1405, simple_loss=0.2146, pruned_loss=0.03318, over 4835.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2263, pruned_loss=0.04296, over 973917.71 frames.], batch size: 15, lr: 4.41e-04 +2022-05-05 00:18:48,106 INFO [train.py:715] (7/8) Epoch 4, batch 24600, loss[loss=0.1392, simple_loss=0.2126, pruned_loss=0.03292, over 4927.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2256, pruned_loss=0.04287, over 973635.99 frames.], batch size: 29, lr: 4.41e-04 +2022-05-05 00:19:27,466 INFO [train.py:715] (7/8) Epoch 4, batch 24650, loss[loss=0.1466, simple_loss=0.2217, pruned_loss=0.03581, over 4810.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2247, pruned_loss=0.04285, over 972571.99 frames.], batch size: 27, lr: 4.41e-04 +2022-05-05 00:20:08,200 
INFO [train.py:715] (7/8) Epoch 4, batch 24700, loss[loss=0.1427, simple_loss=0.1992, pruned_loss=0.04314, over 4689.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04301, over 972460.78 frames.], batch size: 15, lr: 4.40e-04 +2022-05-05 00:20:49,277 INFO [train.py:715] (7/8) Epoch 4, batch 24750, loss[loss=0.2012, simple_loss=0.2564, pruned_loss=0.07296, over 4648.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2252, pruned_loss=0.04334, over 972763.23 frames.], batch size: 13, lr: 4.40e-04 +2022-05-05 00:21:28,790 INFO [train.py:715] (7/8) Epoch 4, batch 24800, loss[loss=0.149, simple_loss=0.2225, pruned_loss=0.03777, over 4773.00 frames.], tot_loss[loss=0.1557, simple_loss=0.225, pruned_loss=0.04321, over 972389.36 frames.], batch size: 14, lr: 4.40e-04 +2022-05-05 00:22:08,799 INFO [train.py:715] (7/8) Epoch 4, batch 24850, loss[loss=0.14, simple_loss=0.2222, pruned_loss=0.02894, over 4958.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2243, pruned_loss=0.04277, over 971427.02 frames.], batch size: 21, lr: 4.40e-04 +2022-05-05 00:22:49,038 INFO [train.py:715] (7/8) Epoch 4, batch 24900, loss[loss=0.1643, simple_loss=0.2325, pruned_loss=0.04801, over 4791.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2253, pruned_loss=0.04298, over 971426.81 frames.], batch size: 24, lr: 4.40e-04 +2022-05-05 00:23:30,188 INFO [train.py:715] (7/8) Epoch 4, batch 24950, loss[loss=0.1515, simple_loss=0.2212, pruned_loss=0.04092, over 4957.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04321, over 971546.95 frames.], batch size: 24, lr: 4.40e-04 +2022-05-05 00:24:09,090 INFO [train.py:715] (7/8) Epoch 4, batch 25000, loss[loss=0.1408, simple_loss=0.2069, pruned_loss=0.03729, over 4865.00 frames.], tot_loss[loss=0.156, simple_loss=0.2259, pruned_loss=0.04305, over 972624.53 frames.], batch size: 20, lr: 4.40e-04 +2022-05-05 00:24:49,332 INFO [train.py:715] (7/8) Epoch 4, batch 25050, loss[loss=0.1219, simple_loss=0.1971, pruned_loss=0.0233, over 4830.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2261, pruned_loss=0.0433, over 972203.30 frames.], batch size: 27, lr: 4.40e-04 +2022-05-05 00:25:30,453 INFO [train.py:715] (7/8) Epoch 4, batch 25100, loss[loss=0.1658, simple_loss=0.2309, pruned_loss=0.05031, over 4962.00 frames.], tot_loss[loss=0.1554, simple_loss=0.225, pruned_loss=0.04291, over 971780.72 frames.], batch size: 29, lr: 4.40e-04 +2022-05-05 00:26:10,373 INFO [train.py:715] (7/8) Epoch 4, batch 25150, loss[loss=0.1331, simple_loss=0.2005, pruned_loss=0.03281, over 4757.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2257, pruned_loss=0.04289, over 971878.14 frames.], batch size: 16, lr: 4.40e-04 +2022-05-05 00:26:49,795 INFO [train.py:715] (7/8) Epoch 4, batch 25200, loss[loss=0.15, simple_loss=0.2278, pruned_loss=0.03614, over 4989.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.0429, over 971238.86 frames.], batch size: 25, lr: 4.40e-04 +2022-05-05 00:27:30,066 INFO [train.py:715] (7/8) Epoch 4, batch 25250, loss[loss=0.1512, simple_loss=0.2259, pruned_loss=0.03823, over 4796.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04325, over 971434.82 frames.], batch size: 24, lr: 4.40e-04 +2022-05-05 00:28:10,080 INFO [train.py:715] (7/8) Epoch 4, batch 25300, loss[loss=0.1588, simple_loss=0.224, pruned_loss=0.04681, over 4805.00 frames.], tot_loss[loss=0.1569, simple_loss=0.227, pruned_loss=0.0434, over 972258.13 frames.], batch size: 13, lr: 4.40e-04 +2022-05-05 00:28:47,883 INFO [train.py:715] (7/8) Epoch 
4, batch 25350, loss[loss=0.145, simple_loss=0.2152, pruned_loss=0.03741, over 4847.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2266, pruned_loss=0.04302, over 971374.12 frames.], batch size: 30, lr: 4.40e-04 +2022-05-05 00:29:26,731 INFO [train.py:715] (7/8) Epoch 4, batch 25400, loss[loss=0.1178, simple_loss=0.1906, pruned_loss=0.02251, over 4865.00 frames.], tot_loss[loss=0.156, simple_loss=0.2263, pruned_loss=0.04282, over 970871.23 frames.], batch size: 38, lr: 4.40e-04 +2022-05-05 00:30:06,395 INFO [train.py:715] (7/8) Epoch 4, batch 25450, loss[loss=0.1519, simple_loss=0.2365, pruned_loss=0.03368, over 4955.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2268, pruned_loss=0.04302, over 971574.71 frames.], batch size: 24, lr: 4.39e-04 +2022-05-05 00:30:45,457 INFO [train.py:715] (7/8) Epoch 4, batch 25500, loss[loss=0.1998, simple_loss=0.2677, pruned_loss=0.06595, over 4968.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2267, pruned_loss=0.04331, over 972094.53 frames.], batch size: 15, lr: 4.39e-04 +2022-05-05 00:31:25,316 INFO [train.py:715] (7/8) Epoch 4, batch 25550, loss[loss=0.153, simple_loss=0.2345, pruned_loss=0.03578, over 4815.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2264, pruned_loss=0.04321, over 972040.88 frames.], batch size: 26, lr: 4.39e-04 +2022-05-05 00:32:05,296 INFO [train.py:715] (7/8) Epoch 4, batch 25600, loss[loss=0.1704, simple_loss=0.2401, pruned_loss=0.05033, over 4759.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2261, pruned_loss=0.04261, over 971797.79 frames.], batch size: 12, lr: 4.39e-04 +2022-05-05 00:32:45,568 INFO [train.py:715] (7/8) Epoch 4, batch 25650, loss[loss=0.1644, simple_loss=0.2257, pruned_loss=0.0516, over 4916.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2261, pruned_loss=0.04273, over 972432.20 frames.], batch size: 18, lr: 4.39e-04 +2022-05-05 00:33:24,676 INFO [train.py:715] (7/8) Epoch 4, batch 25700, loss[loss=0.1575, simple_loss=0.2247, pruned_loss=0.04519, over 4934.00 frames.], tot_loss[loss=0.156, simple_loss=0.2263, pruned_loss=0.04288, over 971945.82 frames.], batch size: 23, lr: 4.39e-04 +2022-05-05 00:34:04,661 INFO [train.py:715] (7/8) Epoch 4, batch 25750, loss[loss=0.1599, simple_loss=0.2359, pruned_loss=0.04195, over 4686.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2258, pruned_loss=0.04281, over 971983.92 frames.], batch size: 15, lr: 4.39e-04 +2022-05-05 00:34:45,104 INFO [train.py:715] (7/8) Epoch 4, batch 25800, loss[loss=0.1521, simple_loss=0.2073, pruned_loss=0.04843, over 4779.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2252, pruned_loss=0.04267, over 972167.11 frames.], batch size: 17, lr: 4.39e-04 +2022-05-05 00:35:24,463 INFO [train.py:715] (7/8) Epoch 4, batch 25850, loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.04084, over 4833.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2245, pruned_loss=0.04225, over 971967.95 frames.], batch size: 27, lr: 4.39e-04 +2022-05-05 00:36:03,599 INFO [train.py:715] (7/8) Epoch 4, batch 25900, loss[loss=0.1197, simple_loss=0.1961, pruned_loss=0.02166, over 4777.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2243, pruned_loss=0.04241, over 972222.07 frames.], batch size: 14, lr: 4.39e-04 +2022-05-05 00:36:43,853 INFO [train.py:715] (7/8) Epoch 4, batch 25950, loss[loss=0.1929, simple_loss=0.2725, pruned_loss=0.05671, over 4906.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2256, pruned_loss=0.04294, over 972466.49 frames.], batch size: 19, lr: 4.39e-04 +2022-05-05 00:37:24,111 INFO [train.py:715] (7/8) Epoch 4, batch 26000, 
loss[loss=0.167, simple_loss=0.2304, pruned_loss=0.05178, over 4910.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2266, pruned_loss=0.04388, over 972644.34 frames.], batch size: 17, lr: 4.39e-04 +2022-05-05 00:38:02,821 INFO [train.py:715] (7/8) Epoch 4, batch 26050, loss[loss=0.1357, simple_loss=0.2036, pruned_loss=0.03388, over 4759.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2258, pruned_loss=0.04351, over 972318.69 frames.], batch size: 12, lr: 4.39e-04 +2022-05-05 00:38:42,230 INFO [train.py:715] (7/8) Epoch 4, batch 26100, loss[loss=0.14, simple_loss=0.2164, pruned_loss=0.03179, over 4947.00 frames.], tot_loss[loss=0.1558, simple_loss=0.225, pruned_loss=0.04329, over 971906.77 frames.], batch size: 29, lr: 4.39e-04 +2022-05-05 00:39:22,684 INFO [train.py:715] (7/8) Epoch 4, batch 26150, loss[loss=0.1468, simple_loss=0.2189, pruned_loss=0.03733, over 4835.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2237, pruned_loss=0.04245, over 971716.77 frames.], batch size: 15, lr: 4.39e-04 +2022-05-05 00:40:01,760 INFO [train.py:715] (7/8) Epoch 4, batch 26200, loss[loss=0.1503, simple_loss=0.2245, pruned_loss=0.03804, over 4800.00 frames.], tot_loss[loss=0.154, simple_loss=0.2237, pruned_loss=0.04215, over 971747.84 frames.], batch size: 24, lr: 4.38e-04 +2022-05-05 00:40:41,526 INFO [train.py:715] (7/8) Epoch 4, batch 26250, loss[loss=0.1507, simple_loss=0.2261, pruned_loss=0.03766, over 4932.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2246, pruned_loss=0.04281, over 971895.85 frames.], batch size: 39, lr: 4.38e-04 +2022-05-05 00:41:21,390 INFO [train.py:715] (7/8) Epoch 4, batch 26300, loss[loss=0.1522, simple_loss=0.2239, pruned_loss=0.04024, over 4949.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2249, pruned_loss=0.04297, over 972561.52 frames.], batch size: 35, lr: 4.38e-04 +2022-05-05 00:42:01,538 INFO [train.py:715] (7/8) Epoch 4, batch 26350, loss[loss=0.1393, simple_loss=0.2225, pruned_loss=0.02801, over 4821.00 frames.], tot_loss[loss=0.1556, simple_loss=0.225, pruned_loss=0.04307, over 972449.40 frames.], batch size: 25, lr: 4.38e-04 +2022-05-05 00:42:40,877 INFO [train.py:715] (7/8) Epoch 4, batch 26400, loss[loss=0.1822, simple_loss=0.2539, pruned_loss=0.05521, over 4985.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2252, pruned_loss=0.04294, over 972034.04 frames.], batch size: 14, lr: 4.38e-04 +2022-05-05 00:43:20,962 INFO [train.py:715] (7/8) Epoch 4, batch 26450, loss[loss=0.1452, simple_loss=0.2135, pruned_loss=0.03847, over 4784.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2249, pruned_loss=0.04281, over 972095.52 frames.], batch size: 14, lr: 4.38e-04 +2022-05-05 00:44:01,488 INFO [train.py:715] (7/8) Epoch 4, batch 26500, loss[loss=0.1723, simple_loss=0.2413, pruned_loss=0.05167, over 4949.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2253, pruned_loss=0.04288, over 972616.90 frames.], batch size: 21, lr: 4.38e-04 +2022-05-05 00:44:40,386 INFO [train.py:715] (7/8) Epoch 4, batch 26550, loss[loss=0.1831, simple_loss=0.2476, pruned_loss=0.05925, over 4933.00 frames.], tot_loss[loss=0.156, simple_loss=0.2257, pruned_loss=0.0432, over 972418.96 frames.], batch size: 18, lr: 4.38e-04 +2022-05-05 00:45:20,036 INFO [train.py:715] (7/8) Epoch 4, batch 26600, loss[loss=0.1368, simple_loss=0.1978, pruned_loss=0.03793, over 4787.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2257, pruned_loss=0.04274, over 972202.24 frames.], batch size: 12, lr: 4.38e-04 +2022-05-05 00:46:00,427 INFO [train.py:715] (7/8) Epoch 4, batch 26650, loss[loss=0.1487, 
simple_loss=0.2205, pruned_loss=0.03846, over 4915.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2261, pruned_loss=0.04286, over 971794.04 frames.], batch size: 17, lr: 4.38e-04 +2022-05-05 00:46:41,233 INFO [train.py:715] (7/8) Epoch 4, batch 26700, loss[loss=0.1421, simple_loss=0.2174, pruned_loss=0.0334, over 4783.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2258, pruned_loss=0.04299, over 972306.43 frames.], batch size: 12, lr: 4.38e-04 +2022-05-05 00:47:20,023 INFO [train.py:715] (7/8) Epoch 4, batch 26750, loss[loss=0.1441, simple_loss=0.2261, pruned_loss=0.03102, over 4928.00 frames.], tot_loss[loss=0.157, simple_loss=0.2267, pruned_loss=0.04361, over 972476.40 frames.], batch size: 23, lr: 4.38e-04 +2022-05-05 00:47:59,599 INFO [train.py:715] (7/8) Epoch 4, batch 26800, loss[loss=0.1728, simple_loss=0.2346, pruned_loss=0.05551, over 4711.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2264, pruned_loss=0.04348, over 972359.78 frames.], batch size: 15, lr: 4.38e-04 +2022-05-05 00:48:39,808 INFO [train.py:715] (7/8) Epoch 4, batch 26850, loss[loss=0.1375, simple_loss=0.2126, pruned_loss=0.03124, over 4894.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2262, pruned_loss=0.0435, over 972307.45 frames.], batch size: 19, lr: 4.38e-04 +2022-05-05 00:49:18,745 INFO [train.py:715] (7/8) Epoch 4, batch 26900, loss[loss=0.1524, simple_loss=0.2159, pruned_loss=0.04448, over 4992.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2259, pruned_loss=0.04277, over 971956.32 frames.], batch size: 16, lr: 4.38e-04 +2022-05-05 00:49:58,562 INFO [train.py:715] (7/8) Epoch 4, batch 26950, loss[loss=0.1545, simple_loss=0.2279, pruned_loss=0.04057, over 4903.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2269, pruned_loss=0.04313, over 971697.23 frames.], batch size: 17, lr: 4.37e-04 +2022-05-05 00:50:38,532 INFO [train.py:715] (7/8) Epoch 4, batch 27000, loss[loss=0.1461, simple_loss=0.2145, pruned_loss=0.03879, over 4796.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2265, pruned_loss=0.04292, over 971493.50 frames.], batch size: 21, lr: 4.37e-04 +2022-05-05 00:50:38,532 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 00:50:48,692 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1114, simple_loss=0.197, pruned_loss=0.01284, over 914524.00 frames. 
+2022-05-05 00:51:28,853 INFO [train.py:715] (7/8) Epoch 4, batch 27050, loss[loss=0.1453, simple_loss=0.2122, pruned_loss=0.03922, over 4809.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2252, pruned_loss=0.04246, over 972095.11 frames.], batch size: 13, lr: 4.37e-04 +2022-05-05 00:52:08,422 INFO [train.py:715] (7/8) Epoch 4, batch 27100, loss[loss=0.1429, simple_loss=0.2163, pruned_loss=0.03479, over 4795.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2258, pruned_loss=0.04282, over 971800.52 frames.], batch size: 18, lr: 4.37e-04 +2022-05-05 00:52:47,729 INFO [train.py:715] (7/8) Epoch 4, batch 27150, loss[loss=0.1726, simple_loss=0.236, pruned_loss=0.0546, over 4854.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2254, pruned_loss=0.04296, over 972406.51 frames.], batch size: 15, lr: 4.37e-04 +2022-05-05 00:53:27,412 INFO [train.py:715] (7/8) Epoch 4, batch 27200, loss[loss=0.1562, simple_loss=0.2294, pruned_loss=0.04152, over 4851.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2253, pruned_loss=0.04259, over 972799.35 frames.], batch size: 20, lr: 4.37e-04 +2022-05-05 00:54:07,878 INFO [train.py:715] (7/8) Epoch 4, batch 27250, loss[loss=0.154, simple_loss=0.2202, pruned_loss=0.04393, over 4888.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2265, pruned_loss=0.04323, over 972980.86 frames.], batch size: 19, lr: 4.37e-04 +2022-05-05 00:54:46,639 INFO [train.py:715] (7/8) Epoch 4, batch 27300, loss[loss=0.1725, simple_loss=0.2482, pruned_loss=0.04842, over 4772.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2262, pruned_loss=0.04247, over 973526.55 frames.], batch size: 17, lr: 4.37e-04 +2022-05-05 00:55:26,630 INFO [train.py:715] (7/8) Epoch 4, batch 27350, loss[loss=0.2047, simple_loss=0.2549, pruned_loss=0.07725, over 4933.00 frames.], tot_loss[loss=0.1555, simple_loss=0.226, pruned_loss=0.04249, over 972208.19 frames.], batch size: 18, lr: 4.37e-04 +2022-05-05 00:56:06,586 INFO [train.py:715] (7/8) Epoch 4, batch 27400, loss[loss=0.1631, simple_loss=0.231, pruned_loss=0.04758, over 4976.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2262, pruned_loss=0.04259, over 972741.52 frames.], batch size: 28, lr: 4.37e-04 +2022-05-05 00:56:45,013 INFO [train.py:715] (7/8) Epoch 4, batch 27450, loss[loss=0.1305, simple_loss=0.2077, pruned_loss=0.02666, over 4904.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2254, pruned_loss=0.04206, over 972007.95 frames.], batch size: 19, lr: 4.37e-04 +2022-05-05 00:57:24,964 INFO [train.py:715] (7/8) Epoch 4, batch 27500, loss[loss=0.1874, simple_loss=0.2529, pruned_loss=0.06089, over 4987.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2255, pruned_loss=0.04177, over 972246.67 frames.], batch size: 31, lr: 4.37e-04 +2022-05-05 00:58:03,979 INFO [train.py:715] (7/8) Epoch 4, batch 27550, loss[loss=0.1367, simple_loss=0.2138, pruned_loss=0.02977, over 4900.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2261, pruned_loss=0.04244, over 971718.33 frames.], batch size: 19, lr: 4.37e-04 +2022-05-05 00:58:43,895 INFO [train.py:715] (7/8) Epoch 4, batch 27600, loss[loss=0.172, simple_loss=0.2482, pruned_loss=0.04789, over 4842.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2259, pruned_loss=0.04238, over 971540.66 frames.], batch size: 30, lr: 4.37e-04 +2022-05-05 00:59:22,458 INFO [train.py:715] (7/8) Epoch 4, batch 27650, loss[loss=0.1421, simple_loss=0.2119, pruned_loss=0.03619, over 4829.00 frames.], tot_loss[loss=0.1557, simple_loss=0.226, pruned_loss=0.04266, over 972467.64 frames.], batch size: 27, lr: 4.37e-04 +2022-05-05 01:00:01,784 
INFO [train.py:715] (7/8) Epoch 4, batch 27700, loss[loss=0.1537, simple_loss=0.2368, pruned_loss=0.03535, over 4894.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2266, pruned_loss=0.04304, over 972903.86 frames.], batch size: 22, lr: 4.36e-04 +2022-05-05 01:00:41,408 INFO [train.py:715] (7/8) Epoch 4, batch 27750, loss[loss=0.1543, simple_loss=0.2228, pruned_loss=0.04288, over 4697.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2267, pruned_loss=0.04346, over 972695.38 frames.], batch size: 15, lr: 4.36e-04 +2022-05-05 01:01:20,726 INFO [train.py:715] (7/8) Epoch 4, batch 27800, loss[loss=0.1646, simple_loss=0.2354, pruned_loss=0.0469, over 4783.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2273, pruned_loss=0.04374, over 973297.21 frames.], batch size: 17, lr: 4.36e-04 +2022-05-05 01:01:59,777 INFO [train.py:715] (7/8) Epoch 4, batch 27850, loss[loss=0.1915, simple_loss=0.2509, pruned_loss=0.0661, over 4947.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2266, pruned_loss=0.04328, over 973258.01 frames.], batch size: 21, lr: 4.36e-04 +2022-05-05 01:02:38,866 INFO [train.py:715] (7/8) Epoch 4, batch 27900, loss[loss=0.155, simple_loss=0.2229, pruned_loss=0.04351, over 4822.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2269, pruned_loss=0.04362, over 972723.58 frames.], batch size: 13, lr: 4.36e-04 +2022-05-05 01:03:18,313 INFO [train.py:715] (7/8) Epoch 4, batch 27950, loss[loss=0.1842, simple_loss=0.254, pruned_loss=0.05722, over 4890.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2263, pruned_loss=0.04335, over 972536.70 frames.], batch size: 19, lr: 4.36e-04 +2022-05-05 01:03:57,879 INFO [train.py:715] (7/8) Epoch 4, batch 28000, loss[loss=0.1654, simple_loss=0.2312, pruned_loss=0.04976, over 4823.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2261, pruned_loss=0.04324, over 972385.05 frames.], batch size: 26, lr: 4.36e-04 +2022-05-05 01:04:37,836 INFO [train.py:715] (7/8) Epoch 4, batch 28050, loss[loss=0.177, simple_loss=0.2335, pruned_loss=0.06027, over 4839.00 frames.], tot_loss[loss=0.157, simple_loss=0.2269, pruned_loss=0.04359, over 971567.94 frames.], batch size: 15, lr: 4.36e-04 +2022-05-05 01:05:17,719 INFO [train.py:715] (7/8) Epoch 4, batch 28100, loss[loss=0.1643, simple_loss=0.223, pruned_loss=0.05273, over 4788.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2269, pruned_loss=0.04386, over 971669.58 frames.], batch size: 18, lr: 4.36e-04 +2022-05-05 01:05:57,315 INFO [train.py:715] (7/8) Epoch 4, batch 28150, loss[loss=0.1513, simple_loss=0.2099, pruned_loss=0.04632, over 4929.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2271, pruned_loss=0.04415, over 972425.21 frames.], batch size: 18, lr: 4.36e-04 +2022-05-05 01:06:36,805 INFO [train.py:715] (7/8) Epoch 4, batch 28200, loss[loss=0.1645, simple_loss=0.2259, pruned_loss=0.05156, over 4786.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2265, pruned_loss=0.04349, over 972010.30 frames.], batch size: 17, lr: 4.36e-04 +2022-05-05 01:07:15,870 INFO [train.py:715] (7/8) Epoch 4, batch 28250, loss[loss=0.1714, simple_loss=0.2384, pruned_loss=0.05218, over 4988.00 frames.], tot_loss[loss=0.157, simple_loss=0.227, pruned_loss=0.04357, over 972004.38 frames.], batch size: 31, lr: 4.36e-04 +2022-05-05 01:07:55,435 INFO [train.py:715] (7/8) Epoch 4, batch 28300, loss[loss=0.1215, simple_loss=0.1941, pruned_loss=0.0244, over 4870.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2263, pruned_loss=0.04325, over 972062.35 frames.], batch size: 16, lr: 4.36e-04 +2022-05-05 01:08:34,757 INFO [train.py:715] (7/8) 
Epoch 4, batch 28350, loss[loss=0.1457, simple_loss=0.2202, pruned_loss=0.03566, over 4764.00 frames.], tot_loss[loss=0.1551, simple_loss=0.225, pruned_loss=0.0426, over 971641.09 frames.], batch size: 19, lr: 4.36e-04 +2022-05-05 01:09:14,658 INFO [train.py:715] (7/8) Epoch 4, batch 28400, loss[loss=0.1364, simple_loss=0.2071, pruned_loss=0.03287, over 4928.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04327, over 971999.10 frames.], batch size: 23, lr: 4.36e-04 +2022-05-05 01:09:53,873 INFO [train.py:715] (7/8) Epoch 4, batch 28450, loss[loss=0.1476, simple_loss=0.2067, pruned_loss=0.04426, over 4779.00 frames.], tot_loss[loss=0.156, simple_loss=0.2258, pruned_loss=0.04309, over 972313.15 frames.], batch size: 14, lr: 4.36e-04 +2022-05-05 01:10:32,532 INFO [train.py:715] (7/8) Epoch 4, batch 28500, loss[loss=0.1324, simple_loss=0.2031, pruned_loss=0.03085, over 4855.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2259, pruned_loss=0.04281, over 970957.94 frames.], batch size: 30, lr: 4.35e-04 +2022-05-05 01:11:12,042 INFO [train.py:715] (7/8) Epoch 4, batch 28550, loss[loss=0.1511, simple_loss=0.2243, pruned_loss=0.03893, over 4921.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2255, pruned_loss=0.04255, over 971894.56 frames.], batch size: 29, lr: 4.35e-04 +2022-05-05 01:11:51,204 INFO [train.py:715] (7/8) Epoch 4, batch 28600, loss[loss=0.1388, simple_loss=0.2067, pruned_loss=0.03549, over 4828.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2259, pruned_loss=0.04293, over 971491.11 frames.], batch size: 15, lr: 4.35e-04 +2022-05-05 01:12:30,866 INFO [train.py:715] (7/8) Epoch 4, batch 28650, loss[loss=0.1344, simple_loss=0.1982, pruned_loss=0.03533, over 4969.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2258, pruned_loss=0.04283, over 973211.21 frames.], batch size: 14, lr: 4.35e-04 +2022-05-05 01:13:10,040 INFO [train.py:715] (7/8) Epoch 4, batch 28700, loss[loss=0.1237, simple_loss=0.1939, pruned_loss=0.02674, over 4909.00 frames.], tot_loss[loss=0.1559, simple_loss=0.226, pruned_loss=0.04284, over 973721.41 frames.], batch size: 19, lr: 4.35e-04 +2022-05-05 01:13:49,558 INFO [train.py:715] (7/8) Epoch 4, batch 28750, loss[loss=0.1122, simple_loss=0.1879, pruned_loss=0.01826, over 4846.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2261, pruned_loss=0.04262, over 973912.96 frames.], batch size: 12, lr: 4.35e-04 +2022-05-05 01:14:31,715 INFO [train.py:715] (7/8) Epoch 4, batch 28800, loss[loss=0.1689, simple_loss=0.2359, pruned_loss=0.05092, over 4797.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2265, pruned_loss=0.04264, over 972843.85 frames.], batch size: 24, lr: 4.35e-04 +2022-05-05 01:15:10,514 INFO [train.py:715] (7/8) Epoch 4, batch 28850, loss[loss=0.1554, simple_loss=0.2113, pruned_loss=0.04975, over 4751.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2261, pruned_loss=0.04244, over 972605.64 frames.], batch size: 16, lr: 4.35e-04 +2022-05-05 01:15:50,212 INFO [train.py:715] (7/8) Epoch 4, batch 28900, loss[loss=0.1561, simple_loss=0.2264, pruned_loss=0.04285, over 4987.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2257, pruned_loss=0.04231, over 972298.42 frames.], batch size: 33, lr: 4.35e-04 +2022-05-05 01:16:29,314 INFO [train.py:715] (7/8) Epoch 4, batch 28950, loss[loss=0.1738, simple_loss=0.2277, pruned_loss=0.05999, over 4917.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2262, pruned_loss=0.04237, over 972159.00 frames.], batch size: 17, lr: 4.35e-04 +2022-05-05 01:17:08,544 INFO [train.py:715] (7/8) Epoch 4, batch 29000, 
loss[loss=0.1414, simple_loss=0.2179, pruned_loss=0.03238, over 4800.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2261, pruned_loss=0.04232, over 972928.04 frames.], batch size: 21, lr: 4.35e-04 +2022-05-05 01:17:48,162 INFO [train.py:715] (7/8) Epoch 4, batch 29050, loss[loss=0.1561, simple_loss=0.2371, pruned_loss=0.03754, over 4747.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2261, pruned_loss=0.04205, over 972475.11 frames.], batch size: 16, lr: 4.35e-04 +2022-05-05 01:18:28,183 INFO [train.py:715] (7/8) Epoch 4, batch 29100, loss[loss=0.1676, simple_loss=0.2449, pruned_loss=0.04512, over 4903.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2263, pruned_loss=0.04256, over 971719.68 frames.], batch size: 19, lr: 4.35e-04 +2022-05-05 01:19:07,858 INFO [train.py:715] (7/8) Epoch 4, batch 29150, loss[loss=0.149, simple_loss=0.2358, pruned_loss=0.03112, over 4974.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2264, pruned_loss=0.04261, over 971744.98 frames.], batch size: 15, lr: 4.35e-04 +2022-05-05 01:19:46,743 INFO [train.py:715] (7/8) Epoch 4, batch 29200, loss[loss=0.1373, simple_loss=0.2158, pruned_loss=0.02941, over 4750.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2255, pruned_loss=0.04241, over 971975.62 frames.], batch size: 19, lr: 4.35e-04 +2022-05-05 01:20:26,109 INFO [train.py:715] (7/8) Epoch 4, batch 29250, loss[loss=0.119, simple_loss=0.1973, pruned_loss=0.02042, over 4826.00 frames.], tot_loss[loss=0.1547, simple_loss=0.225, pruned_loss=0.04217, over 972288.83 frames.], batch size: 25, lr: 4.34e-04 +2022-05-05 01:21:05,004 INFO [train.py:715] (7/8) Epoch 4, batch 29300, loss[loss=0.1555, simple_loss=0.2288, pruned_loss=0.04109, over 4813.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2262, pruned_loss=0.04269, over 972339.45 frames.], batch size: 27, lr: 4.34e-04 +2022-05-05 01:21:43,986 INFO [train.py:715] (7/8) Epoch 4, batch 29350, loss[loss=0.1547, simple_loss=0.234, pruned_loss=0.03768, over 4966.00 frames.], tot_loss[loss=0.1549, simple_loss=0.225, pruned_loss=0.04235, over 972378.88 frames.], batch size: 24, lr: 4.34e-04 +2022-05-05 01:22:22,968 INFO [train.py:715] (7/8) Epoch 4, batch 29400, loss[loss=0.173, simple_loss=0.235, pruned_loss=0.05551, over 4782.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04243, over 972788.01 frames.], batch size: 18, lr: 4.34e-04 +2022-05-05 01:23:02,047 INFO [train.py:715] (7/8) Epoch 4, batch 29450, loss[loss=0.133, simple_loss=0.2089, pruned_loss=0.02851, over 4770.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2248, pruned_loss=0.04223, over 971769.84 frames.], batch size: 14, lr: 4.34e-04 +2022-05-05 01:23:41,627 INFO [train.py:715] (7/8) Epoch 4, batch 29500, loss[loss=0.1233, simple_loss=0.2037, pruned_loss=0.02141, over 4810.00 frames.], tot_loss[loss=0.155, simple_loss=0.2252, pruned_loss=0.04236, over 972015.54 frames.], batch size: 21, lr: 4.34e-04 +2022-05-05 01:24:20,883 INFO [train.py:715] (7/8) Epoch 4, batch 29550, loss[loss=0.1784, simple_loss=0.2406, pruned_loss=0.05811, over 4887.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2242, pruned_loss=0.04176, over 971084.25 frames.], batch size: 19, lr: 4.34e-04 +2022-05-05 01:25:00,165 INFO [train.py:715] (7/8) Epoch 4, batch 29600, loss[loss=0.1399, simple_loss=0.2065, pruned_loss=0.03664, over 4972.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2237, pruned_loss=0.04159, over 972049.39 frames.], batch size: 39, lr: 4.34e-04 +2022-05-05 01:25:39,287 INFO [train.py:715] (7/8) Epoch 4, batch 29650, loss[loss=0.1249, 
simple_loss=0.191, pruned_loss=0.02945, over 4699.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2227, pruned_loss=0.04112, over 972457.51 frames.], batch size: 15, lr: 4.34e-04 +2022-05-05 01:26:18,059 INFO [train.py:715] (7/8) Epoch 4, batch 29700, loss[loss=0.1791, simple_loss=0.2427, pruned_loss=0.05774, over 4953.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2242, pruned_loss=0.04216, over 971482.88 frames.], batch size: 35, lr: 4.34e-04 +2022-05-05 01:26:57,623 INFO [train.py:715] (7/8) Epoch 4, batch 29750, loss[loss=0.1651, simple_loss=0.2409, pruned_loss=0.04471, over 4969.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2251, pruned_loss=0.04252, over 971811.62 frames.], batch size: 24, lr: 4.34e-04 +2022-05-05 01:27:36,801 INFO [train.py:715] (7/8) Epoch 4, batch 29800, loss[loss=0.1509, simple_loss=0.229, pruned_loss=0.03639, over 4969.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2261, pruned_loss=0.04268, over 972024.66 frames.], batch size: 28, lr: 4.34e-04 +2022-05-05 01:28:16,334 INFO [train.py:715] (7/8) Epoch 4, batch 29850, loss[loss=0.1741, simple_loss=0.2613, pruned_loss=0.04341, over 4884.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2263, pruned_loss=0.04301, over 972098.21 frames.], batch size: 19, lr: 4.34e-04 +2022-05-05 01:28:55,202 INFO [train.py:715] (7/8) Epoch 4, batch 29900, loss[loss=0.1549, simple_loss=0.2256, pruned_loss=0.04208, over 4986.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2271, pruned_loss=0.04317, over 972536.46 frames.], batch size: 16, lr: 4.34e-04 +2022-05-05 01:29:34,844 INFO [train.py:715] (7/8) Epoch 4, batch 29950, loss[loss=0.1605, simple_loss=0.2332, pruned_loss=0.0439, over 4910.00 frames.], tot_loss[loss=0.156, simple_loss=0.2266, pruned_loss=0.04272, over 972719.85 frames.], batch size: 18, lr: 4.34e-04 +2022-05-05 01:30:13,997 INFO [train.py:715] (7/8) Epoch 4, batch 30000, loss[loss=0.1389, simple_loss=0.2085, pruned_loss=0.03467, over 4940.00 frames.], tot_loss[loss=0.1567, simple_loss=0.227, pruned_loss=0.04313, over 972895.17 frames.], batch size: 21, lr: 4.34e-04 +2022-05-05 01:30:13,998 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 01:30:23,829 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1113, simple_loss=0.1968, pruned_loss=0.01286, over 914524.00 frames. 
+2022-05-05 01:31:04,004 INFO [train.py:715] (7/8) Epoch 4, batch 30050, loss[loss=0.2117, simple_loss=0.2752, pruned_loss=0.07414, over 4958.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2277, pruned_loss=0.04396, over 972555.67 frames.], batch size: 39, lr: 4.33e-04 +2022-05-05 01:31:43,428 INFO [train.py:715] (7/8) Epoch 4, batch 30100, loss[loss=0.1552, simple_loss=0.2201, pruned_loss=0.04513, over 4711.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2287, pruned_loss=0.04429, over 973235.55 frames.], batch size: 15, lr: 4.33e-04 +2022-05-05 01:32:23,321 INFO [train.py:715] (7/8) Epoch 4, batch 30150, loss[loss=0.1553, simple_loss=0.2243, pruned_loss=0.04316, over 4756.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2273, pruned_loss=0.04325, over 972862.20 frames.], batch size: 16, lr: 4.33e-04 +2022-05-05 01:33:02,794 INFO [train.py:715] (7/8) Epoch 4, batch 30200, loss[loss=0.1638, simple_loss=0.2414, pruned_loss=0.04305, over 4972.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2266, pruned_loss=0.04281, over 972692.26 frames.], batch size: 15, lr: 4.33e-04 +2022-05-05 01:33:42,429 INFO [train.py:715] (7/8) Epoch 4, batch 30250, loss[loss=0.1638, simple_loss=0.2254, pruned_loss=0.05113, over 4978.00 frames.], tot_loss[loss=0.1556, simple_loss=0.226, pruned_loss=0.04259, over 972900.76 frames.], batch size: 35, lr: 4.33e-04 +2022-05-05 01:34:21,597 INFO [train.py:715] (7/8) Epoch 4, batch 30300, loss[loss=0.1519, simple_loss=0.231, pruned_loss=0.03642, over 4872.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2252, pruned_loss=0.04184, over 972394.16 frames.], batch size: 16, lr: 4.33e-04 +2022-05-05 01:35:01,079 INFO [train.py:715] (7/8) Epoch 4, batch 30350, loss[loss=0.1591, simple_loss=0.2317, pruned_loss=0.04323, over 4898.00 frames.], tot_loss[loss=0.154, simple_loss=0.2246, pruned_loss=0.0417, over 971859.28 frames.], batch size: 22, lr: 4.33e-04 +2022-05-05 01:35:41,056 INFO [train.py:715] (7/8) Epoch 4, batch 30400, loss[loss=0.1688, simple_loss=0.236, pruned_loss=0.05078, over 4913.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2241, pruned_loss=0.04155, over 972503.72 frames.], batch size: 18, lr: 4.33e-04 +2022-05-05 01:36:20,216 INFO [train.py:715] (7/8) Epoch 4, batch 30450, loss[loss=0.1821, simple_loss=0.2605, pruned_loss=0.05185, over 4806.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2246, pruned_loss=0.04176, over 972222.15 frames.], batch size: 21, lr: 4.33e-04 +2022-05-05 01:36:59,981 INFO [train.py:715] (7/8) Epoch 4, batch 30500, loss[loss=0.2087, simple_loss=0.2813, pruned_loss=0.06806, over 4796.00 frames.], tot_loss[loss=0.1535, simple_loss=0.224, pruned_loss=0.04151, over 971257.03 frames.], batch size: 21, lr: 4.33e-04 +2022-05-05 01:37:40,025 INFO [train.py:715] (7/8) Epoch 4, batch 30550, loss[loss=0.1124, simple_loss=0.1805, pruned_loss=0.02216, over 4845.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2242, pruned_loss=0.042, over 971766.17 frames.], batch size: 12, lr: 4.33e-04 +2022-05-05 01:38:19,336 INFO [train.py:715] (7/8) Epoch 4, batch 30600, loss[loss=0.1383, simple_loss=0.2035, pruned_loss=0.03661, over 4772.00 frames.], tot_loss[loss=0.1548, simple_loss=0.225, pruned_loss=0.0423, over 972500.88 frames.], batch size: 14, lr: 4.33e-04 +2022-05-05 01:38:58,944 INFO [train.py:715] (7/8) Epoch 4, batch 30650, loss[loss=0.1625, simple_loss=0.223, pruned_loss=0.05106, over 4918.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2249, pruned_loss=0.04179, over 972324.92 frames.], batch size: 18, lr: 4.33e-04 +2022-05-05 01:39:38,413 INFO 
[train.py:715] (7/8) Epoch 4, batch 30700, loss[loss=0.1579, simple_loss=0.2285, pruned_loss=0.04365, over 4820.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2247, pruned_loss=0.04207, over 971511.59 frames.], batch size: 27, lr: 4.33e-04 +2022-05-05 01:40:18,147 INFO [train.py:715] (7/8) Epoch 4, batch 30750, loss[loss=0.151, simple_loss=0.2243, pruned_loss=0.03888, over 4985.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04203, over 971834.36 frames.], batch size: 14, lr: 4.33e-04 +2022-05-05 01:40:57,687 INFO [train.py:715] (7/8) Epoch 4, batch 30800, loss[loss=0.1497, simple_loss=0.2156, pruned_loss=0.04185, over 4851.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2238, pruned_loss=0.04188, over 971707.75 frames.], batch size: 32, lr: 4.32e-04 +2022-05-05 01:41:37,513 INFO [train.py:715] (7/8) Epoch 4, batch 30850, loss[loss=0.165, simple_loss=0.2246, pruned_loss=0.05265, over 4863.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2244, pruned_loss=0.04212, over 971684.72 frames.], batch size: 30, lr: 4.32e-04 +2022-05-05 01:42:17,790 INFO [train.py:715] (7/8) Epoch 4, batch 30900, loss[loss=0.1397, simple_loss=0.2178, pruned_loss=0.03083, over 4812.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2244, pruned_loss=0.04191, over 971751.39 frames.], batch size: 21, lr: 4.32e-04 +2022-05-05 01:42:57,262 INFO [train.py:715] (7/8) Epoch 4, batch 30950, loss[loss=0.1837, simple_loss=0.2534, pruned_loss=0.05707, over 4880.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2252, pruned_loss=0.04232, over 972515.56 frames.], batch size: 16, lr: 4.32e-04 +2022-05-05 01:43:36,636 INFO [train.py:715] (7/8) Epoch 4, batch 31000, loss[loss=0.1536, simple_loss=0.2218, pruned_loss=0.04269, over 4906.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2244, pruned_loss=0.04177, over 971425.40 frames.], batch size: 17, lr: 4.32e-04 +2022-05-05 01:44:16,111 INFO [train.py:715] (7/8) Epoch 4, batch 31050, loss[loss=0.1713, simple_loss=0.2454, pruned_loss=0.04862, over 4932.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2253, pruned_loss=0.04243, over 972391.24 frames.], batch size: 29, lr: 4.32e-04 +2022-05-05 01:44:55,521 INFO [train.py:715] (7/8) Epoch 4, batch 31100, loss[loss=0.1512, simple_loss=0.2138, pruned_loss=0.04427, over 4946.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2258, pruned_loss=0.04335, over 972659.71 frames.], batch size: 35, lr: 4.32e-04 +2022-05-05 01:45:35,036 INFO [train.py:715] (7/8) Epoch 4, batch 31150, loss[loss=0.1338, simple_loss=0.1985, pruned_loss=0.03453, over 4900.00 frames.], tot_loss[loss=0.1552, simple_loss=0.225, pruned_loss=0.04269, over 973710.81 frames.], batch size: 17, lr: 4.32e-04 +2022-05-05 01:46:13,903 INFO [train.py:715] (7/8) Epoch 4, batch 31200, loss[loss=0.1802, simple_loss=0.2477, pruned_loss=0.05635, over 4704.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2254, pruned_loss=0.04291, over 972487.50 frames.], batch size: 15, lr: 4.32e-04 +2022-05-05 01:46:53,974 INFO [train.py:715] (7/8) Epoch 4, batch 31250, loss[loss=0.1402, simple_loss=0.2116, pruned_loss=0.03437, over 4794.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2258, pruned_loss=0.0432, over 971762.51 frames.], batch size: 13, lr: 4.32e-04 +2022-05-05 01:47:33,180 INFO [train.py:715] (7/8) Epoch 4, batch 31300, loss[loss=0.1812, simple_loss=0.2506, pruned_loss=0.05588, over 4973.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2251, pruned_loss=0.04261, over 972147.00 frames.], batch size: 24, lr: 4.32e-04 +2022-05-05 01:48:12,187 INFO [train.py:715] (7/8) 
Epoch 4, batch 31350, loss[loss=0.1547, simple_loss=0.2301, pruned_loss=0.03967, over 4876.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2252, pruned_loss=0.04309, over 973074.91 frames.], batch size: 22, lr: 4.32e-04 +2022-05-05 01:48:52,072 INFO [train.py:715] (7/8) Epoch 4, batch 31400, loss[loss=0.1261, simple_loss=0.1987, pruned_loss=0.02673, over 4959.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2246, pruned_loss=0.04252, over 973213.32 frames.], batch size: 35, lr: 4.32e-04 +2022-05-05 01:49:31,803 INFO [train.py:715] (7/8) Epoch 4, batch 31450, loss[loss=0.1528, simple_loss=0.225, pruned_loss=0.04031, over 4880.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2259, pruned_loss=0.04342, over 972692.32 frames.], batch size: 16, lr: 4.32e-04 +2022-05-05 01:50:11,370 INFO [train.py:715] (7/8) Epoch 4, batch 31500, loss[loss=0.1735, simple_loss=0.2553, pruned_loss=0.04589, over 4895.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2256, pruned_loss=0.04346, over 973000.16 frames.], batch size: 22, lr: 4.32e-04 +2022-05-05 01:50:51,746 INFO [train.py:715] (7/8) Epoch 4, batch 31550, loss[loss=0.138, simple_loss=0.2128, pruned_loss=0.03155, over 4963.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2255, pruned_loss=0.0434, over 973367.98 frames.], batch size: 15, lr: 4.32e-04 +2022-05-05 01:51:32,267 INFO [train.py:715] (7/8) Epoch 4, batch 31600, loss[loss=0.1701, simple_loss=0.2499, pruned_loss=0.04518, over 4765.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2259, pruned_loss=0.0438, over 973256.91 frames.], batch size: 18, lr: 4.31e-04 +2022-05-05 01:52:11,914 INFO [train.py:715] (7/8) Epoch 4, batch 31650, loss[loss=0.1976, simple_loss=0.2708, pruned_loss=0.06213, over 4815.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2261, pruned_loss=0.04423, over 972617.61 frames.], batch size: 25, lr: 4.31e-04 +2022-05-05 01:52:51,499 INFO [train.py:715] (7/8) Epoch 4, batch 31700, loss[loss=0.2019, simple_loss=0.2841, pruned_loss=0.05983, over 4993.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2263, pruned_loss=0.04366, over 972241.32 frames.], batch size: 15, lr: 4.31e-04 +2022-05-05 01:53:31,555 INFO [train.py:715] (7/8) Epoch 4, batch 31750, loss[loss=0.1474, simple_loss=0.2203, pruned_loss=0.03721, over 4920.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2266, pruned_loss=0.04382, over 973280.42 frames.], batch size: 18, lr: 4.31e-04 +2022-05-05 01:54:11,606 INFO [train.py:715] (7/8) Epoch 4, batch 31800, loss[loss=0.158, simple_loss=0.2221, pruned_loss=0.04693, over 4978.00 frames.], tot_loss[loss=0.1573, simple_loss=0.227, pruned_loss=0.04379, over 974130.10 frames.], batch size: 15, lr: 4.31e-04 +2022-05-05 01:54:51,200 INFO [train.py:715] (7/8) Epoch 4, batch 31850, loss[loss=0.1472, simple_loss=0.2262, pruned_loss=0.03411, over 4820.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2273, pruned_loss=0.04387, over 973594.22 frames.], batch size: 27, lr: 4.31e-04 +2022-05-05 01:55:30,807 INFO [train.py:715] (7/8) Epoch 4, batch 31900, loss[loss=0.1756, simple_loss=0.2599, pruned_loss=0.04564, over 4760.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2271, pruned_loss=0.04364, over 971742.08 frames.], batch size: 19, lr: 4.31e-04 +2022-05-05 01:56:11,031 INFO [train.py:715] (7/8) Epoch 4, batch 31950, loss[loss=0.1465, simple_loss=0.2217, pruned_loss=0.03569, over 4762.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2272, pruned_loss=0.04355, over 972260.62 frames.], batch size: 19, lr: 4.31e-04 +2022-05-05 01:56:50,984 INFO [train.py:715] (7/8) Epoch 4, batch 32000, 
loss[loss=0.1244, simple_loss=0.201, pruned_loss=0.02385, over 4820.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2267, pruned_loss=0.04306, over 972675.02 frames.], batch size: 27, lr: 4.31e-04 +2022-05-05 01:57:30,378 INFO [train.py:715] (7/8) Epoch 4, batch 32050, loss[loss=0.1508, simple_loss=0.2322, pruned_loss=0.03471, over 4876.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2265, pruned_loss=0.04285, over 972547.67 frames.], batch size: 22, lr: 4.31e-04 +2022-05-05 01:58:10,945 INFO [train.py:715] (7/8) Epoch 4, batch 32100, loss[loss=0.1176, simple_loss=0.1922, pruned_loss=0.02156, over 4848.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2263, pruned_loss=0.04253, over 971588.44 frames.], batch size: 20, lr: 4.31e-04 +2022-05-05 01:58:50,865 INFO [train.py:715] (7/8) Epoch 4, batch 32150, loss[loss=0.1493, simple_loss=0.2229, pruned_loss=0.03788, over 4759.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2257, pruned_loss=0.04236, over 971117.91 frames.], batch size: 19, lr: 4.31e-04 +2022-05-05 01:59:30,404 INFO [train.py:715] (7/8) Epoch 4, batch 32200, loss[loss=0.101, simple_loss=0.1701, pruned_loss=0.01596, over 4754.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2249, pruned_loss=0.04204, over 972137.51 frames.], batch size: 12, lr: 4.31e-04 +2022-05-05 02:00:10,364 INFO [train.py:715] (7/8) Epoch 4, batch 32250, loss[loss=0.1401, simple_loss=0.2222, pruned_loss=0.02905, over 4816.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04151, over 972568.06 frames.], batch size: 26, lr: 4.31e-04 +2022-05-05 02:00:51,159 INFO [train.py:715] (7/8) Epoch 4, batch 32300, loss[loss=0.1654, simple_loss=0.2231, pruned_loss=0.0539, over 4945.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2246, pruned_loss=0.04178, over 972665.42 frames.], batch size: 35, lr: 4.31e-04 +2022-05-05 02:01:31,944 INFO [train.py:715] (7/8) Epoch 4, batch 32350, loss[loss=0.1842, simple_loss=0.2566, pruned_loss=0.05585, over 4916.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2245, pruned_loss=0.0419, over 972511.19 frames.], batch size: 29, lr: 4.31e-04 +2022-05-05 02:02:12,276 INFO [train.py:715] (7/8) Epoch 4, batch 32400, loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03117, over 4745.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2241, pruned_loss=0.04145, over 972262.49 frames.], batch size: 16, lr: 4.30e-04 +2022-05-05 02:02:52,631 INFO [train.py:715] (7/8) Epoch 4, batch 32450, loss[loss=0.1475, simple_loss=0.2137, pruned_loss=0.04064, over 4936.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2243, pruned_loss=0.04157, over 973226.31 frames.], batch size: 23, lr: 4.30e-04 +2022-05-05 02:03:31,865 INFO [train.py:715] (7/8) Epoch 4, batch 32500, loss[loss=0.151, simple_loss=0.2216, pruned_loss=0.04022, over 4841.00 frames.], tot_loss[loss=0.1537, simple_loss=0.224, pruned_loss=0.04175, over 973327.99 frames.], batch size: 20, lr: 4.30e-04 +2022-05-05 02:04:11,773 INFO [train.py:715] (7/8) Epoch 4, batch 32550, loss[loss=0.1377, simple_loss=0.2091, pruned_loss=0.03319, over 4838.00 frames.], tot_loss[loss=0.154, simple_loss=0.2239, pruned_loss=0.04204, over 973040.09 frames.], batch size: 30, lr: 4.30e-04 +2022-05-05 02:04:50,741 INFO [train.py:715] (7/8) Epoch 4, batch 32600, loss[loss=0.1532, simple_loss=0.2351, pruned_loss=0.03561, over 4751.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2246, pruned_loss=0.04223, over 972389.51 frames.], batch size: 19, lr: 4.30e-04 +2022-05-05 02:05:30,805 INFO [train.py:715] (7/8) Epoch 4, batch 32650, loss[loss=0.166, 
simple_loss=0.2281, pruned_loss=0.05193, over 4929.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2244, pruned_loss=0.04208, over 971806.29 frames.], batch size: 18, lr: 4.30e-04 +2022-05-05 02:06:09,915 INFO [train.py:715] (7/8) Epoch 4, batch 32700, loss[loss=0.1476, simple_loss=0.2155, pruned_loss=0.03979, over 4804.00 frames.], tot_loss[loss=0.155, simple_loss=0.2254, pruned_loss=0.04233, over 972384.88 frames.], batch size: 14, lr: 4.30e-04 +2022-05-05 02:06:49,547 INFO [train.py:715] (7/8) Epoch 4, batch 32750, loss[loss=0.1654, simple_loss=0.2361, pruned_loss=0.04737, over 4901.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04245, over 972678.64 frames.], batch size: 22, lr: 4.30e-04 +2022-05-05 02:07:29,262 INFO [train.py:715] (7/8) Epoch 4, batch 32800, loss[loss=0.1515, simple_loss=0.1963, pruned_loss=0.05339, over 4833.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04247, over 973581.02 frames.], batch size: 13, lr: 4.30e-04 +2022-05-05 02:08:09,340 INFO [train.py:715] (7/8) Epoch 4, batch 32850, loss[loss=0.1795, simple_loss=0.2404, pruned_loss=0.05932, over 4788.00 frames.], tot_loss[loss=0.1547, simple_loss=0.225, pruned_loss=0.04218, over 972297.26 frames.], batch size: 13, lr: 4.30e-04 +2022-05-05 02:08:49,848 INFO [train.py:715] (7/8) Epoch 4, batch 32900, loss[loss=0.1696, simple_loss=0.2307, pruned_loss=0.05431, over 4920.00 frames.], tot_loss[loss=0.155, simple_loss=0.2253, pruned_loss=0.04233, over 972248.90 frames.], batch size: 23, lr: 4.30e-04 +2022-05-05 02:09:30,078 INFO [train.py:715] (7/8) Epoch 4, batch 32950, loss[loss=0.1387, simple_loss=0.2148, pruned_loss=0.03134, over 4896.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2251, pruned_loss=0.04208, over 972122.51 frames.], batch size: 19, lr: 4.30e-04 +2022-05-05 02:10:10,305 INFO [train.py:715] (7/8) Epoch 4, batch 33000, loss[loss=0.1594, simple_loss=0.2204, pruned_loss=0.04918, over 4859.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2264, pruned_loss=0.04254, over 973059.43 frames.], batch size: 32, lr: 4.30e-04 +2022-05-05 02:10:10,305 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 02:10:20,092 INFO [train.py:742] (7/8) Epoch 4, validation: loss=0.1115, simple_loss=0.197, pruned_loss=0.01298, over 914524.00 frames. 
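The tot_loss[... over N frames] figures logged above behave like a frame-weighted running average of the per-batch losses: within an epoch the frame count climbs from the first batches and levels off near 972k frames, and at the start of Epoch 5 it rebuilds from 4691 frames (batch 0) through ~219k (batch 50) back to that plateau. Below is a minimal sketch of such an aggregate, assuming a per-batch decay of roughly 0.995 chosen only to reproduce the plateau seen here; RunningLoss and its fields are illustrative names, not the aggregation actually used by train.py.

    # Illustrative sketch only: an exponentially decayed, frame-weighted
    # running loss with the same shape as the "tot_loss[... over N frames]"
    # entries in this log.  The decay constant is an assumption.
    from dataclasses import dataclass

    @dataclass
    class RunningLoss:
        decay: float = 0.995   # assumed per-batch decay of the history
        frames: float = 0.0    # decayed total number of frames seen
        weighted: float = 0.0  # decayed sum of (per-frame loss * frames)

        def update(self, loss_per_frame: float, num_frames: int) -> None:
            # Shrink the history, then add the current batch's contribution.
            self.frames = self.frames * self.decay + num_frames
            self.weighted = self.weighted * self.decay + loss_per_frame * num_frames

        @property
        def value(self) -> float:
            # Frame-weighted average, i.e. the number reported as tot_loss[loss=...].
            return self.weighted / max(self.frames, 1.0)

    if __name__ == "__main__":
        tracker = RunningLoss()
        for _ in range(600):  # feed a constant toy loss for ~600 batches
            tracker.update(loss_per_frame=0.154, num_frames=4860)
        # The frame count settles near 4860 / (1 - 0.995) ~= 972000,
        # matching the ~972k-frame plateau in the entries above.
        print(f"tot_loss={tracker.value:.4f} over {tracker.frames:.2f} frames")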
+2022-05-05 02:11:00,302 INFO [train.py:715] (7/8) Epoch 4, batch 33050, loss[loss=0.1213, simple_loss=0.1915, pruned_loss=0.02555, over 4844.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2256, pruned_loss=0.04231, over 973053.52 frames.], batch size: 30, lr: 4.30e-04 +2022-05-05 02:11:40,005 INFO [train.py:715] (7/8) Epoch 4, batch 33100, loss[loss=0.1581, simple_loss=0.2266, pruned_loss=0.04482, over 4748.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2252, pruned_loss=0.04219, over 971781.30 frames.], batch size: 16, lr: 4.30e-04 +2022-05-05 02:12:20,029 INFO [train.py:715] (7/8) Epoch 4, batch 33150, loss[loss=0.1859, simple_loss=0.2403, pruned_loss=0.06577, over 4861.00 frames.], tot_loss[loss=0.155, simple_loss=0.2255, pruned_loss=0.04223, over 972624.67 frames.], batch size: 12, lr: 4.30e-04 +2022-05-05 02:13:00,225 INFO [train.py:715] (7/8) Epoch 4, batch 33200, loss[loss=0.1335, simple_loss=0.2084, pruned_loss=0.02929, over 4964.00 frames.], tot_loss[loss=0.155, simple_loss=0.2256, pruned_loss=0.04219, over 973334.19 frames.], batch size: 24, lr: 4.29e-04 +2022-05-05 02:13:40,206 INFO [train.py:715] (7/8) Epoch 4, batch 33250, loss[loss=0.1404, simple_loss=0.2061, pruned_loss=0.03734, over 4866.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2256, pruned_loss=0.04211, over 972671.15 frames.], batch size: 32, lr: 4.29e-04 +2022-05-05 02:14:20,218 INFO [train.py:715] (7/8) Epoch 4, batch 33300, loss[loss=0.1461, simple_loss=0.2251, pruned_loss=0.03358, over 4860.00 frames.], tot_loss[loss=0.1539, simple_loss=0.225, pruned_loss=0.04141, over 973630.72 frames.], batch size: 16, lr: 4.29e-04 +2022-05-05 02:14:59,209 INFO [train.py:715] (7/8) Epoch 4, batch 33350, loss[loss=0.1832, simple_loss=0.245, pruned_loss=0.0607, over 4985.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2256, pruned_loss=0.04234, over 973050.99 frames.], batch size: 14, lr: 4.29e-04 +2022-05-05 02:15:38,984 INFO [train.py:715] (7/8) Epoch 4, batch 33400, loss[loss=0.1429, simple_loss=0.2204, pruned_loss=0.03268, over 4951.00 frames.], tot_loss[loss=0.155, simple_loss=0.2258, pruned_loss=0.04208, over 972738.00 frames.], batch size: 15, lr: 4.29e-04 +2022-05-05 02:16:18,847 INFO [train.py:715] (7/8) Epoch 4, batch 33450, loss[loss=0.1436, simple_loss=0.2145, pruned_loss=0.03636, over 4912.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2256, pruned_loss=0.04198, over 972447.51 frames.], batch size: 17, lr: 4.29e-04 +2022-05-05 02:16:58,395 INFO [train.py:715] (7/8) Epoch 4, batch 33500, loss[loss=0.1808, simple_loss=0.2444, pruned_loss=0.05856, over 4946.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2258, pruned_loss=0.04222, over 972353.99 frames.], batch size: 21, lr: 4.29e-04 +2022-05-05 02:17:38,202 INFO [train.py:715] (7/8) Epoch 4, batch 33550, loss[loss=0.1342, simple_loss=0.2007, pruned_loss=0.03382, over 4941.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2254, pruned_loss=0.04203, over 972342.13 frames.], batch size: 29, lr: 4.29e-04 +2022-05-05 02:18:17,698 INFO [train.py:715] (7/8) Epoch 4, batch 33600, loss[loss=0.1278, simple_loss=0.1949, pruned_loss=0.03039, over 4925.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2258, pruned_loss=0.04264, over 972296.96 frames.], batch size: 18, lr: 4.29e-04 +2022-05-05 02:18:57,441 INFO [train.py:715] (7/8) Epoch 4, batch 33650, loss[loss=0.1669, simple_loss=0.2304, pruned_loss=0.0517, over 4965.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2252, pruned_loss=0.0426, over 971377.52 frames.], batch size: 15, lr: 4.29e-04 +2022-05-05 02:19:36,832 
INFO [train.py:715] (7/8) Epoch 4, batch 33700, loss[loss=0.1593, simple_loss=0.2289, pruned_loss=0.04483, over 4988.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2243, pruned_loss=0.0424, over 971257.59 frames.], batch size: 25, lr: 4.29e-04 +2022-05-05 02:20:16,628 INFO [train.py:715] (7/8) Epoch 4, batch 33750, loss[loss=0.2011, simple_loss=0.2526, pruned_loss=0.0748, over 4862.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2247, pruned_loss=0.04278, over 971493.34 frames.], batch size: 22, lr: 4.29e-04 +2022-05-05 02:20:56,488 INFO [train.py:715] (7/8) Epoch 4, batch 33800, loss[loss=0.153, simple_loss=0.2196, pruned_loss=0.04318, over 4893.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2244, pruned_loss=0.04228, over 971849.07 frames.], batch size: 19, lr: 4.29e-04 +2022-05-05 02:21:35,974 INFO [train.py:715] (7/8) Epoch 4, batch 33850, loss[loss=0.1419, simple_loss=0.2169, pruned_loss=0.03349, over 4889.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2244, pruned_loss=0.0426, over 972390.31 frames.], batch size: 22, lr: 4.29e-04 +2022-05-05 02:22:15,607 INFO [train.py:715] (7/8) Epoch 4, batch 33900, loss[loss=0.1738, simple_loss=0.235, pruned_loss=0.05631, over 4770.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2234, pruned_loss=0.04206, over 972623.65 frames.], batch size: 14, lr: 4.29e-04 +2022-05-05 02:22:55,360 INFO [train.py:715] (7/8) Epoch 4, batch 33950, loss[loss=0.1402, simple_loss=0.2226, pruned_loss=0.02884, over 4878.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2243, pruned_loss=0.04258, over 972525.13 frames.], batch size: 16, lr: 4.29e-04 +2022-05-05 02:23:35,329 INFO [train.py:715] (7/8) Epoch 4, batch 34000, loss[loss=0.1533, simple_loss=0.2177, pruned_loss=0.04449, over 4943.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2235, pruned_loss=0.04194, over 971600.47 frames.], batch size: 21, lr: 4.28e-04 +2022-05-05 02:24:14,855 INFO [train.py:715] (7/8) Epoch 4, batch 34050, loss[loss=0.134, simple_loss=0.211, pruned_loss=0.02851, over 4836.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2246, pruned_loss=0.04244, over 971659.73 frames.], batch size: 12, lr: 4.28e-04 +2022-05-05 02:24:54,571 INFO [train.py:715] (7/8) Epoch 4, batch 34100, loss[loss=0.1677, simple_loss=0.2322, pruned_loss=0.05167, over 4769.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2251, pruned_loss=0.04313, over 971788.96 frames.], batch size: 16, lr: 4.28e-04 +2022-05-05 02:25:34,634 INFO [train.py:715] (7/8) Epoch 4, batch 34150, loss[loss=0.1573, simple_loss=0.2294, pruned_loss=0.04263, over 4750.00 frames.], tot_loss[loss=0.155, simple_loss=0.2245, pruned_loss=0.04281, over 970754.15 frames.], batch size: 19, lr: 4.28e-04 +2022-05-05 02:26:13,485 INFO [train.py:715] (7/8) Epoch 4, batch 34200, loss[loss=0.1462, simple_loss=0.2161, pruned_loss=0.03813, over 4899.00 frames.], tot_loss[loss=0.155, simple_loss=0.2246, pruned_loss=0.04269, over 970574.87 frames.], batch size: 19, lr: 4.28e-04 +2022-05-05 02:26:54,318 INFO [train.py:715] (7/8) Epoch 4, batch 34250, loss[loss=0.1489, simple_loss=0.2268, pruned_loss=0.03551, over 4818.00 frames.], tot_loss[loss=0.155, simple_loss=0.2248, pruned_loss=0.04256, over 970938.43 frames.], batch size: 21, lr: 4.28e-04 +2022-05-05 02:27:34,193 INFO [train.py:715] (7/8) Epoch 4, batch 34300, loss[loss=0.15, simple_loss=0.2075, pruned_loss=0.04627, over 4750.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2253, pruned_loss=0.04285, over 971407.08 frames.], batch size: 12, lr: 4.28e-04 +2022-05-05 02:28:13,944 INFO [train.py:715] (7/8) 
Epoch 4, batch 34350, loss[loss=0.1466, simple_loss=0.2159, pruned_loss=0.03869, over 4888.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2245, pruned_loss=0.04264, over 972072.91 frames.], batch size: 22, lr: 4.28e-04 +2022-05-05 02:28:53,977 INFO [train.py:715] (7/8) Epoch 4, batch 34400, loss[loss=0.1655, simple_loss=0.2376, pruned_loss=0.04664, over 4931.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2252, pruned_loss=0.04317, over 971559.68 frames.], batch size: 17, lr: 4.28e-04 +2022-05-05 02:29:33,808 INFO [train.py:715] (7/8) Epoch 4, batch 34450, loss[loss=0.165, simple_loss=0.2408, pruned_loss=0.04458, over 4703.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2246, pruned_loss=0.04289, over 972121.60 frames.], batch size: 15, lr: 4.28e-04 +2022-05-05 02:30:14,470 INFO [train.py:715] (7/8) Epoch 4, batch 34500, loss[loss=0.1883, simple_loss=0.2439, pruned_loss=0.06635, over 4766.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2255, pruned_loss=0.04311, over 972193.42 frames.], batch size: 16, lr: 4.28e-04 +2022-05-05 02:30:53,315 INFO [train.py:715] (7/8) Epoch 4, batch 34550, loss[loss=0.1846, simple_loss=0.2476, pruned_loss=0.06082, over 4784.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2259, pruned_loss=0.04323, over 970911.92 frames.], batch size: 17, lr: 4.28e-04 +2022-05-05 02:31:33,263 INFO [train.py:715] (7/8) Epoch 4, batch 34600, loss[loss=0.1705, simple_loss=0.2481, pruned_loss=0.04652, over 4931.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2248, pruned_loss=0.04245, over 971595.96 frames.], batch size: 29, lr: 4.28e-04 +2022-05-05 02:32:13,238 INFO [train.py:715] (7/8) Epoch 4, batch 34650, loss[loss=0.1345, simple_loss=0.1963, pruned_loss=0.03642, over 4973.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.0425, over 972233.60 frames.], batch size: 14, lr: 4.28e-04 +2022-05-05 02:32:52,591 INFO [train.py:715] (7/8) Epoch 4, batch 34700, loss[loss=0.142, simple_loss=0.206, pruned_loss=0.03898, over 4806.00 frames.], tot_loss[loss=0.1552, simple_loss=0.225, pruned_loss=0.0427, over 973079.59 frames.], batch size: 14, lr: 4.28e-04 +2022-05-05 02:33:30,874 INFO [train.py:715] (7/8) Epoch 4, batch 34750, loss[loss=0.1365, simple_loss=0.2118, pruned_loss=0.03056, over 4760.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2244, pruned_loss=0.04171, over 972301.18 frames.], batch size: 18, lr: 4.28e-04 +2022-05-05 02:34:07,936 INFO [train.py:715] (7/8) Epoch 4, batch 34800, loss[loss=0.222, simple_loss=0.2929, pruned_loss=0.07551, over 4923.00 frames.], tot_loss[loss=0.155, simple_loss=0.2255, pruned_loss=0.04228, over 973142.63 frames.], batch size: 18, lr: 4.27e-04 +2022-05-05 02:34:57,766 INFO [train.py:715] (7/8) Epoch 5, batch 0, loss[loss=0.1381, simple_loss=0.2153, pruned_loss=0.03044, over 4691.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2153, pruned_loss=0.03044, over 4691.00 frames.], batch size: 15, lr: 4.02e-04 +2022-05-05 02:35:38,102 INFO [train.py:715] (7/8) Epoch 5, batch 50, loss[loss=0.1652, simple_loss=0.2488, pruned_loss=0.0408, over 4780.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2237, pruned_loss=0.04199, over 219514.42 frames.], batch size: 18, lr: 4.02e-04 +2022-05-05 02:36:17,799 INFO [train.py:715] (7/8) Epoch 5, batch 100, loss[loss=0.1437, simple_loss=0.2139, pruned_loss=0.03676, over 4750.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2249, pruned_loss=0.0427, over 386351.81 frames.], batch size: 19, lr: 4.02e-04 +2022-05-05 02:36:57,767 INFO [train.py:715] (7/8) Epoch 5, batch 150, loss[loss=0.1786, 
simple_loss=0.2419, pruned_loss=0.05762, over 4797.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2235, pruned_loss=0.04203, over 516481.51 frames.], batch size: 21, lr: 4.02e-04 +2022-05-05 02:37:38,288 INFO [train.py:715] (7/8) Epoch 5, batch 200, loss[loss=0.1671, simple_loss=0.2318, pruned_loss=0.05117, over 4933.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2236, pruned_loss=0.04205, over 618681.30 frames.], batch size: 39, lr: 4.02e-04 +2022-05-05 02:38:17,739 INFO [train.py:715] (7/8) Epoch 5, batch 250, loss[loss=0.1494, simple_loss=0.2186, pruned_loss=0.04009, over 4973.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2225, pruned_loss=0.04157, over 696706.10 frames.], batch size: 14, lr: 4.02e-04 +2022-05-05 02:38:57,156 INFO [train.py:715] (7/8) Epoch 5, batch 300, loss[loss=0.1468, simple_loss=0.2141, pruned_loss=0.03971, over 4864.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2233, pruned_loss=0.04246, over 758335.31 frames.], batch size: 16, lr: 4.01e-04 +2022-05-05 02:39:36,894 INFO [train.py:715] (7/8) Epoch 5, batch 350, loss[loss=0.2041, simple_loss=0.2676, pruned_loss=0.07033, over 4956.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2244, pruned_loss=0.04265, over 805726.81 frames.], batch size: 39, lr: 4.01e-04 +2022-05-05 02:40:16,658 INFO [train.py:715] (7/8) Epoch 5, batch 400, loss[loss=0.1439, simple_loss=0.2142, pruned_loss=0.03677, over 4939.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2237, pruned_loss=0.04197, over 843193.81 frames.], batch size: 21, lr: 4.01e-04 +2022-05-05 02:40:56,051 INFO [train.py:715] (7/8) Epoch 5, batch 450, loss[loss=0.154, simple_loss=0.2306, pruned_loss=0.03868, over 4857.00 frames.], tot_loss[loss=0.1541, simple_loss=0.224, pruned_loss=0.04208, over 871657.99 frames.], batch size: 20, lr: 4.01e-04 +2022-05-05 02:41:35,799 INFO [train.py:715] (7/8) Epoch 5, batch 500, loss[loss=0.1727, simple_loss=0.2408, pruned_loss=0.0523, over 4901.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2247, pruned_loss=0.04254, over 894294.99 frames.], batch size: 17, lr: 4.01e-04 +2022-05-05 02:42:15,655 INFO [train.py:715] (7/8) Epoch 5, batch 550, loss[loss=0.1734, simple_loss=0.251, pruned_loss=0.04792, over 4781.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2251, pruned_loss=0.0425, over 911277.93 frames.], batch size: 17, lr: 4.01e-04 +2022-05-05 02:42:54,761 INFO [train.py:715] (7/8) Epoch 5, batch 600, loss[loss=0.1275, simple_loss=0.2007, pruned_loss=0.02717, over 4914.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04204, over 924831.53 frames.], batch size: 18, lr: 4.01e-04 +2022-05-05 02:43:34,143 INFO [train.py:715] (7/8) Epoch 5, batch 650, loss[loss=0.1746, simple_loss=0.2416, pruned_loss=0.05383, over 4904.00 frames.], tot_loss[loss=0.155, simple_loss=0.2252, pruned_loss=0.0424, over 935348.75 frames.], batch size: 17, lr: 4.01e-04 +2022-05-05 02:44:13,850 INFO [train.py:715] (7/8) Epoch 5, batch 700, loss[loss=0.149, simple_loss=0.2202, pruned_loss=0.03892, over 4952.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2247, pruned_loss=0.04206, over 943512.31 frames.], batch size: 35, lr: 4.01e-04 +2022-05-05 02:44:53,908 INFO [train.py:715] (7/8) Epoch 5, batch 750, loss[loss=0.1514, simple_loss=0.221, pruned_loss=0.04085, over 4891.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2246, pruned_loss=0.04191, over 949981.03 frames.], batch size: 22, lr: 4.01e-04 +2022-05-05 02:45:33,283 INFO [train.py:715] (7/8) Epoch 5, batch 800, loss[loss=0.1553, simple_loss=0.2207, pruned_loss=0.04493, over 4972.00 
frames.], tot_loss[loss=0.1541, simple_loss=0.2245, pruned_loss=0.0418, over 956070.37 frames.], batch size: 14, lr: 4.01e-04 +2022-05-05 02:46:12,795 INFO [train.py:715] (7/8) Epoch 5, batch 850, loss[loss=0.1679, simple_loss=0.2315, pruned_loss=0.0522, over 4961.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04241, over 959962.52 frames.], batch size: 24, lr: 4.01e-04 +2022-05-05 02:46:52,366 INFO [train.py:715] (7/8) Epoch 5, batch 900, loss[loss=0.134, simple_loss=0.2066, pruned_loss=0.03066, over 4959.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2241, pruned_loss=0.04208, over 962987.34 frames.], batch size: 21, lr: 4.01e-04 +2022-05-05 02:47:31,845 INFO [train.py:715] (7/8) Epoch 5, batch 950, loss[loss=0.1479, simple_loss=0.2219, pruned_loss=0.03694, over 4831.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2237, pruned_loss=0.04194, over 965141.15 frames.], batch size: 13, lr: 4.01e-04 +2022-05-05 02:48:11,354 INFO [train.py:715] (7/8) Epoch 5, batch 1000, loss[loss=0.1475, simple_loss=0.2269, pruned_loss=0.03403, over 4928.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2237, pruned_loss=0.04143, over 967174.31 frames.], batch size: 29, lr: 4.01e-04 +2022-05-05 02:48:50,617 INFO [train.py:715] (7/8) Epoch 5, batch 1050, loss[loss=0.1991, simple_loss=0.262, pruned_loss=0.06811, over 4952.00 frames.], tot_loss[loss=0.154, simple_loss=0.2242, pruned_loss=0.04185, over 969121.35 frames.], batch size: 24, lr: 4.01e-04 +2022-05-05 02:49:30,327 INFO [train.py:715] (7/8) Epoch 5, batch 1100, loss[loss=0.1617, simple_loss=0.2314, pruned_loss=0.04603, over 4945.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2241, pruned_loss=0.04171, over 969572.83 frames.], batch size: 39, lr: 4.01e-04 +2022-05-05 02:50:09,333 INFO [train.py:715] (7/8) Epoch 5, batch 1150, loss[loss=0.1596, simple_loss=0.2247, pruned_loss=0.04729, over 4994.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2248, pruned_loss=0.04232, over 970854.88 frames.], batch size: 16, lr: 4.00e-04 +2022-05-05 02:50:49,093 INFO [train.py:715] (7/8) Epoch 5, batch 1200, loss[loss=0.1248, simple_loss=0.1882, pruned_loss=0.03067, over 4842.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04202, over 971424.47 frames.], batch size: 13, lr: 4.00e-04 +2022-05-05 02:51:29,242 INFO [train.py:715] (7/8) Epoch 5, batch 1250, loss[loss=0.1655, simple_loss=0.2421, pruned_loss=0.0444, over 4805.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2254, pruned_loss=0.04241, over 971837.70 frames.], batch size: 21, lr: 4.00e-04 +2022-05-05 02:52:08,409 INFO [train.py:715] (7/8) Epoch 5, batch 1300, loss[loss=0.133, simple_loss=0.2043, pruned_loss=0.03085, over 4868.00 frames.], tot_loss[loss=0.154, simple_loss=0.2243, pruned_loss=0.04185, over 970939.67 frames.], batch size: 22, lr: 4.00e-04 +2022-05-05 02:52:48,191 INFO [train.py:715] (7/8) Epoch 5, batch 1350, loss[loss=0.1707, simple_loss=0.2325, pruned_loss=0.05441, over 4847.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2245, pruned_loss=0.0421, over 971024.76 frames.], batch size: 32, lr: 4.00e-04 +2022-05-05 02:53:27,485 INFO [train.py:715] (7/8) Epoch 5, batch 1400, loss[loss=0.1508, simple_loss=0.2296, pruned_loss=0.03602, over 4931.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2236, pruned_loss=0.04145, over 970426.66 frames.], batch size: 21, lr: 4.00e-04 +2022-05-05 02:54:07,302 INFO [train.py:715] (7/8) Epoch 5, batch 1450, loss[loss=0.1643, simple_loss=0.231, pruned_loss=0.04877, over 4976.00 frames.], tot_loss[loss=0.1542, 
simple_loss=0.2243, pruned_loss=0.04204, over 970359.69 frames.], batch size: 39, lr: 4.00e-04 +2022-05-05 02:54:46,729 INFO [train.py:715] (7/8) Epoch 5, batch 1500, loss[loss=0.1566, simple_loss=0.2328, pruned_loss=0.04014, over 4969.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2238, pruned_loss=0.04141, over 972077.34 frames.], batch size: 24, lr: 4.00e-04 +2022-05-05 02:55:25,724 INFO [train.py:715] (7/8) Epoch 5, batch 1550, loss[loss=0.1558, simple_loss=0.2227, pruned_loss=0.0445, over 4795.00 frames.], tot_loss[loss=0.1536, simple_loss=0.224, pruned_loss=0.04159, over 972286.00 frames.], batch size: 24, lr: 4.00e-04 +2022-05-05 02:56:05,366 INFO [train.py:715] (7/8) Epoch 5, batch 1600, loss[loss=0.1911, simple_loss=0.2532, pruned_loss=0.06452, over 4748.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2235, pruned_loss=0.04145, over 972719.34 frames.], batch size: 19, lr: 4.00e-04 +2022-05-05 02:56:45,704 INFO [train.py:715] (7/8) Epoch 5, batch 1650, loss[loss=0.1395, simple_loss=0.2015, pruned_loss=0.03879, over 4793.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2239, pruned_loss=0.04183, over 972361.75 frames.], batch size: 17, lr: 4.00e-04 +2022-05-05 02:57:24,645 INFO [train.py:715] (7/8) Epoch 5, batch 1700, loss[loss=0.1608, simple_loss=0.2266, pruned_loss=0.04747, over 4880.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2241, pruned_loss=0.04207, over 972530.95 frames.], batch size: 16, lr: 4.00e-04 +2022-05-05 02:58:05,303 INFO [train.py:715] (7/8) Epoch 5, batch 1750, loss[loss=0.1207, simple_loss=0.1966, pruned_loss=0.02243, over 4932.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2237, pruned_loss=0.04186, over 972952.49 frames.], batch size: 29, lr: 4.00e-04 +2022-05-05 02:58:45,446 INFO [train.py:715] (7/8) Epoch 5, batch 1800, loss[loss=0.1003, simple_loss=0.1718, pruned_loss=0.01439, over 4883.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2224, pruned_loss=0.04132, over 972385.37 frames.], batch size: 22, lr: 4.00e-04 +2022-05-05 02:59:25,895 INFO [train.py:715] (7/8) Epoch 5, batch 1850, loss[loss=0.1267, simple_loss=0.1998, pruned_loss=0.02679, over 4805.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2231, pruned_loss=0.0417, over 972418.40 frames.], batch size: 13, lr: 4.00e-04 +2022-05-05 03:00:06,297 INFO [train.py:715] (7/8) Epoch 5, batch 1900, loss[loss=0.1516, simple_loss=0.2241, pruned_loss=0.03958, over 4982.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2241, pruned_loss=0.04232, over 973367.78 frames.], batch size: 35, lr: 4.00e-04 +2022-05-05 03:00:46,052 INFO [train.py:715] (7/8) Epoch 5, batch 1950, loss[loss=0.1401, simple_loss=0.2099, pruned_loss=0.0351, over 4965.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2241, pruned_loss=0.0422, over 972205.82 frames.], batch size: 24, lr: 4.00e-04 +2022-05-05 03:01:29,128 INFO [train.py:715] (7/8) Epoch 5, batch 2000, loss[loss=0.1726, simple_loss=0.2358, pruned_loss=0.05472, over 4769.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2237, pruned_loss=0.04165, over 972244.87 frames.], batch size: 19, lr: 4.00e-04 +2022-05-05 03:02:09,159 INFO [train.py:715] (7/8) Epoch 5, batch 2050, loss[loss=0.129, simple_loss=0.1981, pruned_loss=0.02993, over 4841.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2243, pruned_loss=0.04173, over 972385.43 frames.], batch size: 12, lr: 3.99e-04 +2022-05-05 03:02:49,520 INFO [train.py:715] (7/8) Epoch 5, batch 2100, loss[loss=0.1342, simple_loss=0.2109, pruned_loss=0.02874, over 4980.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2231, 
pruned_loss=0.04135, over 972156.66 frames.], batch size: 24, lr: 3.99e-04 +2022-05-05 03:03:30,101 INFO [train.py:715] (7/8) Epoch 5, batch 2150, loss[loss=0.1637, simple_loss=0.2309, pruned_loss=0.04824, over 4917.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2224, pruned_loss=0.0411, over 971493.83 frames.], batch size: 18, lr: 3.99e-04 +2022-05-05 03:04:09,668 INFO [train.py:715] (7/8) Epoch 5, batch 2200, loss[loss=0.1527, simple_loss=0.2247, pruned_loss=0.04034, over 4953.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2222, pruned_loss=0.04113, over 971852.76 frames.], batch size: 24, lr: 3.99e-04 +2022-05-05 03:04:50,063 INFO [train.py:715] (7/8) Epoch 5, batch 2250, loss[loss=0.1539, simple_loss=0.2339, pruned_loss=0.0369, over 4807.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04109, over 972190.16 frames.], batch size: 25, lr: 3.99e-04 +2022-05-05 03:05:30,779 INFO [train.py:715] (7/8) Epoch 5, batch 2300, loss[loss=0.1312, simple_loss=0.1923, pruned_loss=0.03503, over 4811.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04123, over 972076.31 frames.], batch size: 13, lr: 3.99e-04 +2022-05-05 03:06:10,990 INFO [train.py:715] (7/8) Epoch 5, batch 2350, loss[loss=0.1337, simple_loss=0.2094, pruned_loss=0.02896, over 4984.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04118, over 971844.90 frames.], batch size: 24, lr: 3.99e-04 +2022-05-05 03:06:51,193 INFO [train.py:715] (7/8) Epoch 5, batch 2400, loss[loss=0.1415, simple_loss=0.2264, pruned_loss=0.02832, over 4755.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2218, pruned_loss=0.0408, over 971783.66 frames.], batch size: 19, lr: 3.99e-04 +2022-05-05 03:07:31,711 INFO [train.py:715] (7/8) Epoch 5, batch 2450, loss[loss=0.167, simple_loss=0.2321, pruned_loss=0.05098, over 4819.00 frames.], tot_loss[loss=0.1519, simple_loss=0.222, pruned_loss=0.04092, over 970947.67 frames.], batch size: 25, lr: 3.99e-04 +2022-05-05 03:08:12,417 INFO [train.py:715] (7/8) Epoch 5, batch 2500, loss[loss=0.1245, simple_loss=0.2075, pruned_loss=0.02079, over 4982.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.04024, over 971407.10 frames.], batch size: 28, lr: 3.99e-04 +2022-05-05 03:08:52,449 INFO [train.py:715] (7/8) Epoch 5, batch 2550, loss[loss=0.1286, simple_loss=0.2047, pruned_loss=0.02624, over 4766.00 frames.], tot_loss[loss=0.152, simple_loss=0.2226, pruned_loss=0.04071, over 970939.60 frames.], batch size: 18, lr: 3.99e-04 +2022-05-05 03:09:33,373 INFO [train.py:715] (7/8) Epoch 5, batch 2600, loss[loss=0.1792, simple_loss=0.238, pruned_loss=0.06022, over 4764.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2241, pruned_loss=0.04156, over 970626.18 frames.], batch size: 19, lr: 3.99e-04 +2022-05-05 03:10:13,561 INFO [train.py:715] (7/8) Epoch 5, batch 2650, loss[loss=0.1664, simple_loss=0.2361, pruned_loss=0.04836, over 4709.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2247, pruned_loss=0.04129, over 970749.88 frames.], batch size: 15, lr: 3.99e-04 +2022-05-05 03:10:54,140 INFO [train.py:715] (7/8) Epoch 5, batch 2700, loss[loss=0.1349, simple_loss=0.2059, pruned_loss=0.03192, over 4708.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2231, pruned_loss=0.04074, over 971493.45 frames.], batch size: 15, lr: 3.99e-04 +2022-05-05 03:11:34,317 INFO [train.py:715] (7/8) Epoch 5, batch 2750, loss[loss=0.1751, simple_loss=0.2385, pruned_loss=0.05582, over 4888.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2228, pruned_loss=0.04075, over 971919.12 
frames.], batch size: 22, lr: 3.99e-04 +2022-05-05 03:12:14,289 INFO [train.py:715] (7/8) Epoch 5, batch 2800, loss[loss=0.1471, simple_loss=0.2148, pruned_loss=0.03967, over 4757.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2234, pruned_loss=0.04103, over 971975.64 frames.], batch size: 16, lr: 3.99e-04 +2022-05-05 03:12:54,884 INFO [train.py:715] (7/8) Epoch 5, batch 2850, loss[loss=0.185, simple_loss=0.2464, pruned_loss=0.0618, over 4894.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2229, pruned_loss=0.04064, over 972043.90 frames.], batch size: 22, lr: 3.99e-04 +2022-05-05 03:13:35,009 INFO [train.py:715] (7/8) Epoch 5, batch 2900, loss[loss=0.147, simple_loss=0.2272, pruned_loss=0.03337, over 4930.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2224, pruned_loss=0.04067, over 971901.75 frames.], batch size: 29, lr: 3.99e-04 +2022-05-05 03:14:15,398 INFO [train.py:715] (7/8) Epoch 5, batch 2950, loss[loss=0.1385, simple_loss=0.2089, pruned_loss=0.03406, over 4796.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2224, pruned_loss=0.04092, over 971715.48 frames.], batch size: 24, lr: 3.98e-04 +2022-05-05 03:14:54,477 INFO [train.py:715] (7/8) Epoch 5, batch 3000, loss[loss=0.1765, simple_loss=0.2486, pruned_loss=0.05221, over 4831.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2231, pruned_loss=0.0418, over 972453.61 frames.], batch size: 30, lr: 3.98e-04 +2022-05-05 03:14:54,477 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 03:15:03,920 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1108, simple_loss=0.1962, pruned_loss=0.01274, over 914524.00 frames. +2022-05-05 03:15:42,395 INFO [train.py:715] (7/8) Epoch 5, batch 3050, loss[loss=0.1044, simple_loss=0.1689, pruned_loss=0.01998, over 4988.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2227, pruned_loss=0.04136, over 972466.96 frames.], batch size: 14, lr: 3.98e-04 +2022-05-05 03:16:21,557 INFO [train.py:715] (7/8) Epoch 5, batch 3100, loss[loss=0.1493, simple_loss=0.2215, pruned_loss=0.03852, over 4893.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2233, pruned_loss=0.04143, over 972384.21 frames.], batch size: 22, lr: 3.98e-04 +2022-05-05 03:17:00,520 INFO [train.py:715] (7/8) Epoch 5, batch 3150, loss[loss=0.1773, simple_loss=0.2418, pruned_loss=0.05646, over 4786.00 frames.], tot_loss[loss=0.1525, simple_loss=0.223, pruned_loss=0.041, over 972918.24 frames.], batch size: 24, lr: 3.98e-04 +2022-05-05 03:17:40,039 INFO [train.py:715] (7/8) Epoch 5, batch 3200, loss[loss=0.1686, simple_loss=0.2231, pruned_loss=0.05704, over 4885.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2227, pruned_loss=0.04117, over 973167.64 frames.], batch size: 16, lr: 3.98e-04 +2022-05-05 03:18:19,746 INFO [train.py:715] (7/8) Epoch 5, batch 3250, loss[loss=0.1457, simple_loss=0.2075, pruned_loss=0.04191, over 4917.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2223, pruned_loss=0.04105, over 972314.45 frames.], batch size: 23, lr: 3.98e-04 +2022-05-05 03:18:58,960 INFO [train.py:715] (7/8) Epoch 5, batch 3300, loss[loss=0.1583, simple_loss=0.2246, pruned_loss=0.04596, over 4755.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2229, pruned_loss=0.04128, over 972105.51 frames.], batch size: 19, lr: 3.98e-04 +2022-05-05 03:19:38,241 INFO [train.py:715] (7/8) Epoch 5, batch 3350, loss[loss=0.127, simple_loss=0.1926, pruned_loss=0.03068, over 4960.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2236, pruned_loss=0.04176, over 972856.05 frames.], batch size: 15, lr: 3.98e-04 +2022-05-05 03:20:17,973 INFO [train.py:715] 
(7/8) Epoch 5, batch 3400, loss[loss=0.1442, simple_loss=0.2088, pruned_loss=0.03986, over 4811.00 frames.], tot_loss[loss=0.154, simple_loss=0.2241, pruned_loss=0.04195, over 972916.68 frames.], batch size: 12, lr: 3.98e-04 +2022-05-05 03:20:57,514 INFO [train.py:715] (7/8) Epoch 5, batch 3450, loss[loss=0.1466, simple_loss=0.2157, pruned_loss=0.03874, over 4871.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2236, pruned_loss=0.04173, over 972333.62 frames.], batch size: 16, lr: 3.98e-04 +2022-05-05 03:21:36,805 INFO [train.py:715] (7/8) Epoch 5, batch 3500, loss[loss=0.134, simple_loss=0.2017, pruned_loss=0.03314, over 4702.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2236, pruned_loss=0.0417, over 973377.66 frames.], batch size: 15, lr: 3.98e-04 +2022-05-05 03:22:16,032 INFO [train.py:715] (7/8) Epoch 5, batch 3550, loss[loss=0.1482, simple_loss=0.2351, pruned_loss=0.03059, over 4869.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2238, pruned_loss=0.04195, over 972921.64 frames.], batch size: 22, lr: 3.98e-04 +2022-05-05 03:22:55,531 INFO [train.py:715] (7/8) Epoch 5, batch 3600, loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03436, over 4776.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2239, pruned_loss=0.0421, over 972036.45 frames.], batch size: 17, lr: 3.98e-04 +2022-05-05 03:23:34,523 INFO [train.py:715] (7/8) Epoch 5, batch 3650, loss[loss=0.127, simple_loss=0.1993, pruned_loss=0.02734, over 4978.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2233, pruned_loss=0.04184, over 972255.34 frames.], batch size: 14, lr: 3.98e-04 +2022-05-05 03:24:13,762 INFO [train.py:715] (7/8) Epoch 5, batch 3700, loss[loss=0.1465, simple_loss=0.2151, pruned_loss=0.03896, over 4871.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2228, pruned_loss=0.04147, over 971621.91 frames.], batch size: 32, lr: 3.98e-04 +2022-05-05 03:24:53,920 INFO [train.py:715] (7/8) Epoch 5, batch 3750, loss[loss=0.1656, simple_loss=0.2363, pruned_loss=0.04745, over 4889.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2222, pruned_loss=0.04111, over 971685.31 frames.], batch size: 22, lr: 3.98e-04 +2022-05-05 03:25:33,700 INFO [train.py:715] (7/8) Epoch 5, batch 3800, loss[loss=0.1457, simple_loss=0.2134, pruned_loss=0.039, over 4919.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2228, pruned_loss=0.04125, over 971556.81 frames.], batch size: 18, lr: 3.97e-04 +2022-05-05 03:26:13,094 INFO [train.py:715] (7/8) Epoch 5, batch 3850, loss[loss=0.1519, simple_loss=0.216, pruned_loss=0.04388, over 4826.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04165, over 971562.04 frames.], batch size: 26, lr: 3.97e-04 +2022-05-05 03:26:52,958 INFO [train.py:715] (7/8) Epoch 5, batch 3900, loss[loss=0.1396, simple_loss=0.216, pruned_loss=0.03163, over 4784.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2227, pruned_loss=0.04141, over 972064.97 frames.], batch size: 14, lr: 3.97e-04 +2022-05-05 03:27:33,000 INFO [train.py:715] (7/8) Epoch 5, batch 3950, loss[loss=0.1547, simple_loss=0.2205, pruned_loss=0.04445, over 4939.00 frames.], tot_loss[loss=0.152, simple_loss=0.2221, pruned_loss=0.04099, over 972110.71 frames.], batch size: 21, lr: 3.97e-04 +2022-05-05 03:28:13,087 INFO [train.py:715] (7/8) Epoch 5, batch 4000, loss[loss=0.1413, simple_loss=0.2149, pruned_loss=0.03384, over 4911.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2225, pruned_loss=0.04147, over 972283.62 frames.], batch size: 18, lr: 3.97e-04 +2022-05-05 03:28:53,739 INFO [train.py:715] (7/8) Epoch 5, batch 4050, 
loss[loss=0.1507, simple_loss=0.2153, pruned_loss=0.04303, over 4965.00 frames.], tot_loss[loss=0.1541, simple_loss=0.224, pruned_loss=0.04206, over 973158.92 frames.], batch size: 35, lr: 3.97e-04 +2022-05-05 03:29:33,849 INFO [train.py:715] (7/8) Epoch 5, batch 4100, loss[loss=0.1313, simple_loss=0.2094, pruned_loss=0.02661, over 4765.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2239, pruned_loss=0.04173, over 973408.60 frames.], batch size: 18, lr: 3.97e-04 +2022-05-05 03:30:14,070 INFO [train.py:715] (7/8) Epoch 5, batch 4150, loss[loss=0.1722, simple_loss=0.2409, pruned_loss=0.05169, over 4894.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2253, pruned_loss=0.04258, over 973296.43 frames.], batch size: 19, lr: 3.97e-04 +2022-05-05 03:30:53,450 INFO [train.py:715] (7/8) Epoch 5, batch 4200, loss[loss=0.1448, simple_loss=0.2125, pruned_loss=0.0386, over 4989.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2246, pruned_loss=0.04228, over 973190.28 frames.], batch size: 25, lr: 3.97e-04 +2022-05-05 03:31:32,792 INFO [train.py:715] (7/8) Epoch 5, batch 4250, loss[loss=0.1314, simple_loss=0.2047, pruned_loss=0.02901, over 4966.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2237, pruned_loss=0.04136, over 973286.10 frames.], batch size: 35, lr: 3.97e-04 +2022-05-05 03:32:12,488 INFO [train.py:715] (7/8) Epoch 5, batch 4300, loss[loss=0.1667, simple_loss=0.2455, pruned_loss=0.04398, over 4837.00 frames.], tot_loss[loss=0.1536, simple_loss=0.224, pruned_loss=0.04158, over 974102.03 frames.], batch size: 15, lr: 3.97e-04 +2022-05-05 03:32:52,104 INFO [train.py:715] (7/8) Epoch 5, batch 4350, loss[loss=0.1663, simple_loss=0.2258, pruned_loss=0.05341, over 4982.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2248, pruned_loss=0.04199, over 974402.16 frames.], batch size: 15, lr: 3.97e-04 +2022-05-05 03:33:32,071 INFO [train.py:715] (7/8) Epoch 5, batch 4400, loss[loss=0.1404, simple_loss=0.2115, pruned_loss=0.03467, over 4968.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2239, pruned_loss=0.04159, over 973460.60 frames.], batch size: 15, lr: 3.97e-04 +2022-05-05 03:34:10,944 INFO [train.py:715] (7/8) Epoch 5, batch 4450, loss[loss=0.1378, simple_loss=0.2086, pruned_loss=0.03356, over 4990.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2243, pruned_loss=0.04162, over 973634.33 frames.], batch size: 14, lr: 3.97e-04 +2022-05-05 03:34:50,793 INFO [train.py:715] (7/8) Epoch 5, batch 4500, loss[loss=0.1431, simple_loss=0.211, pruned_loss=0.0376, over 4864.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2235, pruned_loss=0.04113, over 974038.14 frames.], batch size: 20, lr: 3.97e-04 +2022-05-05 03:35:30,125 INFO [train.py:715] (7/8) Epoch 5, batch 4550, loss[loss=0.1467, simple_loss=0.2199, pruned_loss=0.0367, over 4967.00 frames.], tot_loss[loss=0.153, simple_loss=0.2237, pruned_loss=0.04116, over 973956.76 frames.], batch size: 14, lr: 3.97e-04 +2022-05-05 03:36:09,740 INFO [train.py:715] (7/8) Epoch 5, batch 4600, loss[loss=0.1559, simple_loss=0.2243, pruned_loss=0.04378, over 4832.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2241, pruned_loss=0.04129, over 972709.91 frames.], batch size: 13, lr: 3.97e-04 +2022-05-05 03:36:50,100 INFO [train.py:715] (7/8) Epoch 5, batch 4650, loss[loss=0.1452, simple_loss=0.2241, pruned_loss=0.03313, over 4838.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2233, pruned_loss=0.04108, over 973821.27 frames.], batch size: 30, lr: 3.97e-04 +2022-05-05 03:37:30,434 INFO [train.py:715] (7/8) Epoch 5, batch 4700, loss[loss=0.1417, simple_loss=0.2125, 
pruned_loss=0.03542, over 4959.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2234, pruned_loss=0.04114, over 973451.44 frames.], batch size: 24, lr: 3.96e-04 +2022-05-05 03:38:10,933 INFO [train.py:715] (7/8) Epoch 5, batch 4750, loss[loss=0.1402, simple_loss=0.2244, pruned_loss=0.02799, over 4877.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2237, pruned_loss=0.04142, over 974012.74 frames.], batch size: 16, lr: 3.96e-04 +2022-05-05 03:38:50,696 INFO [train.py:715] (7/8) Epoch 5, batch 4800, loss[loss=0.1421, simple_loss=0.2169, pruned_loss=0.03358, over 4950.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2231, pruned_loss=0.04099, over 973983.37 frames.], batch size: 21, lr: 3.96e-04 +2022-05-05 03:39:31,186 INFO [train.py:715] (7/8) Epoch 5, batch 4850, loss[loss=0.1351, simple_loss=0.2164, pruned_loss=0.02685, over 4859.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2237, pruned_loss=0.04087, over 974329.58 frames.], batch size: 20, lr: 3.96e-04 +2022-05-05 03:40:11,789 INFO [train.py:715] (7/8) Epoch 5, batch 4900, loss[loss=0.1465, simple_loss=0.221, pruned_loss=0.03604, over 4790.00 frames.], tot_loss[loss=0.153, simple_loss=0.2239, pruned_loss=0.0411, over 973603.25 frames.], batch size: 19, lr: 3.96e-04 +2022-05-05 03:40:51,919 INFO [train.py:715] (7/8) Epoch 5, batch 4950, loss[loss=0.1736, simple_loss=0.2435, pruned_loss=0.05182, over 4954.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2232, pruned_loss=0.04098, over 973248.25 frames.], batch size: 29, lr: 3.96e-04 +2022-05-05 03:41:32,225 INFO [train.py:715] (7/8) Epoch 5, batch 5000, loss[loss=0.1569, simple_loss=0.239, pruned_loss=0.03742, over 4828.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2241, pruned_loss=0.0414, over 973244.93 frames.], batch size: 26, lr: 3.96e-04 +2022-05-05 03:42:13,230 INFO [train.py:715] (7/8) Epoch 5, batch 5050, loss[loss=0.1662, simple_loss=0.2396, pruned_loss=0.04638, over 4822.00 frames.], tot_loss[loss=0.1537, simple_loss=0.224, pruned_loss=0.04164, over 973227.67 frames.], batch size: 15, lr: 3.96e-04 +2022-05-05 03:42:52,851 INFO [train.py:715] (7/8) Epoch 5, batch 5100, loss[loss=0.151, simple_loss=0.2218, pruned_loss=0.04008, over 4843.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2251, pruned_loss=0.04203, over 972644.82 frames.], batch size: 20, lr: 3.96e-04 +2022-05-05 03:43:32,137 INFO [train.py:715] (7/8) Epoch 5, batch 5150, loss[loss=0.1667, simple_loss=0.2279, pruned_loss=0.05269, over 4776.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2249, pruned_loss=0.04217, over 973150.77 frames.], batch size: 14, lr: 3.96e-04 +2022-05-05 03:44:11,859 INFO [train.py:715] (7/8) Epoch 5, batch 5200, loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03151, over 4828.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2242, pruned_loss=0.04167, over 973930.84 frames.], batch size: 27, lr: 3.96e-04 +2022-05-05 03:44:51,641 INFO [train.py:715] (7/8) Epoch 5, batch 5250, loss[loss=0.1568, simple_loss=0.2348, pruned_loss=0.03937, over 4862.00 frames.], tot_loss[loss=0.154, simple_loss=0.2243, pruned_loss=0.04189, over 973704.03 frames.], batch size: 20, lr: 3.96e-04 +2022-05-05 03:45:32,213 INFO [train.py:715] (7/8) Epoch 5, batch 5300, loss[loss=0.1333, simple_loss=0.2119, pruned_loss=0.0273, over 4817.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04126, over 972693.09 frames.], batch size: 27, lr: 3.96e-04 +2022-05-05 03:46:12,531 INFO [train.py:715] (7/8) Epoch 5, batch 5350, loss[loss=0.1622, simple_loss=0.2319, pruned_loss=0.04628, over 4967.00 
frames.], tot_loss[loss=0.1524, simple_loss=0.2229, pruned_loss=0.04101, over 972901.78 frames.], batch size: 24, lr: 3.96e-04 +2022-05-05 03:46:52,867 INFO [train.py:715] (7/8) Epoch 5, batch 5400, loss[loss=0.1498, simple_loss=0.217, pruned_loss=0.04133, over 4934.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04109, over 972650.08 frames.], batch size: 21, lr: 3.96e-04 +2022-05-05 03:47:32,579 INFO [train.py:715] (7/8) Epoch 5, batch 5450, loss[loss=0.1501, simple_loss=0.2209, pruned_loss=0.03959, over 4925.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04124, over 973491.89 frames.], batch size: 18, lr: 3.96e-04 +2022-05-05 03:48:12,700 INFO [train.py:715] (7/8) Epoch 5, batch 5500, loss[loss=0.1429, simple_loss=0.2038, pruned_loss=0.04096, over 4769.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04135, over 972321.25 frames.], batch size: 12, lr: 3.96e-04 +2022-05-05 03:48:53,028 INFO [train.py:715] (7/8) Epoch 5, batch 5550, loss[loss=0.1508, simple_loss=0.2236, pruned_loss=0.03897, over 4894.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2242, pruned_loss=0.04173, over 972059.63 frames.], batch size: 22, lr: 3.96e-04 +2022-05-05 03:49:33,411 INFO [train.py:715] (7/8) Epoch 5, batch 5600, loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.0378, over 4927.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.04149, over 972867.88 frames.], batch size: 21, lr: 3.95e-04 +2022-05-05 03:50:13,546 INFO [train.py:715] (7/8) Epoch 5, batch 5650, loss[loss=0.1795, simple_loss=0.2503, pruned_loss=0.05438, over 4837.00 frames.], tot_loss[loss=0.153, simple_loss=0.2231, pruned_loss=0.04146, over 972719.74 frames.], batch size: 13, lr: 3.95e-04 +2022-05-05 03:50:52,901 INFO [train.py:715] (7/8) Epoch 5, batch 5700, loss[loss=0.2031, simple_loss=0.2702, pruned_loss=0.06793, over 4689.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2231, pruned_loss=0.04123, over 971911.60 frames.], batch size: 15, lr: 3.95e-04 +2022-05-05 03:51:33,321 INFO [train.py:715] (7/8) Epoch 5, batch 5750, loss[loss=0.1441, simple_loss=0.2166, pruned_loss=0.03578, over 4829.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2234, pruned_loss=0.0416, over 971936.64 frames.], batch size: 26, lr: 3.95e-04 +2022-05-05 03:52:13,226 INFO [train.py:715] (7/8) Epoch 5, batch 5800, loss[loss=0.1382, simple_loss=0.2142, pruned_loss=0.03104, over 4753.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2239, pruned_loss=0.04191, over 972064.65 frames.], batch size: 16, lr: 3.95e-04 +2022-05-05 03:52:53,761 INFO [train.py:715] (7/8) Epoch 5, batch 5850, loss[loss=0.1913, simple_loss=0.2671, pruned_loss=0.05777, over 4824.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2244, pruned_loss=0.04202, over 971985.12 frames.], batch size: 15, lr: 3.95e-04 +2022-05-05 03:53:33,398 INFO [train.py:715] (7/8) Epoch 5, batch 5900, loss[loss=0.1466, simple_loss=0.209, pruned_loss=0.04212, over 4994.00 frames.], tot_loss[loss=0.1536, simple_loss=0.224, pruned_loss=0.04164, over 972449.38 frames.], batch size: 14, lr: 3.95e-04 +2022-05-05 03:54:13,786 INFO [train.py:715] (7/8) Epoch 5, batch 5950, loss[loss=0.1494, simple_loss=0.2323, pruned_loss=0.03325, over 4777.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2241, pruned_loss=0.04185, over 971574.65 frames.], batch size: 18, lr: 3.95e-04 +2022-05-05 03:54:53,619 INFO [train.py:715] (7/8) Epoch 5, batch 6000, loss[loss=0.1379, simple_loss=0.2208, pruned_loss=0.02753, over 4985.00 frames.], tot_loss[loss=0.1531, 
simple_loss=0.2233, pruned_loss=0.04144, over 972095.54 frames.], batch size: 28, lr: 3.95e-04 +2022-05-05 03:54:53,620 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 03:55:03,071 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1106, simple_loss=0.1959, pruned_loss=0.01263, over 914524.00 frames. +2022-05-05 03:55:42,939 INFO [train.py:715] (7/8) Epoch 5, batch 6050, loss[loss=0.1568, simple_loss=0.2201, pruned_loss=0.04674, over 4919.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2227, pruned_loss=0.04137, over 972699.37 frames.], batch size: 17, lr: 3.95e-04 +2022-05-05 03:56:22,017 INFO [train.py:715] (7/8) Epoch 5, batch 6100, loss[loss=0.161, simple_loss=0.2313, pruned_loss=0.04533, over 4752.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2228, pruned_loss=0.04142, over 972717.80 frames.], batch size: 19, lr: 3.95e-04 +2022-05-05 03:57:01,852 INFO [train.py:715] (7/8) Epoch 5, batch 6150, loss[loss=0.1269, simple_loss=0.1991, pruned_loss=0.02738, over 4914.00 frames.], tot_loss[loss=0.1527, simple_loss=0.223, pruned_loss=0.04115, over 972642.75 frames.], batch size: 19, lr: 3.95e-04 +2022-05-05 03:57:40,842 INFO [train.py:715] (7/8) Epoch 5, batch 6200, loss[loss=0.1466, simple_loss=0.1998, pruned_loss=0.04676, over 4994.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2234, pruned_loss=0.0411, over 972924.15 frames.], batch size: 14, lr: 3.95e-04 +2022-05-05 03:58:21,091 INFO [train.py:715] (7/8) Epoch 5, batch 6250, loss[loss=0.1367, simple_loss=0.2111, pruned_loss=0.03112, over 4976.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2233, pruned_loss=0.04145, over 972439.48 frames.], batch size: 14, lr: 3.95e-04 +2022-05-05 03:58:59,729 INFO [train.py:715] (7/8) Epoch 5, batch 6300, loss[loss=0.1647, simple_loss=0.2295, pruned_loss=0.05, over 4876.00 frames.], tot_loss[loss=0.1541, simple_loss=0.224, pruned_loss=0.04204, over 971569.31 frames.], batch size: 22, lr: 3.95e-04 +2022-05-05 03:59:39,539 INFO [train.py:715] (7/8) Epoch 5, batch 6350, loss[loss=0.1458, simple_loss=0.2251, pruned_loss=0.03327, over 4755.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2246, pruned_loss=0.04214, over 972458.31 frames.], batch size: 16, lr: 3.95e-04 +2022-05-05 04:00:18,907 INFO [train.py:715] (7/8) Epoch 5, batch 6400, loss[loss=0.1613, simple_loss=0.2242, pruned_loss=0.04918, over 4809.00 frames.], tot_loss[loss=0.155, simple_loss=0.2253, pruned_loss=0.04231, over 972519.97 frames.], batch size: 25, lr: 3.95e-04 +2022-05-05 04:00:57,770 INFO [train.py:715] (7/8) Epoch 5, batch 6450, loss[loss=0.1621, simple_loss=0.2371, pruned_loss=0.04357, over 4844.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2245, pruned_loss=0.04206, over 971640.36 frames.], batch size: 30, lr: 3.95e-04 +2022-05-05 04:01:37,239 INFO [train.py:715] (7/8) Epoch 5, batch 6500, loss[loss=0.1561, simple_loss=0.2331, pruned_loss=0.03952, over 4944.00 frames.], tot_loss[loss=0.1549, simple_loss=0.225, pruned_loss=0.04237, over 972152.30 frames.], batch size: 21, lr: 3.95e-04 +2022-05-05 04:02:16,585 INFO [train.py:715] (7/8) Epoch 5, batch 6550, loss[loss=0.132, simple_loss=0.2102, pruned_loss=0.02692, over 4782.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2251, pruned_loss=0.04239, over 972865.33 frames.], batch size: 18, lr: 3.94e-04 +2022-05-05 04:02:55,733 INFO [train.py:715] (7/8) Epoch 5, batch 6600, loss[loss=0.188, simple_loss=0.2659, pruned_loss=0.05499, over 4987.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2252, pruned_loss=0.04228, over 973062.37 frames.], batch size: 14, lr: 
3.94e-04 +2022-05-05 04:03:35,253 INFO [train.py:715] (7/8) Epoch 5, batch 6650, loss[loss=0.1618, simple_loss=0.2344, pruned_loss=0.04459, over 4901.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2246, pruned_loss=0.04219, over 972741.04 frames.], batch size: 19, lr: 3.94e-04 +2022-05-05 04:04:15,786 INFO [train.py:715] (7/8) Epoch 5, batch 6700, loss[loss=0.1219, simple_loss=0.1904, pruned_loss=0.02669, over 4989.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2245, pruned_loss=0.04221, over 972339.69 frames.], batch size: 25, lr: 3.94e-04 +2022-05-05 04:04:56,116 INFO [train.py:715] (7/8) Epoch 5, batch 6750, loss[loss=0.1503, simple_loss=0.2271, pruned_loss=0.03679, over 4929.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2247, pruned_loss=0.04234, over 972070.08 frames.], batch size: 23, lr: 3.94e-04 +2022-05-05 04:05:36,108 INFO [train.py:715] (7/8) Epoch 5, batch 6800, loss[loss=0.1765, simple_loss=0.2394, pruned_loss=0.05679, over 4940.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2243, pruned_loss=0.04213, over 972563.64 frames.], batch size: 39, lr: 3.94e-04 +2022-05-05 04:06:16,590 INFO [train.py:715] (7/8) Epoch 5, batch 6850, loss[loss=0.1646, simple_loss=0.2278, pruned_loss=0.05069, over 4925.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2246, pruned_loss=0.04187, over 972783.09 frames.], batch size: 23, lr: 3.94e-04 +2022-05-05 04:06:56,552 INFO [train.py:715] (7/8) Epoch 5, batch 6900, loss[loss=0.1567, simple_loss=0.2309, pruned_loss=0.04124, over 4926.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2242, pruned_loss=0.04157, over 973537.46 frames.], batch size: 18, lr: 3.94e-04 +2022-05-05 04:07:37,127 INFO [train.py:715] (7/8) Epoch 5, batch 6950, loss[loss=0.1356, simple_loss=0.2087, pruned_loss=0.03125, over 4811.00 frames.], tot_loss[loss=0.1532, simple_loss=0.224, pruned_loss=0.04118, over 974257.80 frames.], batch size: 26, lr: 3.94e-04 +2022-05-05 04:08:16,567 INFO [train.py:715] (7/8) Epoch 5, batch 7000, loss[loss=0.1411, simple_loss=0.2096, pruned_loss=0.03627, over 4994.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2245, pruned_loss=0.04129, over 974149.42 frames.], batch size: 14, lr: 3.94e-04 +2022-05-05 04:08:56,461 INFO [train.py:715] (7/8) Epoch 5, batch 7050, loss[loss=0.1294, simple_loss=0.2045, pruned_loss=0.02714, over 4977.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2241, pruned_loss=0.0413, over 973120.53 frames.], batch size: 28, lr: 3.94e-04 +2022-05-05 04:09:36,252 INFO [train.py:715] (7/8) Epoch 5, batch 7100, loss[loss=0.1698, simple_loss=0.2408, pruned_loss=0.04935, over 4961.00 frames.], tot_loss[loss=0.154, simple_loss=0.2242, pruned_loss=0.04193, over 973146.79 frames.], batch size: 35, lr: 3.94e-04 +2022-05-05 04:10:15,692 INFO [train.py:715] (7/8) Epoch 5, batch 7150, loss[loss=0.1936, simple_loss=0.2613, pruned_loss=0.06294, over 4821.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2245, pruned_loss=0.04216, over 972118.77 frames.], batch size: 25, lr: 3.94e-04 +2022-05-05 04:10:55,645 INFO [train.py:715] (7/8) Epoch 5, batch 7200, loss[loss=0.1507, simple_loss=0.2223, pruned_loss=0.0395, over 4915.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2243, pruned_loss=0.04198, over 972186.03 frames.], batch size: 17, lr: 3.94e-04 +2022-05-05 04:11:35,241 INFO [train.py:715] (7/8) Epoch 5, batch 7250, loss[loss=0.145, simple_loss=0.2029, pruned_loss=0.04348, over 4834.00 frames.], tot_loss[loss=0.154, simple_loss=0.224, pruned_loss=0.042, over 972553.71 frames.], batch size: 30, lr: 3.94e-04 +2022-05-05 04:12:15,757 INFO 
[train.py:715] (7/8) Epoch 5, batch 7300, loss[loss=0.1537, simple_loss=0.2284, pruned_loss=0.03951, over 4911.00 frames.], tot_loss[loss=0.1542, simple_loss=0.224, pruned_loss=0.04219, over 972746.35 frames.], batch size: 39, lr: 3.94e-04 +2022-05-05 04:12:55,315 INFO [train.py:715] (7/8) Epoch 5, batch 7350, loss[loss=0.1411, simple_loss=0.2157, pruned_loss=0.03324, over 4797.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2237, pruned_loss=0.04208, over 971818.90 frames.], batch size: 14, lr: 3.94e-04 +2022-05-05 04:13:34,918 INFO [train.py:715] (7/8) Epoch 5, batch 7400, loss[loss=0.188, simple_loss=0.2592, pruned_loss=0.05841, over 4865.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2244, pruned_loss=0.04252, over 972069.65 frames.], batch size: 30, lr: 3.94e-04 +2022-05-05 04:14:14,463 INFO [train.py:715] (7/8) Epoch 5, batch 7450, loss[loss=0.1276, simple_loss=0.2088, pruned_loss=0.02325, over 4769.00 frames.], tot_loss[loss=0.1544, simple_loss=0.224, pruned_loss=0.04239, over 971983.26 frames.], batch size: 18, lr: 3.93e-04 +2022-05-05 04:14:53,553 INFO [train.py:715] (7/8) Epoch 5, batch 7500, loss[loss=0.201, simple_loss=0.2838, pruned_loss=0.05912, over 4795.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2237, pruned_loss=0.0423, over 971953.33 frames.], batch size: 17, lr: 3.93e-04 +2022-05-05 04:15:33,687 INFO [train.py:715] (7/8) Epoch 5, batch 7550, loss[loss=0.1327, simple_loss=0.2095, pruned_loss=0.02791, over 4944.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2234, pruned_loss=0.04215, over 972486.61 frames.], batch size: 29, lr: 3.93e-04 +2022-05-05 04:16:13,351 INFO [train.py:715] (7/8) Epoch 5, batch 7600, loss[loss=0.1356, simple_loss=0.2133, pruned_loss=0.02898, over 4945.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2234, pruned_loss=0.0421, over 972066.81 frames.], batch size: 29, lr: 3.93e-04 +2022-05-05 04:16:53,611 INFO [train.py:715] (7/8) Epoch 5, batch 7650, loss[loss=0.1614, simple_loss=0.2407, pruned_loss=0.04105, over 4895.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2224, pruned_loss=0.04145, over 971856.27 frames.], batch size: 22, lr: 3.93e-04 +2022-05-05 04:17:33,269 INFO [train.py:715] (7/8) Epoch 5, batch 7700, loss[loss=0.1747, simple_loss=0.2433, pruned_loss=0.05308, over 4876.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2235, pruned_loss=0.04153, over 972052.64 frames.], batch size: 17, lr: 3.93e-04 +2022-05-05 04:18:12,780 INFO [train.py:715] (7/8) Epoch 5, batch 7750, loss[loss=0.1499, simple_loss=0.2243, pruned_loss=0.03777, over 4822.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2239, pruned_loss=0.04176, over 972493.59 frames.], batch size: 25, lr: 3.93e-04 +2022-05-05 04:18:52,928 INFO [train.py:715] (7/8) Epoch 5, batch 7800, loss[loss=0.1425, simple_loss=0.2071, pruned_loss=0.03892, over 4811.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2234, pruned_loss=0.04161, over 972593.92 frames.], batch size: 27, lr: 3.93e-04 +2022-05-05 04:19:32,131 INFO [train.py:715] (7/8) Epoch 5, batch 7850, loss[loss=0.1463, simple_loss=0.2137, pruned_loss=0.03943, over 4981.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2237, pruned_loss=0.04198, over 972621.97 frames.], batch size: 28, lr: 3.93e-04 +2022-05-05 04:20:12,362 INFO [train.py:715] (7/8) Epoch 5, batch 7900, loss[loss=0.2105, simple_loss=0.2597, pruned_loss=0.0807, over 4900.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2242, pruned_loss=0.04239, over 971955.63 frames.], batch size: 39, lr: 3.93e-04 +2022-05-05 04:20:51,915 INFO [train.py:715] (7/8) Epoch 5, batch 
7950, loss[loss=0.1668, simple_loss=0.2256, pruned_loss=0.05399, over 4913.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2255, pruned_loss=0.04306, over 972622.88 frames.], batch size: 17, lr: 3.93e-04 +2022-05-05 04:21:32,116 INFO [train.py:715] (7/8) Epoch 5, batch 8000, loss[loss=0.144, simple_loss=0.2203, pruned_loss=0.03379, over 4884.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2246, pruned_loss=0.04235, over 972467.16 frames.], batch size: 19, lr: 3.93e-04 +2022-05-05 04:22:11,576 INFO [train.py:715] (7/8) Epoch 5, batch 8050, loss[loss=0.1521, simple_loss=0.223, pruned_loss=0.04054, over 4934.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2246, pruned_loss=0.0425, over 972804.04 frames.], batch size: 39, lr: 3.93e-04 +2022-05-05 04:22:51,029 INFO [train.py:715] (7/8) Epoch 5, batch 8100, loss[loss=0.1283, simple_loss=0.2055, pruned_loss=0.0256, over 4870.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2248, pruned_loss=0.04234, over 971846.76 frames.], batch size: 22, lr: 3.93e-04 +2022-05-05 04:23:30,814 INFO [train.py:715] (7/8) Epoch 5, batch 8150, loss[loss=0.1515, simple_loss=0.2248, pruned_loss=0.03905, over 4824.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2249, pruned_loss=0.04242, over 972828.27 frames.], batch size: 13, lr: 3.93e-04 +2022-05-05 04:24:09,997 INFO [train.py:715] (7/8) Epoch 5, batch 8200, loss[loss=0.1754, simple_loss=0.2405, pruned_loss=0.05514, over 4813.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2251, pruned_loss=0.04269, over 972464.90 frames.], batch size: 15, lr: 3.93e-04 +2022-05-05 04:24:50,015 INFO [train.py:715] (7/8) Epoch 5, batch 8250, loss[loss=0.1691, simple_loss=0.2286, pruned_loss=0.05481, over 4835.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2247, pruned_loss=0.04271, over 972362.00 frames.], batch size: 30, lr: 3.93e-04 +2022-05-05 04:25:29,484 INFO [train.py:715] (7/8) Epoch 5, batch 8300, loss[loss=0.142, simple_loss=0.2169, pruned_loss=0.0336, over 4829.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2246, pruned_loss=0.04221, over 972914.74 frames.], batch size: 26, lr: 3.93e-04 +2022-05-05 04:26:09,424 INFO [train.py:715] (7/8) Epoch 5, batch 8350, loss[loss=0.1456, simple_loss=0.2187, pruned_loss=0.03627, over 4904.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2251, pruned_loss=0.04227, over 972746.30 frames.], batch size: 17, lr: 3.93e-04 +2022-05-05 04:26:48,505 INFO [train.py:715] (7/8) Epoch 5, batch 8400, loss[loss=0.1403, simple_loss=0.215, pruned_loss=0.03281, over 4696.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2242, pruned_loss=0.04158, over 972575.44 frames.], batch size: 15, lr: 3.92e-04 +2022-05-05 04:27:27,553 INFO [train.py:715] (7/8) Epoch 5, batch 8450, loss[loss=0.1198, simple_loss=0.1938, pruned_loss=0.02291, over 4938.00 frames.], tot_loss[loss=0.153, simple_loss=0.2234, pruned_loss=0.04133, over 973534.60 frames.], batch size: 23, lr: 3.92e-04 +2022-05-05 04:28:06,814 INFO [train.py:715] (7/8) Epoch 5, batch 8500, loss[loss=0.1404, simple_loss=0.2096, pruned_loss=0.0356, over 4906.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2231, pruned_loss=0.04123, over 972633.80 frames.], batch size: 18, lr: 3.92e-04 +2022-05-05 04:28:45,807 INFO [train.py:715] (7/8) Epoch 5, batch 8550, loss[loss=0.1505, simple_loss=0.2201, pruned_loss=0.04048, over 4779.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2234, pruned_loss=0.04166, over 971697.14 frames.], batch size: 14, lr: 3.92e-04 +2022-05-05 04:29:25,251 INFO [train.py:715] (7/8) Epoch 5, batch 8600, loss[loss=0.1658, 
simple_loss=0.2375, pruned_loss=0.04702, over 4888.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2231, pruned_loss=0.0416, over 971654.38 frames.], batch size: 22, lr: 3.92e-04 +2022-05-05 04:30:04,417 INFO [train.py:715] (7/8) Epoch 5, batch 8650, loss[loss=0.1522, simple_loss=0.2196, pruned_loss=0.04239, over 4778.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2243, pruned_loss=0.042, over 972366.36 frames.], batch size: 18, lr: 3.92e-04 +2022-05-05 04:30:43,888 INFO [train.py:715] (7/8) Epoch 5, batch 8700, loss[loss=0.1363, simple_loss=0.2013, pruned_loss=0.03568, over 4874.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2244, pruned_loss=0.04198, over 972430.34 frames.], batch size: 30, lr: 3.92e-04 +2022-05-05 04:31:23,275 INFO [train.py:715] (7/8) Epoch 5, batch 8750, loss[loss=0.161, simple_loss=0.2182, pruned_loss=0.05192, over 4834.00 frames.], tot_loss[loss=0.1549, simple_loss=0.225, pruned_loss=0.04245, over 972871.20 frames.], batch size: 30, lr: 3.92e-04 +2022-05-05 04:32:02,279 INFO [train.py:715] (7/8) Epoch 5, batch 8800, loss[loss=0.1161, simple_loss=0.19, pruned_loss=0.02111, over 4782.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2246, pruned_loss=0.04204, over 972886.28 frames.], batch size: 17, lr: 3.92e-04 +2022-05-05 04:32:42,163 INFO [train.py:715] (7/8) Epoch 5, batch 8850, loss[loss=0.1459, simple_loss=0.2088, pruned_loss=0.04148, over 4912.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2239, pruned_loss=0.04184, over 973244.53 frames.], batch size: 17, lr: 3.92e-04 +2022-05-05 04:33:20,886 INFO [train.py:715] (7/8) Epoch 5, batch 8900, loss[loss=0.1396, simple_loss=0.2182, pruned_loss=0.03049, over 4786.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2228, pruned_loss=0.04117, over 973154.38 frames.], batch size: 17, lr: 3.92e-04 +2022-05-05 04:33:59,748 INFO [train.py:715] (7/8) Epoch 5, batch 8950, loss[loss=0.1569, simple_loss=0.226, pruned_loss=0.0439, over 4941.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04131, over 973264.77 frames.], batch size: 39, lr: 3.92e-04 +2022-05-05 04:34:39,031 INFO [train.py:715] (7/8) Epoch 5, batch 9000, loss[loss=0.1407, simple_loss=0.2144, pruned_loss=0.03353, over 4860.00 frames.], tot_loss[loss=0.153, simple_loss=0.2232, pruned_loss=0.04146, over 973138.11 frames.], batch size: 32, lr: 3.92e-04 +2022-05-05 04:34:39,031 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 04:34:48,553 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1105, simple_loss=0.196, pruned_loss=0.01252, over 914524.00 frames. 
+2022-05-05 04:35:28,198 INFO [train.py:715] (7/8) Epoch 5, batch 9050, loss[loss=0.1184, simple_loss=0.1945, pruned_loss=0.02113, over 4790.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2229, pruned_loss=0.0415, over 973058.26 frames.], batch size: 18, lr: 3.92e-04 +2022-05-05 04:36:07,670 INFO [train.py:715] (7/8) Epoch 5, batch 9100, loss[loss=0.1441, simple_loss=0.2224, pruned_loss=0.03288, over 4827.00 frames.], tot_loss[loss=0.1537, simple_loss=0.224, pruned_loss=0.04177, over 973281.10 frames.], batch size: 15, lr: 3.92e-04 +2022-05-05 04:36:46,716 INFO [train.py:715] (7/8) Epoch 5, batch 9150, loss[loss=0.1428, simple_loss=0.2234, pruned_loss=0.03106, over 4916.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2225, pruned_loss=0.04114, over 972742.60 frames.], batch size: 18, lr: 3.92e-04 +2022-05-05 04:37:26,202 INFO [train.py:715] (7/8) Epoch 5, batch 9200, loss[loss=0.1371, simple_loss=0.2083, pruned_loss=0.03291, over 4772.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2223, pruned_loss=0.04105, over 972794.65 frames.], batch size: 18, lr: 3.92e-04 +2022-05-05 04:38:06,417 INFO [train.py:715] (7/8) Epoch 5, batch 9250, loss[loss=0.1299, simple_loss=0.1989, pruned_loss=0.03045, over 4935.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2223, pruned_loss=0.04081, over 971687.14 frames.], batch size: 21, lr: 3.92e-04 +2022-05-05 04:38:45,294 INFO [train.py:715] (7/8) Epoch 5, batch 9300, loss[loss=0.1815, simple_loss=0.2384, pruned_loss=0.06227, over 4906.00 frames.], tot_loss[loss=0.1523, simple_loss=0.222, pruned_loss=0.04127, over 971863.13 frames.], batch size: 39, lr: 3.91e-04 +2022-05-05 04:39:24,933 INFO [train.py:715] (7/8) Epoch 5, batch 9350, loss[loss=0.1504, simple_loss=0.2301, pruned_loss=0.03536, over 4939.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2226, pruned_loss=0.0416, over 971570.10 frames.], batch size: 21, lr: 3.91e-04 +2022-05-05 04:40:04,424 INFO [train.py:715] (7/8) Epoch 5, batch 9400, loss[loss=0.1383, simple_loss=0.2076, pruned_loss=0.03454, over 4831.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04128, over 971379.18 frames.], batch size: 13, lr: 3.91e-04 +2022-05-05 04:40:43,718 INFO [train.py:715] (7/8) Epoch 5, batch 9450, loss[loss=0.1723, simple_loss=0.2456, pruned_loss=0.04954, over 4952.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2229, pruned_loss=0.04132, over 971932.93 frames.], batch size: 29, lr: 3.91e-04 +2022-05-05 04:41:22,595 INFO [train.py:715] (7/8) Epoch 5, batch 9500, loss[loss=0.1306, simple_loss=0.1991, pruned_loss=0.03102, over 4772.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2229, pruned_loss=0.04145, over 971255.79 frames.], batch size: 18, lr: 3.91e-04 +2022-05-05 04:42:02,156 INFO [train.py:715] (7/8) Epoch 5, batch 9550, loss[loss=0.1282, simple_loss=0.1926, pruned_loss=0.03196, over 4813.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2223, pruned_loss=0.04096, over 970477.53 frames.], batch size: 25, lr: 3.91e-04 +2022-05-05 04:42:41,922 INFO [train.py:715] (7/8) Epoch 5, batch 9600, loss[loss=0.1611, simple_loss=0.2266, pruned_loss=0.04777, over 4812.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2226, pruned_loss=0.04107, over 970994.74 frames.], batch size: 24, lr: 3.91e-04 +2022-05-05 04:43:21,155 INFO [train.py:715] (7/8) Epoch 5, batch 9650, loss[loss=0.1929, simple_loss=0.2575, pruned_loss=0.06413, over 4936.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04115, over 971601.60 frames.], batch size: 39, lr: 3.91e-04 +2022-05-05 04:44:00,817 INFO 
[train.py:715] (7/8) Epoch 5, batch 9700, loss[loss=0.1486, simple_loss=0.2315, pruned_loss=0.03288, over 4925.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2225, pruned_loss=0.0411, over 971843.58 frames.], batch size: 23, lr: 3.91e-04 +2022-05-05 04:44:40,237 INFO [train.py:715] (7/8) Epoch 5, batch 9750, loss[loss=0.1701, simple_loss=0.2461, pruned_loss=0.04705, over 4739.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2228, pruned_loss=0.04117, over 971571.45 frames.], batch size: 16, lr: 3.91e-04 +2022-05-05 04:45:19,139 INFO [train.py:715] (7/8) Epoch 5, batch 9800, loss[loss=0.142, simple_loss=0.2111, pruned_loss=0.03642, over 4881.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2229, pruned_loss=0.04148, over 970641.96 frames.], batch size: 32, lr: 3.91e-04 +2022-05-05 04:45:58,976 INFO [train.py:715] (7/8) Epoch 5, batch 9850, loss[loss=0.1611, simple_loss=0.2274, pruned_loss=0.04741, over 4795.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2221, pruned_loss=0.04105, over 970765.65 frames.], batch size: 14, lr: 3.91e-04 +2022-05-05 04:46:38,174 INFO [train.py:715] (7/8) Epoch 5, batch 9900, loss[loss=0.148, simple_loss=0.2228, pruned_loss=0.03663, over 4804.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2237, pruned_loss=0.04178, over 970707.60 frames.], batch size: 21, lr: 3.91e-04 +2022-05-05 04:47:17,941 INFO [train.py:715] (7/8) Epoch 5, batch 9950, loss[loss=0.1449, simple_loss=0.215, pruned_loss=0.03743, over 4759.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2228, pruned_loss=0.04121, over 970771.31 frames.], batch size: 12, lr: 3.91e-04 +2022-05-05 04:47:59,852 INFO [train.py:715] (7/8) Epoch 5, batch 10000, loss[loss=0.1562, simple_loss=0.2252, pruned_loss=0.04357, over 4819.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2235, pruned_loss=0.04198, over 971292.30 frames.], batch size: 25, lr: 3.91e-04 +2022-05-05 04:48:39,810 INFO [train.py:715] (7/8) Epoch 5, batch 10050, loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03052, over 4931.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2237, pruned_loss=0.04158, over 971577.56 frames.], batch size: 29, lr: 3.91e-04 +2022-05-05 04:49:19,418 INFO [train.py:715] (7/8) Epoch 5, batch 10100, loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03038, over 4813.00 frames.], tot_loss[loss=0.153, simple_loss=0.223, pruned_loss=0.04148, over 971290.68 frames.], batch size: 25, lr: 3.91e-04 +2022-05-05 04:49:58,587 INFO [train.py:715] (7/8) Epoch 5, batch 10150, loss[loss=0.1573, simple_loss=0.224, pruned_loss=0.04535, over 4940.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2234, pruned_loss=0.0419, over 971548.98 frames.], batch size: 39, lr: 3.91e-04 +2022-05-05 04:50:38,455 INFO [train.py:715] (7/8) Epoch 5, batch 10200, loss[loss=0.1795, simple_loss=0.2472, pruned_loss=0.05593, over 4791.00 frames.], tot_loss[loss=0.1531, simple_loss=0.223, pruned_loss=0.04157, over 971573.28 frames.], batch size: 21, lr: 3.91e-04 +2022-05-05 04:51:17,798 INFO [train.py:715] (7/8) Epoch 5, batch 10250, loss[loss=0.2037, simple_loss=0.2492, pruned_loss=0.07908, over 4947.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2234, pruned_loss=0.04197, over 972303.62 frames.], batch size: 35, lr: 3.90e-04 +2022-05-05 04:51:56,803 INFO [train.py:715] (7/8) Epoch 5, batch 10300, loss[loss=0.17, simple_loss=0.2329, pruned_loss=0.05356, over 4700.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2235, pruned_loss=0.04188, over 973029.85 frames.], batch size: 15, lr: 3.90e-04 +2022-05-05 04:52:36,628 INFO [train.py:715] (7/8) Epoch 5, batch 
10350, loss[loss=0.1399, simple_loss=0.2072, pruned_loss=0.03628, over 4929.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2238, pruned_loss=0.04223, over 972262.00 frames.], batch size: 39, lr: 3.90e-04 +2022-05-05 04:53:15,667 INFO [train.py:715] (7/8) Epoch 5, batch 10400, loss[loss=0.159, simple_loss=0.2233, pruned_loss=0.04736, over 4729.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2227, pruned_loss=0.04135, over 972267.36 frames.], batch size: 16, lr: 3.90e-04 +2022-05-05 04:53:55,622 INFO [train.py:715] (7/8) Epoch 5, batch 10450, loss[loss=0.1792, simple_loss=0.2535, pruned_loss=0.05243, over 4847.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2224, pruned_loss=0.04095, over 972522.26 frames.], batch size: 20, lr: 3.90e-04 +2022-05-05 04:54:35,514 INFO [train.py:715] (7/8) Epoch 5, batch 10500, loss[loss=0.1643, simple_loss=0.228, pruned_loss=0.05027, over 4864.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04128, over 973002.34 frames.], batch size: 32, lr: 3.90e-04 +2022-05-05 04:55:15,983 INFO [train.py:715] (7/8) Epoch 5, batch 10550, loss[loss=0.1615, simple_loss=0.2235, pruned_loss=0.04975, over 4911.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2222, pruned_loss=0.04117, over 973500.51 frames.], batch size: 29, lr: 3.90e-04 +2022-05-05 04:55:55,070 INFO [train.py:715] (7/8) Epoch 5, batch 10600, loss[loss=0.144, simple_loss=0.214, pruned_loss=0.03705, over 4770.00 frames.], tot_loss[loss=0.1519, simple_loss=0.222, pruned_loss=0.0409, over 973422.89 frames.], batch size: 19, lr: 3.90e-04 +2022-05-05 04:56:34,545 INFO [train.py:715] (7/8) Epoch 5, batch 10650, loss[loss=0.1538, simple_loss=0.2353, pruned_loss=0.03614, over 4821.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2229, pruned_loss=0.04139, over 973595.21 frames.], batch size: 14, lr: 3.90e-04 +2022-05-05 04:57:14,073 INFO [train.py:715] (7/8) Epoch 5, batch 10700, loss[loss=0.1555, simple_loss=0.2205, pruned_loss=0.04522, over 4974.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2236, pruned_loss=0.04162, over 974037.77 frames.], batch size: 28, lr: 3.90e-04 +2022-05-05 04:57:53,029 INFO [train.py:715] (7/8) Epoch 5, batch 10750, loss[loss=0.1649, simple_loss=0.2374, pruned_loss=0.04619, over 4946.00 frames.], tot_loss[loss=0.1534, simple_loss=0.224, pruned_loss=0.04137, over 973535.99 frames.], batch size: 21, lr: 3.90e-04 +2022-05-05 04:58:32,278 INFO [train.py:715] (7/8) Epoch 5, batch 10800, loss[loss=0.1434, simple_loss=0.2075, pruned_loss=0.03967, over 4845.00 frames.], tot_loss[loss=0.154, simple_loss=0.2243, pruned_loss=0.0418, over 973574.70 frames.], batch size: 30, lr: 3.90e-04 +2022-05-05 04:59:11,505 INFO [train.py:715] (7/8) Epoch 5, batch 10850, loss[loss=0.1542, simple_loss=0.2196, pruned_loss=0.0444, over 4777.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2243, pruned_loss=0.04154, over 973234.18 frames.], batch size: 18, lr: 3.90e-04 +2022-05-05 04:59:51,500 INFO [train.py:715] (7/8) Epoch 5, batch 10900, loss[loss=0.168, simple_loss=0.2454, pruned_loss=0.0453, over 4932.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2244, pruned_loss=0.0417, over 973048.08 frames.], batch size: 29, lr: 3.90e-04 +2022-05-05 05:00:30,696 INFO [train.py:715] (7/8) Epoch 5, batch 10950, loss[loss=0.1609, simple_loss=0.2288, pruned_loss=0.0465, over 4781.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2249, pruned_loss=0.04196, over 973739.97 frames.], batch size: 14, lr: 3.90e-04 +2022-05-05 05:01:10,467 INFO [train.py:715] (7/8) Epoch 5, batch 11000, loss[loss=0.1505, 
simple_loss=0.2246, pruned_loss=0.03822, over 4908.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2242, pruned_loss=0.04167, over 972805.43 frames.], batch size: 29, lr: 3.90e-04 +2022-05-05 05:01:49,966 INFO [train.py:715] (7/8) Epoch 5, batch 11050, loss[loss=0.1297, simple_loss=0.1971, pruned_loss=0.03109, over 4826.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2237, pruned_loss=0.04142, over 973081.66 frames.], batch size: 12, lr: 3.90e-04 +2022-05-05 05:02:29,386 INFO [train.py:715] (7/8) Epoch 5, batch 11100, loss[loss=0.1373, simple_loss=0.2043, pruned_loss=0.03517, over 4909.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.041, over 971817.30 frames.], batch size: 18, lr: 3.90e-04 +2022-05-05 05:03:08,926 INFO [train.py:715] (7/8) Epoch 5, batch 11150, loss[loss=0.132, simple_loss=0.2058, pruned_loss=0.02905, over 4943.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2221, pruned_loss=0.04053, over 971994.82 frames.], batch size: 21, lr: 3.90e-04 +2022-05-05 05:03:48,020 INFO [train.py:715] (7/8) Epoch 5, batch 11200, loss[loss=0.1491, simple_loss=0.2259, pruned_loss=0.0361, over 4905.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2225, pruned_loss=0.04048, over 971791.98 frames.], batch size: 19, lr: 3.89e-04 +2022-05-05 05:04:27,939 INFO [train.py:715] (7/8) Epoch 5, batch 11250, loss[loss=0.124, simple_loss=0.2007, pruned_loss=0.02369, over 4967.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2217, pruned_loss=0.04002, over 972847.90 frames.], batch size: 14, lr: 3.89e-04 +2022-05-05 05:05:07,260 INFO [train.py:715] (7/8) Epoch 5, batch 11300, loss[loss=0.1343, simple_loss=0.2072, pruned_loss=0.03073, over 4944.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.0393, over 973136.01 frames.], batch size: 21, lr: 3.89e-04 +2022-05-05 05:05:46,390 INFO [train.py:715] (7/8) Epoch 5, batch 11350, loss[loss=0.1255, simple_loss=0.2131, pruned_loss=0.01891, over 4808.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2205, pruned_loss=0.03905, over 972191.39 frames.], batch size: 24, lr: 3.89e-04 +2022-05-05 05:06:27,197 INFO [train.py:715] (7/8) Epoch 5, batch 11400, loss[loss=0.1208, simple_loss=0.204, pruned_loss=0.01885, over 4957.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03933, over 972185.01 frames.], batch size: 23, lr: 3.89e-04 +2022-05-05 05:07:07,353 INFO [train.py:715] (7/8) Epoch 5, batch 11450, loss[loss=0.1308, simple_loss=0.211, pruned_loss=0.02529, over 4820.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2212, pruned_loss=0.03968, over 972410.78 frames.], batch size: 26, lr: 3.89e-04 +2022-05-05 05:07:47,396 INFO [train.py:715] (7/8) Epoch 5, batch 11500, loss[loss=0.1965, simple_loss=0.264, pruned_loss=0.06454, over 4767.00 frames.], tot_loss[loss=0.1514, simple_loss=0.222, pruned_loss=0.04034, over 972726.13 frames.], batch size: 14, lr: 3.89e-04 +2022-05-05 05:08:27,416 INFO [train.py:715] (7/8) Epoch 5, batch 11550, loss[loss=0.1701, simple_loss=0.2346, pruned_loss=0.05283, over 4896.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2218, pruned_loss=0.0406, over 972667.42 frames.], batch size: 22, lr: 3.89e-04 +2022-05-05 05:09:07,607 INFO [train.py:715] (7/8) Epoch 5, batch 11600, loss[loss=0.1711, simple_loss=0.2413, pruned_loss=0.05044, over 4812.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2207, pruned_loss=0.03992, over 972420.50 frames.], batch size: 12, lr: 3.89e-04 +2022-05-05 05:09:48,314 INFO [train.py:715] (7/8) Epoch 5, batch 11650, loss[loss=0.1272, simple_loss=0.1965, 
pruned_loss=0.02899, over 4968.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2213, pruned_loss=0.0401, over 971765.22 frames.], batch size: 28, lr: 3.89e-04 +2022-05-05 05:10:28,059 INFO [train.py:715] (7/8) Epoch 5, batch 11700, loss[loss=0.1614, simple_loss=0.2271, pruned_loss=0.04785, over 4792.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2216, pruned_loss=0.04054, over 971868.67 frames.], batch size: 17, lr: 3.89e-04 +2022-05-05 05:11:08,776 INFO [train.py:715] (7/8) Epoch 5, batch 11750, loss[loss=0.1509, simple_loss=0.2243, pruned_loss=0.03873, over 4876.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2218, pruned_loss=0.04071, over 971417.83 frames.], batch size: 22, lr: 3.89e-04 +2022-05-05 05:11:48,922 INFO [train.py:715] (7/8) Epoch 5, batch 11800, loss[loss=0.1493, simple_loss=0.2208, pruned_loss=0.03894, over 4875.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.0409, over 972423.34 frames.], batch size: 16, lr: 3.89e-04 +2022-05-05 05:12:29,043 INFO [train.py:715] (7/8) Epoch 5, batch 11850, loss[loss=0.1866, simple_loss=0.2509, pruned_loss=0.06117, over 4891.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2236, pruned_loss=0.04143, over 973123.10 frames.], batch size: 22, lr: 3.89e-04 +2022-05-05 05:13:08,182 INFO [train.py:715] (7/8) Epoch 5, batch 11900, loss[loss=0.1246, simple_loss=0.193, pruned_loss=0.02805, over 4926.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2234, pruned_loss=0.04178, over 973469.18 frames.], batch size: 23, lr: 3.89e-04 +2022-05-05 05:13:47,506 INFO [train.py:715] (7/8) Epoch 5, batch 11950, loss[loss=0.1426, simple_loss=0.2099, pruned_loss=0.03762, over 4894.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2233, pruned_loss=0.04144, over 973688.72 frames.], batch size: 32, lr: 3.89e-04 +2022-05-05 05:14:27,512 INFO [train.py:715] (7/8) Epoch 5, batch 12000, loss[loss=0.1634, simple_loss=0.2294, pruned_loss=0.04868, over 4784.00 frames.], tot_loss[loss=0.1526, simple_loss=0.223, pruned_loss=0.04116, over 972195.39 frames.], batch size: 14, lr: 3.89e-04 +2022-05-05 05:14:27,513 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 05:14:37,327 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1103, simple_loss=0.1957, pruned_loss=0.01243, over 914524.00 frames. 
+2022-05-05 05:15:17,599 INFO [train.py:715] (7/8) Epoch 5, batch 12050, loss[loss=0.1213, simple_loss=0.1912, pruned_loss=0.02569, over 4887.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2231, pruned_loss=0.04174, over 971137.59 frames.], batch size: 22, lr: 3.89e-04 +2022-05-05 05:15:57,249 INFO [train.py:715] (7/8) Epoch 5, batch 12100, loss[loss=0.1473, simple_loss=0.2117, pruned_loss=0.04143, over 4964.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2222, pruned_loss=0.0412, over 971108.18 frames.], batch size: 28, lr: 3.89e-04 +2022-05-05 05:16:36,762 INFO [train.py:715] (7/8) Epoch 5, batch 12150, loss[loss=0.1627, simple_loss=0.23, pruned_loss=0.0477, over 4921.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04114, over 971877.27 frames.], batch size: 17, lr: 3.88e-04 +2022-05-05 05:17:16,024 INFO [train.py:715] (7/8) Epoch 5, batch 12200, loss[loss=0.1236, simple_loss=0.2013, pruned_loss=0.023, over 4988.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2237, pruned_loss=0.04172, over 972143.24 frames.], batch size: 24, lr: 3.88e-04 +2022-05-05 05:17:56,100 INFO [train.py:715] (7/8) Epoch 5, batch 12250, loss[loss=0.1383, simple_loss=0.201, pruned_loss=0.03778, over 4840.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.0412, over 971961.94 frames.], batch size: 30, lr: 3.88e-04 +2022-05-05 05:18:35,381 INFO [train.py:715] (7/8) Epoch 5, batch 12300, loss[loss=0.1594, simple_loss=0.2248, pruned_loss=0.04701, over 4753.00 frames.], tot_loss[loss=0.153, simple_loss=0.223, pruned_loss=0.04143, over 971353.41 frames.], batch size: 14, lr: 3.88e-04 +2022-05-05 05:19:14,278 INFO [train.py:715] (7/8) Epoch 5, batch 12350, loss[loss=0.1675, simple_loss=0.2436, pruned_loss=0.0457, over 4901.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2242, pruned_loss=0.04172, over 971607.82 frames.], batch size: 19, lr: 3.88e-04 +2022-05-05 05:19:53,845 INFO [train.py:715] (7/8) Epoch 5, batch 12400, loss[loss=0.1527, simple_loss=0.2308, pruned_loss=0.03732, over 4944.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2242, pruned_loss=0.04146, over 971536.26 frames.], batch size: 39, lr: 3.88e-04 +2022-05-05 05:20:33,433 INFO [train.py:715] (7/8) Epoch 5, batch 12450, loss[loss=0.1531, simple_loss=0.2141, pruned_loss=0.04601, over 4786.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2232, pruned_loss=0.04089, over 971214.54 frames.], batch size: 18, lr: 3.88e-04 +2022-05-05 05:21:12,664 INFO [train.py:715] (7/8) Epoch 5, batch 12500, loss[loss=0.154, simple_loss=0.2218, pruned_loss=0.04312, over 4827.00 frames.], tot_loss[loss=0.1528, simple_loss=0.223, pruned_loss=0.04129, over 971037.36 frames.], batch size: 30, lr: 3.88e-04 +2022-05-05 05:21:51,878 INFO [train.py:715] (7/8) Epoch 5, batch 12550, loss[loss=0.1526, simple_loss=0.2272, pruned_loss=0.03897, over 4915.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2224, pruned_loss=0.04094, over 971043.57 frames.], batch size: 38, lr: 3.88e-04 +2022-05-05 05:22:30,627 INFO [train.py:715] (7/8) Epoch 5, batch 12600, loss[loss=0.1521, simple_loss=0.2285, pruned_loss=0.0378, over 4887.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2229, pruned_loss=0.04123, over 971374.41 frames.], batch size: 22, lr: 3.88e-04 +2022-05-05 05:23:08,927 INFO [train.py:715] (7/8) Epoch 5, batch 12650, loss[loss=0.1458, simple_loss=0.2185, pruned_loss=0.03652, over 4829.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04127, over 972049.97 frames.], batch size: 25, lr: 3.88e-04 +2022-05-05 05:23:47,147 INFO 
[train.py:715] (7/8) Epoch 5, batch 12700, loss[loss=0.1534, simple_loss=0.2127, pruned_loss=0.04703, over 4830.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2239, pruned_loss=0.04115, over 972456.02 frames.], batch size: 15, lr: 3.88e-04 +2022-05-05 05:24:27,029 INFO [train.py:715] (7/8) Epoch 5, batch 12750, loss[loss=0.1581, simple_loss=0.2312, pruned_loss=0.04253, over 4951.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2234, pruned_loss=0.04103, over 972566.98 frames.], batch size: 24, lr: 3.88e-04 +2022-05-05 05:25:06,591 INFO [train.py:715] (7/8) Epoch 5, batch 12800, loss[loss=0.172, simple_loss=0.2367, pruned_loss=0.05365, over 4873.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2244, pruned_loss=0.04128, over 972805.65 frames.], batch size: 20, lr: 3.88e-04 +2022-05-05 05:25:46,758 INFO [train.py:715] (7/8) Epoch 5, batch 12850, loss[loss=0.1688, simple_loss=0.248, pruned_loss=0.04481, over 4819.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2239, pruned_loss=0.04151, over 973232.18 frames.], batch size: 27, lr: 3.88e-04 +2022-05-05 05:26:26,308 INFO [train.py:715] (7/8) Epoch 5, batch 12900, loss[loss=0.156, simple_loss=0.2364, pruned_loss=0.03775, over 4811.00 frames.], tot_loss[loss=0.1537, simple_loss=0.224, pruned_loss=0.04174, over 972469.23 frames.], batch size: 14, lr: 3.88e-04 +2022-05-05 05:27:06,305 INFO [train.py:715] (7/8) Epoch 5, batch 12950, loss[loss=0.1497, simple_loss=0.2199, pruned_loss=0.03977, over 4707.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.04096, over 972725.88 frames.], batch size: 15, lr: 3.88e-04 +2022-05-05 05:27:45,740 INFO [train.py:715] (7/8) Epoch 5, batch 13000, loss[loss=0.1222, simple_loss=0.1924, pruned_loss=0.02598, over 4830.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2237, pruned_loss=0.04143, over 972570.18 frames.], batch size: 12, lr: 3.88e-04 +2022-05-05 05:28:25,607 INFO [train.py:715] (7/8) Epoch 5, batch 13050, loss[loss=0.1313, simple_loss=0.1963, pruned_loss=0.03315, over 4768.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2232, pruned_loss=0.04073, over 972934.98 frames.], batch size: 14, lr: 3.88e-04 +2022-05-05 05:29:03,806 INFO [train.py:715] (7/8) Epoch 5, batch 13100, loss[loss=0.186, simple_loss=0.2526, pruned_loss=0.05972, over 4958.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2236, pruned_loss=0.0415, over 972542.71 frames.], batch size: 24, lr: 3.87e-04 +2022-05-05 05:29:42,388 INFO [train.py:715] (7/8) Epoch 5, batch 13150, loss[loss=0.1221, simple_loss=0.1904, pruned_loss=0.02688, over 4778.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2233, pruned_loss=0.04078, over 972214.81 frames.], batch size: 14, lr: 3.87e-04 +2022-05-05 05:30:20,477 INFO [train.py:715] (7/8) Epoch 5, batch 13200, loss[loss=0.1667, simple_loss=0.2396, pruned_loss=0.04691, over 4949.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2233, pruned_loss=0.04093, over 972379.32 frames.], batch size: 29, lr: 3.87e-04 +2022-05-05 05:30:58,489 INFO [train.py:715] (7/8) Epoch 5, batch 13250, loss[loss=0.1625, simple_loss=0.2372, pruned_loss=0.04387, over 4980.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2233, pruned_loss=0.04089, over 971930.36 frames.], batch size: 25, lr: 3.87e-04 +2022-05-05 05:31:37,092 INFO [train.py:715] (7/8) Epoch 5, batch 13300, loss[loss=0.1488, simple_loss=0.224, pruned_loss=0.03681, over 4914.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2229, pruned_loss=0.04082, over 972680.80 frames.], batch size: 23, lr: 3.87e-04 +2022-05-05 05:32:14,952 INFO [train.py:715] (7/8) 
Epoch 5, batch 13350, loss[loss=0.1885, simple_loss=0.2569, pruned_loss=0.06007, over 4843.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.04084, over 972519.00 frames.], batch size: 30, lr: 3.87e-04 +2022-05-05 05:32:53,086 INFO [train.py:715] (7/8) Epoch 5, batch 13400, loss[loss=0.191, simple_loss=0.2502, pruned_loss=0.06592, over 4861.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2233, pruned_loss=0.04094, over 972829.89 frames.], batch size: 16, lr: 3.87e-04 +2022-05-05 05:33:30,830 INFO [train.py:715] (7/8) Epoch 5, batch 13450, loss[loss=0.1621, simple_loss=0.2401, pruned_loss=0.04207, over 4947.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04105, over 973008.53 frames.], batch size: 21, lr: 3.87e-04 +2022-05-05 05:34:09,168 INFO [train.py:715] (7/8) Epoch 5, batch 13500, loss[loss=0.1392, simple_loss=0.2052, pruned_loss=0.03666, over 4748.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2229, pruned_loss=0.04104, over 972867.25 frames.], batch size: 19, lr: 3.87e-04 +2022-05-05 05:34:47,071 INFO [train.py:715] (7/8) Epoch 5, batch 13550, loss[loss=0.1506, simple_loss=0.2059, pruned_loss=0.04766, over 4809.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2218, pruned_loss=0.04072, over 972734.46 frames.], batch size: 12, lr: 3.87e-04 +2022-05-05 05:35:24,568 INFO [train.py:715] (7/8) Epoch 5, batch 13600, loss[loss=0.1508, simple_loss=0.2192, pruned_loss=0.0412, over 4943.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2218, pruned_loss=0.0406, over 972626.63 frames.], batch size: 35, lr: 3.87e-04 +2022-05-05 05:36:03,223 INFO [train.py:715] (7/8) Epoch 5, batch 13650, loss[loss=0.1357, simple_loss=0.2063, pruned_loss=0.03258, over 4920.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2227, pruned_loss=0.0411, over 972904.75 frames.], batch size: 29, lr: 3.87e-04 +2022-05-05 05:36:41,018 INFO [train.py:715] (7/8) Epoch 5, batch 13700, loss[loss=0.152, simple_loss=0.2336, pruned_loss=0.03522, over 4819.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2226, pruned_loss=0.04054, over 973413.75 frames.], batch size: 25, lr: 3.87e-04 +2022-05-05 05:37:19,075 INFO [train.py:715] (7/8) Epoch 5, batch 13750, loss[loss=0.1551, simple_loss=0.2269, pruned_loss=0.04162, over 4649.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2219, pruned_loss=0.04036, over 973107.66 frames.], batch size: 13, lr: 3.87e-04 +2022-05-05 05:37:56,882 INFO [train.py:715] (7/8) Epoch 5, batch 13800, loss[loss=0.1252, simple_loss=0.1892, pruned_loss=0.0306, over 4788.00 frames.], tot_loss[loss=0.1519, simple_loss=0.222, pruned_loss=0.04096, over 972355.00 frames.], batch size: 12, lr: 3.87e-04 +2022-05-05 05:38:35,346 INFO [train.py:715] (7/8) Epoch 5, batch 13850, loss[loss=0.1487, simple_loss=0.2197, pruned_loss=0.03887, over 4917.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2215, pruned_loss=0.04073, over 972232.66 frames.], batch size: 17, lr: 3.87e-04 +2022-05-05 05:39:13,571 INFO [train.py:715] (7/8) Epoch 5, batch 13900, loss[loss=0.1874, simple_loss=0.2485, pruned_loss=0.0632, over 4979.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2221, pruned_loss=0.04068, over 972522.17 frames.], batch size: 39, lr: 3.87e-04 +2022-05-05 05:39:51,056 INFO [train.py:715] (7/8) Epoch 5, batch 13950, loss[loss=0.1351, simple_loss=0.2127, pruned_loss=0.02875, over 4814.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.04072, over 972704.90 frames.], batch size: 27, lr: 3.87e-04 +2022-05-05 05:40:29,786 INFO [train.py:715] (7/8) Epoch 5, batch 14000, 
loss[loss=0.1662, simple_loss=0.2441, pruned_loss=0.04415, over 4941.00 frames.], tot_loss[loss=0.152, simple_loss=0.222, pruned_loss=0.04094, over 973097.01 frames.], batch size: 29, lr: 3.87e-04 +2022-05-05 05:41:07,814 INFO [train.py:715] (7/8) Epoch 5, batch 14050, loss[loss=0.1546, simple_loss=0.2224, pruned_loss=0.04344, over 4806.00 frames.], tot_loss[loss=0.152, simple_loss=0.2221, pruned_loss=0.041, over 972898.12 frames.], batch size: 26, lr: 3.87e-04 +2022-05-05 05:41:45,578 INFO [train.py:715] (7/8) Epoch 5, batch 14100, loss[loss=0.167, simple_loss=0.2416, pruned_loss=0.04621, over 4935.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2231, pruned_loss=0.04115, over 973305.33 frames.], batch size: 39, lr: 3.86e-04 +2022-05-05 05:42:23,454 INFO [train.py:715] (7/8) Epoch 5, batch 14150, loss[loss=0.1585, simple_loss=0.2387, pruned_loss=0.03918, over 4818.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2223, pruned_loss=0.04099, over 972442.91 frames.], batch size: 27, lr: 3.86e-04 +2022-05-05 05:43:01,798 INFO [train.py:715] (7/8) Epoch 5, batch 14200, loss[loss=0.1414, simple_loss=0.2045, pruned_loss=0.03913, over 4975.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2217, pruned_loss=0.0408, over 972010.18 frames.], batch size: 28, lr: 3.86e-04 +2022-05-05 05:43:40,049 INFO [train.py:715] (7/8) Epoch 5, batch 14250, loss[loss=0.1539, simple_loss=0.2339, pruned_loss=0.03696, over 4853.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2214, pruned_loss=0.0406, over 971533.01 frames.], batch size: 20, lr: 3.86e-04 +2022-05-05 05:44:18,049 INFO [train.py:715] (7/8) Epoch 5, batch 14300, loss[loss=0.1564, simple_loss=0.2258, pruned_loss=0.04348, over 4820.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2221, pruned_loss=0.04111, over 971848.59 frames.], batch size: 12, lr: 3.86e-04 +2022-05-05 05:44:56,433 INFO [train.py:715] (7/8) Epoch 5, batch 14350, loss[loss=0.1696, simple_loss=0.2413, pruned_loss=0.04889, over 4951.00 frames.], tot_loss[loss=0.1527, simple_loss=0.223, pruned_loss=0.04117, over 971947.74 frames.], batch size: 21, lr: 3.86e-04 +2022-05-05 05:45:34,230 INFO [train.py:715] (7/8) Epoch 5, batch 14400, loss[loss=0.1535, simple_loss=0.2265, pruned_loss=0.04028, over 4832.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.04074, over 972023.78 frames.], batch size: 26, lr: 3.86e-04 +2022-05-05 05:46:11,862 INFO [train.py:715] (7/8) Epoch 5, batch 14450, loss[loss=0.1857, simple_loss=0.2724, pruned_loss=0.04951, over 4757.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2226, pruned_loss=0.04114, over 971671.48 frames.], batch size: 19, lr: 3.86e-04 +2022-05-05 05:46:49,660 INFO [train.py:715] (7/8) Epoch 5, batch 14500, loss[loss=0.1224, simple_loss=0.198, pruned_loss=0.02341, over 4835.00 frames.], tot_loss[loss=0.153, simple_loss=0.2227, pruned_loss=0.0417, over 971809.05 frames.], batch size: 26, lr: 3.86e-04 +2022-05-05 05:47:27,994 INFO [train.py:715] (7/8) Epoch 5, batch 14550, loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03843, over 4705.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04113, over 971992.48 frames.], batch size: 15, lr: 3.86e-04 +2022-05-05 05:48:06,093 INFO [train.py:715] (7/8) Epoch 5, batch 14600, loss[loss=0.1941, simple_loss=0.2539, pruned_loss=0.06714, over 4742.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2233, pruned_loss=0.04115, over 973320.29 frames.], batch size: 12, lr: 3.86e-04 +2022-05-05 05:48:44,025 INFO [train.py:715] (7/8) Epoch 5, batch 14650, loss[loss=0.1783, 
simple_loss=0.2435, pruned_loss=0.05659, over 4952.00 frames.], tot_loss[loss=0.1522, simple_loss=0.223, pruned_loss=0.04067, over 972651.88 frames.], batch size: 24, lr: 3.86e-04 +2022-05-05 05:49:22,273 INFO [train.py:715] (7/8) Epoch 5, batch 14700, loss[loss=0.1365, simple_loss=0.2113, pruned_loss=0.03079, over 4891.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2226, pruned_loss=0.04053, over 972625.93 frames.], batch size: 19, lr: 3.86e-04 +2022-05-05 05:49:59,644 INFO [train.py:715] (7/8) Epoch 5, batch 14750, loss[loss=0.1537, simple_loss=0.2309, pruned_loss=0.03825, over 4932.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2232, pruned_loss=0.04085, over 972366.51 frames.], batch size: 23, lr: 3.86e-04 +2022-05-05 05:50:37,675 INFO [train.py:715] (7/8) Epoch 5, batch 14800, loss[loss=0.1432, simple_loss=0.227, pruned_loss=0.02967, over 4915.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2239, pruned_loss=0.0412, over 972172.41 frames.], batch size: 29, lr: 3.86e-04 +2022-05-05 05:51:15,492 INFO [train.py:715] (7/8) Epoch 5, batch 14850, loss[loss=0.1328, simple_loss=0.2018, pruned_loss=0.0319, over 4898.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2223, pruned_loss=0.04062, over 972492.03 frames.], batch size: 19, lr: 3.86e-04 +2022-05-05 05:51:54,088 INFO [train.py:715] (7/8) Epoch 5, batch 14900, loss[loss=0.1668, simple_loss=0.2485, pruned_loss=0.04254, over 4970.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2231, pruned_loss=0.04109, over 971733.47 frames.], batch size: 15, lr: 3.86e-04 +2022-05-05 05:52:32,748 INFO [train.py:715] (7/8) Epoch 5, batch 14950, loss[loss=0.1748, simple_loss=0.2317, pruned_loss=0.0589, over 4845.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2235, pruned_loss=0.04132, over 971559.55 frames.], batch size: 15, lr: 3.86e-04 +2022-05-05 05:53:10,808 INFO [train.py:715] (7/8) Epoch 5, batch 15000, loss[loss=0.1471, simple_loss=0.2131, pruned_loss=0.0405, over 4948.00 frames.], tot_loss[loss=0.153, simple_loss=0.2233, pruned_loss=0.04134, over 971923.10 frames.], batch size: 39, lr: 3.86e-04 +2022-05-05 05:53:10,808 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 05:53:21,083 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1105, simple_loss=0.1958, pruned_loss=0.01261, over 914524.00 frames. 
+2022-05-05 05:53:58,557 INFO [train.py:715] (7/8) Epoch 5, batch 15050, loss[loss=0.1434, simple_loss=0.2113, pruned_loss=0.03781, over 4877.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04123, over 972258.34 frames.], batch size: 30, lr: 3.85e-04 +2022-05-05 05:54:37,215 INFO [train.py:715] (7/8) Epoch 5, batch 15100, loss[loss=0.1313, simple_loss=0.2018, pruned_loss=0.03044, over 4870.00 frames.], tot_loss[loss=0.152, simple_loss=0.2225, pruned_loss=0.04078, over 972516.91 frames.], batch size: 32, lr: 3.85e-04 +2022-05-05 05:55:15,135 INFO [train.py:715] (7/8) Epoch 5, batch 15150, loss[loss=0.1486, simple_loss=0.2188, pruned_loss=0.03923, over 4781.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04048, over 972207.40 frames.], batch size: 12, lr: 3.85e-04 +2022-05-05 05:55:53,271 INFO [train.py:715] (7/8) Epoch 5, batch 15200, loss[loss=0.1337, simple_loss=0.2057, pruned_loss=0.03085, over 4938.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2224, pruned_loss=0.0411, over 972125.29 frames.], batch size: 23, lr: 3.85e-04 +2022-05-05 05:56:32,188 INFO [train.py:715] (7/8) Epoch 5, batch 15250, loss[loss=0.1523, simple_loss=0.2134, pruned_loss=0.0456, over 4729.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2236, pruned_loss=0.04166, over 972113.83 frames.], batch size: 16, lr: 3.85e-04 +2022-05-05 05:57:10,898 INFO [train.py:715] (7/8) Epoch 5, batch 15300, loss[loss=0.1585, simple_loss=0.2251, pruned_loss=0.04599, over 4840.00 frames.], tot_loss[loss=0.153, simple_loss=0.2234, pruned_loss=0.04134, over 972983.93 frames.], batch size: 13, lr: 3.85e-04 +2022-05-05 05:57:50,139 INFO [train.py:715] (7/8) Epoch 5, batch 15350, loss[loss=0.1565, simple_loss=0.2348, pruned_loss=0.03908, over 4894.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2234, pruned_loss=0.04102, over 972554.73 frames.], batch size: 19, lr: 3.85e-04 +2022-05-05 05:58:28,474 INFO [train.py:715] (7/8) Epoch 5, batch 15400, loss[loss=0.1473, simple_loss=0.2214, pruned_loss=0.03666, over 4778.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2235, pruned_loss=0.04096, over 973357.81 frames.], batch size: 18, lr: 3.85e-04 +2022-05-05 05:59:07,518 INFO [train.py:715] (7/8) Epoch 5, batch 15450, loss[loss=0.1683, simple_loss=0.2401, pruned_loss=0.04825, over 4977.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2226, pruned_loss=0.04015, over 972985.21 frames.], batch size: 31, lr: 3.85e-04 +2022-05-05 05:59:46,050 INFO [train.py:715] (7/8) Epoch 5, batch 15500, loss[loss=0.1414, simple_loss=0.2119, pruned_loss=0.03543, over 4849.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2236, pruned_loss=0.04072, over 972999.52 frames.], batch size: 20, lr: 3.85e-04 +2022-05-05 06:00:25,317 INFO [train.py:715] (7/8) Epoch 5, batch 15550, loss[loss=0.1621, simple_loss=0.2341, pruned_loss=0.04504, over 4846.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2232, pruned_loss=0.0407, over 973218.30 frames.], batch size: 15, lr: 3.85e-04 +2022-05-05 06:01:03,325 INFO [train.py:715] (7/8) Epoch 5, batch 15600, loss[loss=0.1585, simple_loss=0.237, pruned_loss=0.03993, over 4882.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2232, pruned_loss=0.04125, over 972497.90 frames.], batch size: 32, lr: 3.85e-04 +2022-05-05 06:01:40,925 INFO [train.py:715] (7/8) Epoch 5, batch 15650, loss[loss=0.161, simple_loss=0.2409, pruned_loss=0.04062, over 4964.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2237, pruned_loss=0.0415, over 972895.31 frames.], batch size: 24, lr: 3.85e-04 +2022-05-05 06:02:18,446 
INFO [train.py:715] (7/8) Epoch 5, batch 15700, loss[loss=0.1459, simple_loss=0.2224, pruned_loss=0.0347, over 4830.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2241, pruned_loss=0.04166, over 971372.26 frames.], batch size: 15, lr: 3.85e-04 +2022-05-05 06:02:56,464 INFO [train.py:715] (7/8) Epoch 5, batch 15750, loss[loss=0.1996, simple_loss=0.2599, pruned_loss=0.06959, over 4860.00 frames.], tot_loss[loss=0.154, simple_loss=0.224, pruned_loss=0.042, over 971510.91 frames.], batch size: 38, lr: 3.85e-04 +2022-05-05 06:03:34,888 INFO [train.py:715] (7/8) Epoch 5, batch 15800, loss[loss=0.1522, simple_loss=0.2241, pruned_loss=0.04015, over 4824.00 frames.], tot_loss[loss=0.154, simple_loss=0.2234, pruned_loss=0.04226, over 972151.10 frames.], batch size: 27, lr: 3.85e-04 +2022-05-05 06:04:12,954 INFO [train.py:715] (7/8) Epoch 5, batch 15850, loss[loss=0.1209, simple_loss=0.1964, pruned_loss=0.02271, over 4774.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2233, pruned_loss=0.042, over 972030.33 frames.], batch size: 17, lr: 3.85e-04 +2022-05-05 06:04:50,528 INFO [train.py:715] (7/8) Epoch 5, batch 15900, loss[loss=0.1693, simple_loss=0.2342, pruned_loss=0.05224, over 4962.00 frames.], tot_loss[loss=0.1531, simple_loss=0.223, pruned_loss=0.0416, over 972075.49 frames.], batch size: 15, lr: 3.85e-04 +2022-05-05 06:05:28,344 INFO [train.py:715] (7/8) Epoch 5, batch 15950, loss[loss=0.1456, simple_loss=0.2171, pruned_loss=0.03706, over 4784.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2226, pruned_loss=0.04128, over 972186.49 frames.], batch size: 18, lr: 3.85e-04 +2022-05-05 06:06:05,813 INFO [train.py:715] (7/8) Epoch 5, batch 16000, loss[loss=0.1462, simple_loss=0.2239, pruned_loss=0.03419, over 4984.00 frames.], tot_loss[loss=0.1514, simple_loss=0.222, pruned_loss=0.04039, over 972674.08 frames.], batch size: 25, lr: 3.85e-04 +2022-05-05 06:06:43,535 INFO [train.py:715] (7/8) Epoch 5, batch 16050, loss[loss=0.1791, simple_loss=0.2393, pruned_loss=0.05944, over 4781.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.04099, over 972916.69 frames.], batch size: 18, lr: 3.84e-04 +2022-05-05 06:07:21,600 INFO [train.py:715] (7/8) Epoch 5, batch 16100, loss[loss=0.1122, simple_loss=0.1876, pruned_loss=0.01844, over 4788.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2233, pruned_loss=0.04116, over 972288.35 frames.], batch size: 24, lr: 3.84e-04 +2022-05-05 06:08:00,778 INFO [train.py:715] (7/8) Epoch 5, batch 16150, loss[loss=0.153, simple_loss=0.2335, pruned_loss=0.03622, over 4971.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2239, pruned_loss=0.04165, over 972327.79 frames.], batch size: 24, lr: 3.84e-04 +2022-05-05 06:08:39,728 INFO [train.py:715] (7/8) Epoch 5, batch 16200, loss[loss=0.166, simple_loss=0.2236, pruned_loss=0.05416, over 4964.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2233, pruned_loss=0.04145, over 971397.41 frames.], batch size: 15, lr: 3.84e-04 +2022-05-05 06:09:18,289 INFO [train.py:715] (7/8) Epoch 5, batch 16250, loss[loss=0.1636, simple_loss=0.2381, pruned_loss=0.04457, over 4775.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2238, pruned_loss=0.04149, over 971593.73 frames.], batch size: 18, lr: 3.84e-04 +2022-05-05 06:09:56,098 INFO [train.py:715] (7/8) Epoch 5, batch 16300, loss[loss=0.1831, simple_loss=0.243, pruned_loss=0.06163, over 4753.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2235, pruned_loss=0.04151, over 972021.65 frames.], batch size: 19, lr: 3.84e-04 +2022-05-05 06:10:34,110 INFO [train.py:715] (7/8) Epoch 
5, batch 16350, loss[loss=0.1759, simple_loss=0.2474, pruned_loss=0.05222, over 4771.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2239, pruned_loss=0.04164, over 972287.87 frames.], batch size: 18, lr: 3.84e-04 +2022-05-05 06:11:12,493 INFO [train.py:715] (7/8) Epoch 5, batch 16400, loss[loss=0.1588, simple_loss=0.2189, pruned_loss=0.04936, over 4896.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2241, pruned_loss=0.04154, over 972994.20 frames.], batch size: 19, lr: 3.84e-04 +2022-05-05 06:11:50,951 INFO [train.py:715] (7/8) Epoch 5, batch 16450, loss[loss=0.1343, simple_loss=0.192, pruned_loss=0.03829, over 4971.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04164, over 972707.68 frames.], batch size: 14, lr: 3.84e-04 +2022-05-05 06:12:30,301 INFO [train.py:715] (7/8) Epoch 5, batch 16500, loss[loss=0.1885, simple_loss=0.2495, pruned_loss=0.06374, over 4986.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2243, pruned_loss=0.04205, over 972176.61 frames.], batch size: 25, lr: 3.84e-04 +2022-05-05 06:13:08,221 INFO [train.py:715] (7/8) Epoch 5, batch 16550, loss[loss=0.1543, simple_loss=0.2311, pruned_loss=0.03879, over 4815.00 frames.], tot_loss[loss=0.1552, simple_loss=0.225, pruned_loss=0.04272, over 971637.16 frames.], batch size: 15, lr: 3.84e-04 +2022-05-05 06:13:46,904 INFO [train.py:715] (7/8) Epoch 5, batch 16600, loss[loss=0.1696, simple_loss=0.2501, pruned_loss=0.0445, over 4786.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2247, pruned_loss=0.04248, over 971997.92 frames.], batch size: 17, lr: 3.84e-04 +2022-05-05 06:14:25,620 INFO [train.py:715] (7/8) Epoch 5, batch 16650, loss[loss=0.1904, simple_loss=0.2706, pruned_loss=0.05505, over 4899.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2244, pruned_loss=0.04218, over 971969.47 frames.], batch size: 17, lr: 3.84e-04 +2022-05-05 06:15:04,295 INFO [train.py:715] (7/8) Epoch 5, batch 16700, loss[loss=0.1456, simple_loss=0.2211, pruned_loss=0.03506, over 4837.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2237, pruned_loss=0.0419, over 971917.51 frames.], batch size: 30, lr: 3.84e-04 +2022-05-05 06:15:42,484 INFO [train.py:715] (7/8) Epoch 5, batch 16750, loss[loss=0.1037, simple_loss=0.1717, pruned_loss=0.01786, over 4813.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2227, pruned_loss=0.04116, over 971965.68 frames.], batch size: 12, lr: 3.84e-04 +2022-05-05 06:16:20,936 INFO [train.py:715] (7/8) Epoch 5, batch 16800, loss[loss=0.158, simple_loss=0.2331, pruned_loss=0.04146, over 4874.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2231, pruned_loss=0.04132, over 972420.71 frames.], batch size: 16, lr: 3.84e-04 +2022-05-05 06:17:00,068 INFO [train.py:715] (7/8) Epoch 5, batch 16850, loss[loss=0.1422, simple_loss=0.2102, pruned_loss=0.03706, over 4973.00 frames.], tot_loss[loss=0.152, simple_loss=0.2221, pruned_loss=0.04089, over 973235.03 frames.], batch size: 35, lr: 3.84e-04 +2022-05-05 06:17:37,930 INFO [train.py:715] (7/8) Epoch 5, batch 16900, loss[loss=0.1447, simple_loss=0.2278, pruned_loss=0.03083, over 4887.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2233, pruned_loss=0.04172, over 973798.73 frames.], batch size: 22, lr: 3.84e-04 +2022-05-05 06:18:16,757 INFO [train.py:715] (7/8) Epoch 5, batch 16950, loss[loss=0.1392, simple_loss=0.2138, pruned_loss=0.03232, over 4850.00 frames.], tot_loss[loss=0.1529, simple_loss=0.223, pruned_loss=0.04144, over 972721.18 frames.], batch size: 30, lr: 3.84e-04 +2022-05-05 06:18:55,161 INFO [train.py:715] (7/8) Epoch 5, batch 17000, 
loss[loss=0.1596, simple_loss=0.2338, pruned_loss=0.04274, over 4778.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2221, pruned_loss=0.0408, over 972872.30 frames.], batch size: 18, lr: 3.84e-04 +2022-05-05 06:19:33,549 INFO [train.py:715] (7/8) Epoch 5, batch 17050, loss[loss=0.1596, simple_loss=0.2288, pruned_loss=0.04514, over 4853.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2227, pruned_loss=0.04077, over 972560.35 frames.], batch size: 34, lr: 3.83e-04 +2022-05-05 06:20:11,942 INFO [train.py:715] (7/8) Epoch 5, batch 17100, loss[loss=0.1559, simple_loss=0.2334, pruned_loss=0.03924, over 4932.00 frames.], tot_loss[loss=0.1522, simple_loss=0.223, pruned_loss=0.0407, over 972457.84 frames.], batch size: 21, lr: 3.83e-04 +2022-05-05 06:20:49,750 INFO [train.py:715] (7/8) Epoch 5, batch 17150, loss[loss=0.1494, simple_loss=0.2255, pruned_loss=0.03664, over 4981.00 frames.], tot_loss[loss=0.152, simple_loss=0.2229, pruned_loss=0.04053, over 973065.60 frames.], batch size: 25, lr: 3.83e-04 +2022-05-05 06:21:27,629 INFO [train.py:715] (7/8) Epoch 5, batch 17200, loss[loss=0.114, simple_loss=0.1872, pruned_loss=0.02037, over 4975.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2214, pruned_loss=0.03984, over 972867.50 frames.], batch size: 28, lr: 3.83e-04 +2022-05-05 06:22:04,735 INFO [train.py:715] (7/8) Epoch 5, batch 17250, loss[loss=0.1436, simple_loss=0.2263, pruned_loss=0.03046, over 4884.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04031, over 973105.03 frames.], batch size: 22, lr: 3.83e-04 +2022-05-05 06:22:42,971 INFO [train.py:715] (7/8) Epoch 5, batch 17300, loss[loss=0.1558, simple_loss=0.2262, pruned_loss=0.04265, over 4860.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04039, over 972815.78 frames.], batch size: 30, lr: 3.83e-04 +2022-05-05 06:23:22,497 INFO [train.py:715] (7/8) Epoch 5, batch 17350, loss[loss=0.1637, simple_loss=0.2407, pruned_loss=0.04332, over 4958.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2219, pruned_loss=0.04027, over 972948.15 frames.], batch size: 29, lr: 3.83e-04 +2022-05-05 06:24:00,868 INFO [train.py:715] (7/8) Epoch 5, batch 17400, loss[loss=0.1436, simple_loss=0.2184, pruned_loss=0.03436, over 4821.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2221, pruned_loss=0.04042, over 972978.05 frames.], batch size: 27, lr: 3.83e-04 +2022-05-05 06:24:39,481 INFO [train.py:715] (7/8) Epoch 5, batch 17450, loss[loss=0.1191, simple_loss=0.1889, pruned_loss=0.02463, over 4913.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04068, over 972970.41 frames.], batch size: 17, lr: 3.83e-04 +2022-05-05 06:25:17,954 INFO [train.py:715] (7/8) Epoch 5, batch 17500, loss[loss=0.1651, simple_loss=0.2472, pruned_loss=0.04152, over 4792.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04059, over 973141.74 frames.], batch size: 21, lr: 3.83e-04 +2022-05-05 06:25:56,805 INFO [train.py:715] (7/8) Epoch 5, batch 17550, loss[loss=0.1646, simple_loss=0.2422, pruned_loss=0.04353, over 4890.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2226, pruned_loss=0.04041, over 973141.81 frames.], batch size: 22, lr: 3.83e-04 +2022-05-05 06:26:35,441 INFO [train.py:715] (7/8) Epoch 5, batch 17600, loss[loss=0.1312, simple_loss=0.215, pruned_loss=0.02369, over 4828.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2232, pruned_loss=0.04075, over 973113.42 frames.], batch size: 13, lr: 3.83e-04 +2022-05-05 06:27:14,154 INFO [train.py:715] (7/8) Epoch 5, batch 17650, loss[loss=0.1651, 
simple_loss=0.235, pruned_loss=0.04765, over 4935.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2231, pruned_loss=0.04094, over 972746.87 frames.], batch size: 39, lr: 3.83e-04 +2022-05-05 06:27:52,809 INFO [train.py:715] (7/8) Epoch 5, batch 17700, loss[loss=0.1494, simple_loss=0.2232, pruned_loss=0.03778, over 4768.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2226, pruned_loss=0.04093, over 972298.09 frames.], batch size: 19, lr: 3.83e-04 +2022-05-05 06:28:31,727 INFO [train.py:715] (7/8) Epoch 5, batch 17750, loss[loss=0.1619, simple_loss=0.2331, pruned_loss=0.04532, over 4881.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04108, over 972569.04 frames.], batch size: 32, lr: 3.83e-04 +2022-05-05 06:29:09,752 INFO [train.py:715] (7/8) Epoch 5, batch 17800, loss[loss=0.1397, simple_loss=0.2142, pruned_loss=0.03262, over 4938.00 frames.], tot_loss[loss=0.1523, simple_loss=0.223, pruned_loss=0.04081, over 972574.85 frames.], batch size: 29, lr: 3.83e-04 +2022-05-05 06:29:48,583 INFO [train.py:715] (7/8) Epoch 5, batch 17850, loss[loss=0.1379, simple_loss=0.2168, pruned_loss=0.02947, over 4890.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2225, pruned_loss=0.04046, over 972285.13 frames.], batch size: 22, lr: 3.83e-04 +2022-05-05 06:30:27,677 INFO [train.py:715] (7/8) Epoch 5, batch 17900, loss[loss=0.216, simple_loss=0.2932, pruned_loss=0.06935, over 4813.00 frames.], tot_loss[loss=0.152, simple_loss=0.2229, pruned_loss=0.04051, over 972446.27 frames.], batch size: 27, lr: 3.83e-04 +2022-05-05 06:31:06,329 INFO [train.py:715] (7/8) Epoch 5, batch 17950, loss[loss=0.1728, simple_loss=0.2404, pruned_loss=0.05254, over 4928.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2226, pruned_loss=0.04051, over 972983.16 frames.], batch size: 23, lr: 3.83e-04 +2022-05-05 06:31:47,055 INFO [train.py:715] (7/8) Epoch 5, batch 18000, loss[loss=0.1847, simple_loss=0.2401, pruned_loss=0.06463, over 4970.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2229, pruned_loss=0.04094, over 972645.41 frames.], batch size: 15, lr: 3.83e-04 +2022-05-05 06:31:47,056 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 06:31:59,754 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1102, simple_loss=0.1955, pruned_loss=0.01245, over 914524.00 frames. 
+2022-05-05 06:32:38,355 INFO [train.py:715] (7/8) Epoch 5, batch 18050, loss[loss=0.1657, simple_loss=0.2328, pruned_loss=0.04933, over 4877.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2225, pruned_loss=0.04129, over 972622.69 frames.], batch size: 22, lr: 3.82e-04 +2022-05-05 06:33:17,596 INFO [train.py:715] (7/8) Epoch 5, batch 18100, loss[loss=0.1747, simple_loss=0.2397, pruned_loss=0.0548, over 4897.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2235, pruned_loss=0.0415, over 972716.37 frames.], batch size: 22, lr: 3.82e-04 +2022-05-05 06:33:56,333 INFO [train.py:715] (7/8) Epoch 5, batch 18150, loss[loss=0.1409, simple_loss=0.2116, pruned_loss=0.0351, over 4768.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2227, pruned_loss=0.04109, over 971905.65 frames.], batch size: 19, lr: 3.82e-04 +2022-05-05 06:34:34,859 INFO [train.py:715] (7/8) Epoch 5, batch 18200, loss[loss=0.1484, simple_loss=0.2166, pruned_loss=0.04011, over 4858.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.041, over 971965.11 frames.], batch size: 20, lr: 3.82e-04 +2022-05-05 06:35:14,245 INFO [train.py:715] (7/8) Epoch 5, batch 18250, loss[loss=0.219, simple_loss=0.2754, pruned_loss=0.08133, over 4776.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2229, pruned_loss=0.04132, over 972445.30 frames.], batch size: 17, lr: 3.82e-04 +2022-05-05 06:35:53,138 INFO [train.py:715] (7/8) Epoch 5, batch 18300, loss[loss=0.1549, simple_loss=0.228, pruned_loss=0.04095, over 4944.00 frames.], tot_loss[loss=0.153, simple_loss=0.223, pruned_loss=0.04144, over 972822.24 frames.], batch size: 29, lr: 3.82e-04 +2022-05-05 06:36:31,709 INFO [train.py:715] (7/8) Epoch 5, batch 18350, loss[loss=0.1441, simple_loss=0.2166, pruned_loss=0.03579, over 4828.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2228, pruned_loss=0.04118, over 973179.51 frames.], batch size: 15, lr: 3.82e-04 +2022-05-05 06:37:09,998 INFO [train.py:715] (7/8) Epoch 5, batch 18400, loss[loss=0.1144, simple_loss=0.1783, pruned_loss=0.02526, over 4783.00 frames.], tot_loss[loss=0.1528, simple_loss=0.223, pruned_loss=0.04126, over 972543.16 frames.], batch size: 14, lr: 3.82e-04 +2022-05-05 06:37:49,158 INFO [train.py:715] (7/8) Epoch 5, batch 18450, loss[loss=0.1668, simple_loss=0.2435, pruned_loss=0.04504, over 4827.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2223, pruned_loss=0.04093, over 971825.40 frames.], batch size: 30, lr: 3.82e-04 +2022-05-05 06:38:27,817 INFO [train.py:715] (7/8) Epoch 5, batch 18500, loss[loss=0.1683, simple_loss=0.2336, pruned_loss=0.05151, over 4865.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2229, pruned_loss=0.04103, over 971447.89 frames.], batch size: 30, lr: 3.82e-04 +2022-05-05 06:39:06,125 INFO [train.py:715] (7/8) Epoch 5, batch 18550, loss[loss=0.1219, simple_loss=0.2033, pruned_loss=0.02023, over 4987.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2225, pruned_loss=0.04083, over 971693.66 frames.], batch size: 14, lr: 3.82e-04 +2022-05-05 06:39:45,171 INFO [train.py:715] (7/8) Epoch 5, batch 18600, loss[loss=0.1586, simple_loss=0.2293, pruned_loss=0.04393, over 4850.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2231, pruned_loss=0.04101, over 972369.35 frames.], batch size: 20, lr: 3.82e-04 +2022-05-05 06:40:23,779 INFO [train.py:715] (7/8) Epoch 5, batch 18650, loss[loss=0.1407, simple_loss=0.2034, pruned_loss=0.03893, over 4990.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2226, pruned_loss=0.04117, over 972120.22 frames.], batch size: 14, lr: 3.82e-04 +2022-05-05 06:41:01,937 
INFO [train.py:715] (7/8) Epoch 5, batch 18700, loss[loss=0.1841, simple_loss=0.2579, pruned_loss=0.05514, over 4950.00 frames.], tot_loss[loss=0.1528, simple_loss=0.223, pruned_loss=0.04123, over 971864.47 frames.], batch size: 35, lr: 3.82e-04 +2022-05-05 06:41:40,674 INFO [train.py:715] (7/8) Epoch 5, batch 18750, loss[loss=0.1365, simple_loss=0.2232, pruned_loss=0.02492, over 4814.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2223, pruned_loss=0.04074, over 972125.63 frames.], batch size: 27, lr: 3.82e-04 +2022-05-05 06:42:19,955 INFO [train.py:715] (7/8) Epoch 5, batch 18800, loss[loss=0.1526, simple_loss=0.2307, pruned_loss=0.03729, over 4899.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04043, over 972374.09 frames.], batch size: 17, lr: 3.82e-04 +2022-05-05 06:42:59,660 INFO [train.py:715] (7/8) Epoch 5, batch 18850, loss[loss=0.1398, simple_loss=0.2048, pruned_loss=0.03739, over 4913.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2214, pruned_loss=0.04053, over 972208.43 frames.], batch size: 39, lr: 3.82e-04 +2022-05-05 06:43:38,447 INFO [train.py:715] (7/8) Epoch 5, batch 18900, loss[loss=0.1694, simple_loss=0.2268, pruned_loss=0.05604, over 4851.00 frames.], tot_loss[loss=0.1532, simple_loss=0.223, pruned_loss=0.04166, over 972447.34 frames.], batch size: 20, lr: 3.82e-04 +2022-05-05 06:44:16,644 INFO [train.py:715] (7/8) Epoch 5, batch 18950, loss[loss=0.1504, simple_loss=0.2274, pruned_loss=0.03667, over 4911.00 frames.], tot_loss[loss=0.1528, simple_loss=0.223, pruned_loss=0.04133, over 973382.27 frames.], batch size: 17, lr: 3.82e-04 +2022-05-05 06:44:56,115 INFO [train.py:715] (7/8) Epoch 5, batch 19000, loss[loss=0.1444, simple_loss=0.2127, pruned_loss=0.03809, over 4970.00 frames.], tot_loss[loss=0.153, simple_loss=0.2232, pruned_loss=0.04137, over 973118.89 frames.], batch size: 15, lr: 3.82e-04 +2022-05-05 06:45:34,090 INFO [train.py:715] (7/8) Epoch 5, batch 19050, loss[loss=0.1072, simple_loss=0.1898, pruned_loss=0.01225, over 4816.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.04086, over 972140.37 frames.], batch size: 14, lr: 3.81e-04 +2022-05-05 06:46:13,038 INFO [train.py:715] (7/8) Epoch 5, batch 19100, loss[loss=0.1229, simple_loss=0.1913, pruned_loss=0.02731, over 4791.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2224, pruned_loss=0.04052, over 971899.36 frames.], batch size: 12, lr: 3.81e-04 +2022-05-05 06:46:52,735 INFO [train.py:715] (7/8) Epoch 5, batch 19150, loss[loss=0.1517, simple_loss=0.2226, pruned_loss=0.0404, over 4987.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04045, over 972102.57 frames.], batch size: 25, lr: 3.81e-04 +2022-05-05 06:47:31,316 INFO [train.py:715] (7/8) Epoch 5, batch 19200, loss[loss=0.1796, simple_loss=0.2369, pruned_loss=0.06121, over 4750.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2231, pruned_loss=0.04096, over 972018.51 frames.], batch size: 16, lr: 3.81e-04 +2022-05-05 06:48:10,846 INFO [train.py:715] (7/8) Epoch 5, batch 19250, loss[loss=0.1211, simple_loss=0.1947, pruned_loss=0.02378, over 4909.00 frames.], tot_loss[loss=0.1519, simple_loss=0.223, pruned_loss=0.04037, over 971408.10 frames.], batch size: 18, lr: 3.81e-04 +2022-05-05 06:48:48,905 INFO [train.py:715] (7/8) Epoch 5, batch 19300, loss[loss=0.2169, simple_loss=0.2953, pruned_loss=0.06925, over 4839.00 frames.], tot_loss[loss=0.152, simple_loss=0.2229, pruned_loss=0.04057, over 971448.06 frames.], batch size: 15, lr: 3.81e-04 +2022-05-05 06:49:27,999 INFO [train.py:715] (7/8) 
Epoch 5, batch 19350, loss[loss=0.1425, simple_loss=0.2088, pruned_loss=0.03808, over 4992.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2227, pruned_loss=0.04056, over 971720.49 frames.], batch size: 16, lr: 3.81e-04 +2022-05-05 06:50:06,759 INFO [train.py:715] (7/8) Epoch 5, batch 19400, loss[loss=0.1708, simple_loss=0.2408, pruned_loss=0.05041, over 4933.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.04091, over 971344.71 frames.], batch size: 29, lr: 3.81e-04 +2022-05-05 06:50:45,414 INFO [train.py:715] (7/8) Epoch 5, batch 19450, loss[loss=0.2039, simple_loss=0.2561, pruned_loss=0.07583, over 4812.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04108, over 971434.34 frames.], batch size: 21, lr: 3.81e-04 +2022-05-05 06:51:25,051 INFO [train.py:715] (7/8) Epoch 5, batch 19500, loss[loss=0.1236, simple_loss=0.197, pruned_loss=0.02512, over 4782.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2226, pruned_loss=0.04094, over 971236.07 frames.], batch size: 18, lr: 3.81e-04 +2022-05-05 06:52:03,848 INFO [train.py:715] (7/8) Epoch 5, batch 19550, loss[loss=0.1433, simple_loss=0.22, pruned_loss=0.03331, over 4819.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2226, pruned_loss=0.04064, over 971892.66 frames.], batch size: 27, lr: 3.81e-04 +2022-05-05 06:52:42,736 INFO [train.py:715] (7/8) Epoch 5, batch 19600, loss[loss=0.131, simple_loss=0.2072, pruned_loss=0.02745, over 4814.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2242, pruned_loss=0.04154, over 972232.96 frames.], batch size: 13, lr: 3.81e-04 +2022-05-05 06:53:21,191 INFO [train.py:715] (7/8) Epoch 5, batch 19650, loss[loss=0.1451, simple_loss=0.223, pruned_loss=0.0336, over 4885.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2243, pruned_loss=0.04158, over 972759.04 frames.], batch size: 16, lr: 3.81e-04 +2022-05-05 06:54:00,674 INFO [train.py:715] (7/8) Epoch 5, batch 19700, loss[loss=0.162, simple_loss=0.2411, pruned_loss=0.04143, over 4931.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2238, pruned_loss=0.04153, over 973594.78 frames.], batch size: 18, lr: 3.81e-04 +2022-05-05 06:54:39,904 INFO [train.py:715] (7/8) Epoch 5, batch 19750, loss[loss=0.145, simple_loss=0.222, pruned_loss=0.03404, over 4922.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2236, pruned_loss=0.04137, over 973614.48 frames.], batch size: 18, lr: 3.81e-04 +2022-05-05 06:55:17,843 INFO [train.py:715] (7/8) Epoch 5, batch 19800, loss[loss=0.1509, simple_loss=0.2137, pruned_loss=0.04403, over 4700.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2236, pruned_loss=0.04125, over 972805.84 frames.], batch size: 15, lr: 3.81e-04 +2022-05-05 06:55:56,846 INFO [train.py:715] (7/8) Epoch 5, batch 19850, loss[loss=0.1508, simple_loss=0.2181, pruned_loss=0.04177, over 4844.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2227, pruned_loss=0.04106, over 972198.65 frames.], batch size: 30, lr: 3.81e-04 +2022-05-05 06:56:35,743 INFO [train.py:715] (7/8) Epoch 5, batch 19900, loss[loss=0.1927, simple_loss=0.2475, pruned_loss=0.06893, over 4824.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.04095, over 972361.60 frames.], batch size: 15, lr: 3.81e-04 +2022-05-05 06:57:14,680 INFO [train.py:715] (7/8) Epoch 5, batch 19950, loss[loss=0.1157, simple_loss=0.191, pruned_loss=0.02016, over 4963.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2226, pruned_loss=0.04125, over 971121.29 frames.], batch size: 14, lr: 3.81e-04 +2022-05-05 06:57:53,093 INFO [train.py:715] (7/8) Epoch 5, batch 20000, 
loss[loss=0.1425, simple_loss=0.214, pruned_loss=0.03551, over 4766.00 frames.], tot_loss[loss=0.1522, simple_loss=0.222, pruned_loss=0.04125, over 971518.85 frames.], batch size: 16, lr: 3.81e-04 +2022-05-05 06:58:32,599 INFO [train.py:715] (7/8) Epoch 5, batch 20050, loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03208, over 4982.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2214, pruned_loss=0.0408, over 971171.92 frames.], batch size: 28, lr: 3.81e-04 +2022-05-05 06:59:12,130 INFO [train.py:715] (7/8) Epoch 5, batch 20100, loss[loss=0.2018, simple_loss=0.2774, pruned_loss=0.06307, over 4758.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2218, pruned_loss=0.04069, over 971214.17 frames.], batch size: 16, lr: 3.80e-04 +2022-05-05 06:59:50,438 INFO [train.py:715] (7/8) Epoch 5, batch 20150, loss[loss=0.1462, simple_loss=0.2166, pruned_loss=0.03793, over 4849.00 frames.], tot_loss[loss=0.151, simple_loss=0.2214, pruned_loss=0.0403, over 971077.15 frames.], batch size: 30, lr: 3.80e-04 +2022-05-05 07:00:30,259 INFO [train.py:715] (7/8) Epoch 5, batch 20200, loss[loss=0.1544, simple_loss=0.2127, pruned_loss=0.04805, over 4944.00 frames.], tot_loss[loss=0.1516, simple_loss=0.222, pruned_loss=0.04062, over 971284.63 frames.], batch size: 29, lr: 3.80e-04 +2022-05-05 07:01:09,276 INFO [train.py:715] (7/8) Epoch 5, batch 20250, loss[loss=0.1866, simple_loss=0.25, pruned_loss=0.06159, over 4939.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04066, over 972162.87 frames.], batch size: 39, lr: 3.80e-04 +2022-05-05 07:01:47,794 INFO [train.py:715] (7/8) Epoch 5, batch 20300, loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02967, over 4990.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2222, pruned_loss=0.04009, over 972069.42 frames.], batch size: 25, lr: 3.80e-04 +2022-05-05 07:02:25,748 INFO [train.py:715] (7/8) Epoch 5, batch 20350, loss[loss=0.1599, simple_loss=0.2191, pruned_loss=0.05032, over 4963.00 frames.], tot_loss[loss=0.152, simple_loss=0.2228, pruned_loss=0.04062, over 972810.78 frames.], batch size: 35, lr: 3.80e-04 +2022-05-05 07:03:04,304 INFO [train.py:715] (7/8) Epoch 5, batch 20400, loss[loss=0.1262, simple_loss=0.2011, pruned_loss=0.02563, over 4646.00 frames.], tot_loss[loss=0.1524, simple_loss=0.223, pruned_loss=0.04086, over 972762.16 frames.], batch size: 13, lr: 3.80e-04 +2022-05-05 07:03:43,173 INFO [train.py:715] (7/8) Epoch 5, batch 20450, loss[loss=0.1349, simple_loss=0.2077, pruned_loss=0.03107, over 4885.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2229, pruned_loss=0.04086, over 972856.22 frames.], batch size: 32, lr: 3.80e-04 +2022-05-05 07:04:21,347 INFO [train.py:715] (7/8) Epoch 5, batch 20500, loss[loss=0.1391, simple_loss=0.2078, pruned_loss=0.03517, over 4767.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2228, pruned_loss=0.04068, over 972626.93 frames.], batch size: 19, lr: 3.80e-04 +2022-05-05 07:05:00,717 INFO [train.py:715] (7/8) Epoch 5, batch 20550, loss[loss=0.1748, simple_loss=0.2364, pruned_loss=0.05664, over 4911.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2234, pruned_loss=0.04108, over 972923.63 frames.], batch size: 17, lr: 3.80e-04 +2022-05-05 07:05:39,983 INFO [train.py:715] (7/8) Epoch 5, batch 20600, loss[loss=0.1627, simple_loss=0.225, pruned_loss=0.05014, over 4964.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2241, pruned_loss=0.0416, over 972795.76 frames.], batch size: 14, lr: 3.80e-04 +2022-05-05 07:06:18,977 INFO [train.py:715] (7/8) Epoch 5, batch 20650, loss[loss=0.1595, 
simple_loss=0.2207, pruned_loss=0.04917, over 4792.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2228, pruned_loss=0.04098, over 972031.61 frames.], batch size: 14, lr: 3.80e-04 +2022-05-05 07:06:58,194 INFO [train.py:715] (7/8) Epoch 5, batch 20700, loss[loss=0.1398, simple_loss=0.2146, pruned_loss=0.03255, over 4773.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04007, over 973137.69 frames.], batch size: 12, lr: 3.80e-04 +2022-05-05 07:07:36,956 INFO [train.py:715] (7/8) Epoch 5, batch 20750, loss[loss=0.1741, simple_loss=0.2373, pruned_loss=0.05547, over 4869.00 frames.], tot_loss[loss=0.151, simple_loss=0.2217, pruned_loss=0.04014, over 973093.07 frames.], batch size: 30, lr: 3.80e-04 +2022-05-05 07:08:16,383 INFO [train.py:715] (7/8) Epoch 5, batch 20800, loss[loss=0.1623, simple_loss=0.2358, pruned_loss=0.04443, over 4893.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.0398, over 972873.24 frames.], batch size: 17, lr: 3.80e-04 +2022-05-05 07:08:55,023 INFO [train.py:715] (7/8) Epoch 5, batch 20850, loss[loss=0.1237, simple_loss=0.1982, pruned_loss=0.02463, over 4964.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2215, pruned_loss=0.03981, over 972256.28 frames.], batch size: 28, lr: 3.80e-04 +2022-05-05 07:09:34,327 INFO [train.py:715] (7/8) Epoch 5, batch 20900, loss[loss=0.1529, simple_loss=0.2323, pruned_loss=0.03678, over 4883.00 frames.], tot_loss[loss=0.1517, simple_loss=0.222, pruned_loss=0.04065, over 972079.21 frames.], batch size: 19, lr: 3.80e-04 +2022-05-05 07:10:12,902 INFO [train.py:715] (7/8) Epoch 5, batch 20950, loss[loss=0.1406, simple_loss=0.2213, pruned_loss=0.02998, over 4964.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2225, pruned_loss=0.04046, over 971868.34 frames.], batch size: 24, lr: 3.80e-04 +2022-05-05 07:10:51,486 INFO [train.py:715] (7/8) Epoch 5, batch 21000, loss[loss=0.1246, simple_loss=0.2012, pruned_loss=0.02397, over 4956.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2226, pruned_loss=0.04063, over 972166.01 frames.], batch size: 14, lr: 3.80e-04 +2022-05-05 07:10:51,487 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 07:11:01,470 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1101, simple_loss=0.1954, pruned_loss=0.01242, over 914524.00 frames. 
+2022-05-05 07:11:40,512 INFO [train.py:715] (7/8) Epoch 5, batch 21050, loss[loss=0.1564, simple_loss=0.2328, pruned_loss=0.04, over 4824.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2235, pruned_loss=0.04115, over 972373.80 frames.], batch size: 25, lr: 3.80e-04 +2022-05-05 07:12:19,699 INFO [train.py:715] (7/8) Epoch 5, batch 21100, loss[loss=0.1424, simple_loss=0.2139, pruned_loss=0.03543, over 4871.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2233, pruned_loss=0.04127, over 972631.82 frames.], batch size: 13, lr: 3.79e-04 +2022-05-05 07:12:58,342 INFO [train.py:715] (7/8) Epoch 5, batch 21150, loss[loss=0.1872, simple_loss=0.2505, pruned_loss=0.06199, over 4770.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04068, over 973248.54 frames.], batch size: 14, lr: 3.79e-04 +2022-05-05 07:13:37,165 INFO [train.py:715] (7/8) Epoch 5, batch 21200, loss[loss=0.1566, simple_loss=0.2261, pruned_loss=0.0435, over 4883.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2217, pruned_loss=0.04027, over 972933.31 frames.], batch size: 16, lr: 3.79e-04 +2022-05-05 07:14:15,841 INFO [train.py:715] (7/8) Epoch 5, batch 21250, loss[loss=0.1194, simple_loss=0.1895, pruned_loss=0.02463, over 4936.00 frames.], tot_loss[loss=0.1511, simple_loss=0.222, pruned_loss=0.04016, over 973580.69 frames.], batch size: 23, lr: 3.79e-04 +2022-05-05 07:14:54,661 INFO [train.py:715] (7/8) Epoch 5, batch 21300, loss[loss=0.1695, simple_loss=0.2259, pruned_loss=0.05654, over 4769.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2221, pruned_loss=0.04061, over 972761.41 frames.], batch size: 18, lr: 3.79e-04 +2022-05-05 07:15:33,334 INFO [train.py:715] (7/8) Epoch 5, batch 21350, loss[loss=0.1369, simple_loss=0.2084, pruned_loss=0.03274, over 4885.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2224, pruned_loss=0.04054, over 972999.95 frames.], batch size: 22, lr: 3.79e-04 +2022-05-05 07:16:11,912 INFO [train.py:715] (7/8) Epoch 5, batch 21400, loss[loss=0.1586, simple_loss=0.2282, pruned_loss=0.04449, over 4694.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2213, pruned_loss=0.04003, over 972373.59 frames.], batch size: 15, lr: 3.79e-04 +2022-05-05 07:16:50,973 INFO [train.py:715] (7/8) Epoch 5, batch 21450, loss[loss=0.1593, simple_loss=0.2191, pruned_loss=0.0498, over 4988.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04045, over 972520.52 frames.], batch size: 25, lr: 3.79e-04 +2022-05-05 07:17:29,099 INFO [train.py:715] (7/8) Epoch 5, batch 21500, loss[loss=0.1403, simple_loss=0.2079, pruned_loss=0.03634, over 4804.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2219, pruned_loss=0.04053, over 971293.83 frames.], batch size: 21, lr: 3.79e-04 +2022-05-05 07:18:08,222 INFO [train.py:715] (7/8) Epoch 5, batch 21550, loss[loss=0.115, simple_loss=0.1796, pruned_loss=0.02522, over 4867.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2216, pruned_loss=0.0401, over 971216.14 frames.], batch size: 20, lr: 3.79e-04 +2022-05-05 07:18:46,744 INFO [train.py:715] (7/8) Epoch 5, batch 21600, loss[loss=0.1567, simple_loss=0.2244, pruned_loss=0.04447, over 4916.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2215, pruned_loss=0.04005, over 971705.92 frames.], batch size: 17, lr: 3.79e-04 +2022-05-05 07:19:25,823 INFO [train.py:715] (7/8) Epoch 5, batch 21650, loss[loss=0.1191, simple_loss=0.1882, pruned_loss=0.02502, over 4909.00 frames.], tot_loss[loss=0.1503, simple_loss=0.221, pruned_loss=0.03981, over 971541.98 frames.], batch size: 18, lr: 3.79e-04 +2022-05-05 07:20:04,069 
INFO [train.py:715] (7/8) Epoch 5, batch 21700, loss[loss=0.1357, simple_loss=0.2087, pruned_loss=0.03136, over 4930.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2214, pruned_loss=0.04024, over 971120.15 frames.], batch size: 29, lr: 3.79e-04 +2022-05-05 07:20:42,463 INFO [train.py:715] (7/8) Epoch 5, batch 21750, loss[loss=0.1553, simple_loss=0.2202, pruned_loss=0.04521, over 4951.00 frames.], tot_loss[loss=0.151, simple_loss=0.2216, pruned_loss=0.04019, over 971990.73 frames.], batch size: 35, lr: 3.79e-04 +2022-05-05 07:21:20,816 INFO [train.py:715] (7/8) Epoch 5, batch 21800, loss[loss=0.1317, simple_loss=0.2055, pruned_loss=0.02895, over 4974.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2222, pruned_loss=0.04047, over 972226.78 frames.], batch size: 24, lr: 3.79e-04 +2022-05-05 07:22:00,029 INFO [train.py:715] (7/8) Epoch 5, batch 21850, loss[loss=0.1271, simple_loss=0.1959, pruned_loss=0.0291, over 4871.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2226, pruned_loss=0.0406, over 973108.40 frames.], batch size: 32, lr: 3.79e-04 +2022-05-05 07:22:38,259 INFO [train.py:715] (7/8) Epoch 5, batch 21900, loss[loss=0.1634, simple_loss=0.231, pruned_loss=0.04788, over 4959.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2219, pruned_loss=0.04078, over 973034.27 frames.], batch size: 24, lr: 3.79e-04 +2022-05-05 07:23:16,810 INFO [train.py:715] (7/8) Epoch 5, batch 21950, loss[loss=0.1877, simple_loss=0.245, pruned_loss=0.0652, over 4906.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2221, pruned_loss=0.04107, over 972520.40 frames.], batch size: 17, lr: 3.79e-04 +2022-05-05 07:23:55,216 INFO [train.py:715] (7/8) Epoch 5, batch 22000, loss[loss=0.1528, simple_loss=0.2156, pruned_loss=0.04503, over 4692.00 frames.], tot_loss[loss=0.1521, simple_loss=0.222, pruned_loss=0.0411, over 972758.47 frames.], batch size: 15, lr: 3.79e-04 +2022-05-05 07:24:34,724 INFO [train.py:715] (7/8) Epoch 5, batch 22050, loss[loss=0.1369, simple_loss=0.2153, pruned_loss=0.02929, over 4948.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2227, pruned_loss=0.04137, over 971864.27 frames.], batch size: 21, lr: 3.79e-04 +2022-05-05 07:25:13,186 INFO [train.py:715] (7/8) Epoch 5, batch 22100, loss[loss=0.1593, simple_loss=0.2415, pruned_loss=0.03856, over 4902.00 frames.], tot_loss[loss=0.1528, simple_loss=0.223, pruned_loss=0.04127, over 972121.12 frames.], batch size: 19, lr: 3.79e-04 +2022-05-05 07:25:52,415 INFO [train.py:715] (7/8) Epoch 5, batch 22150, loss[loss=0.1758, simple_loss=0.2512, pruned_loss=0.05018, over 4878.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2229, pruned_loss=0.04119, over 972323.56 frames.], batch size: 16, lr: 3.78e-04 +2022-05-05 07:26:31,444 INFO [train.py:715] (7/8) Epoch 5, batch 22200, loss[loss=0.1336, simple_loss=0.207, pruned_loss=0.03014, over 4989.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2225, pruned_loss=0.0409, over 972213.14 frames.], batch size: 25, lr: 3.78e-04 +2022-05-05 07:27:11,165 INFO [train.py:715] (7/8) Epoch 5, batch 22250, loss[loss=0.1687, simple_loss=0.2273, pruned_loss=0.05504, over 4742.00 frames.], tot_loss[loss=0.1523, simple_loss=0.223, pruned_loss=0.04078, over 972500.17 frames.], batch size: 12, lr: 3.78e-04 +2022-05-05 07:27:50,340 INFO [train.py:715] (7/8) Epoch 5, batch 22300, loss[loss=0.1471, simple_loss=0.2138, pruned_loss=0.04023, over 4970.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2223, pruned_loss=0.04046, over 972097.30 frames.], batch size: 15, lr: 3.78e-04 +2022-05-05 07:28:28,461 INFO [train.py:715] (7/8) 
Epoch 5, batch 22350, loss[loss=0.148, simple_loss=0.2131, pruned_loss=0.04151, over 4850.00 frames.], tot_loss[loss=0.1511, simple_loss=0.222, pruned_loss=0.04009, over 971427.73 frames.], batch size: 20, lr: 3.78e-04 +2022-05-05 07:29:06,834 INFO [train.py:715] (7/8) Epoch 5, batch 22400, loss[loss=0.1636, simple_loss=0.2425, pruned_loss=0.04233, over 4874.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2216, pruned_loss=0.04003, over 971694.75 frames.], batch size: 22, lr: 3.78e-04 +2022-05-05 07:29:45,743 INFO [train.py:715] (7/8) Epoch 5, batch 22450, loss[loss=0.135, simple_loss=0.2073, pruned_loss=0.03135, over 4861.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2221, pruned_loss=0.04033, over 972773.13 frames.], batch size: 32, lr: 3.78e-04 +2022-05-05 07:30:25,209 INFO [train.py:715] (7/8) Epoch 5, batch 22500, loss[loss=0.1406, simple_loss=0.2189, pruned_loss=0.0311, over 4945.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2228, pruned_loss=0.0408, over 972794.01 frames.], batch size: 24, lr: 3.78e-04 +2022-05-05 07:31:03,488 INFO [train.py:715] (7/8) Epoch 5, batch 22550, loss[loss=0.1338, simple_loss=0.2092, pruned_loss=0.02926, over 4871.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2224, pruned_loss=0.04053, over 972750.19 frames.], batch size: 22, lr: 3.78e-04 +2022-05-05 07:31:42,557 INFO [train.py:715] (7/8) Epoch 5, batch 22600, loss[loss=0.1384, simple_loss=0.2211, pruned_loss=0.02785, over 4812.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2223, pruned_loss=0.04031, over 972184.05 frames.], batch size: 27, lr: 3.78e-04 +2022-05-05 07:32:21,688 INFO [train.py:715] (7/8) Epoch 5, batch 22650, loss[loss=0.1502, simple_loss=0.2153, pruned_loss=0.04258, over 4872.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2226, pruned_loss=0.04046, over 972089.06 frames.], batch size: 32, lr: 3.78e-04 +2022-05-05 07:33:00,844 INFO [train.py:715] (7/8) Epoch 5, batch 22700, loss[loss=0.1507, simple_loss=0.2207, pruned_loss=0.04037, over 4922.00 frames.], tot_loss[loss=0.152, simple_loss=0.2229, pruned_loss=0.04055, over 972361.27 frames.], batch size: 18, lr: 3.78e-04 +2022-05-05 07:33:39,167 INFO [train.py:715] (7/8) Epoch 5, batch 22750, loss[loss=0.1528, simple_loss=0.2344, pruned_loss=0.03563, over 4911.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2228, pruned_loss=0.04057, over 972429.68 frames.], batch size: 29, lr: 3.78e-04 +2022-05-05 07:34:18,367 INFO [train.py:715] (7/8) Epoch 5, batch 22800, loss[loss=0.1329, simple_loss=0.2088, pruned_loss=0.02856, over 4792.00 frames.], tot_loss[loss=0.1519, simple_loss=0.223, pruned_loss=0.04039, over 972232.01 frames.], batch size: 18, lr: 3.78e-04 +2022-05-05 07:34:57,942 INFO [train.py:715] (7/8) Epoch 5, batch 22850, loss[loss=0.1792, simple_loss=0.238, pruned_loss=0.06018, over 4842.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2242, pruned_loss=0.04157, over 972454.75 frames.], batch size: 30, lr: 3.78e-04 +2022-05-05 07:35:36,332 INFO [train.py:715] (7/8) Epoch 5, batch 22900, loss[loss=0.179, simple_loss=0.2471, pruned_loss=0.05544, over 4767.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2234, pruned_loss=0.04121, over 972648.29 frames.], batch size: 16, lr: 3.78e-04 +2022-05-05 07:36:15,067 INFO [train.py:715] (7/8) Epoch 5, batch 22950, loss[loss=0.1365, simple_loss=0.2189, pruned_loss=0.02705, over 4749.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2242, pruned_loss=0.04152, over 972872.12 frames.], batch size: 16, lr: 3.78e-04 +2022-05-05 07:36:54,408 INFO [train.py:715] (7/8) Epoch 5, batch 23000, 
loss[loss=0.1314, simple_loss=0.2045, pruned_loss=0.02911, over 4959.00 frames.], tot_loss[loss=0.153, simple_loss=0.2235, pruned_loss=0.04123, over 972357.08 frames.], batch size: 15, lr: 3.78e-04 +2022-05-05 07:37:33,573 INFO [train.py:715] (7/8) Epoch 5, batch 23050, loss[loss=0.1395, simple_loss=0.2174, pruned_loss=0.03076, over 4927.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2243, pruned_loss=0.04169, over 971711.19 frames.], batch size: 23, lr: 3.78e-04 +2022-05-05 07:38:12,016 INFO [train.py:715] (7/8) Epoch 5, batch 23100, loss[loss=0.1578, simple_loss=0.2249, pruned_loss=0.04535, over 4951.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2245, pruned_loss=0.04135, over 971380.30 frames.], batch size: 39, lr: 3.78e-04 +2022-05-05 07:38:51,177 INFO [train.py:715] (7/8) Epoch 5, batch 23150, loss[loss=0.1436, simple_loss=0.2219, pruned_loss=0.0327, over 4954.00 frames.], tot_loss[loss=0.153, simple_loss=0.2235, pruned_loss=0.04125, over 971620.60 frames.], batch size: 21, lr: 3.78e-04 +2022-05-05 07:39:30,786 INFO [train.py:715] (7/8) Epoch 5, batch 23200, loss[loss=0.1667, simple_loss=0.2377, pruned_loss=0.04786, over 4807.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2235, pruned_loss=0.04148, over 971647.05 frames.], batch size: 14, lr: 3.77e-04 +2022-05-05 07:40:09,160 INFO [train.py:715] (7/8) Epoch 5, batch 23250, loss[loss=0.1617, simple_loss=0.2332, pruned_loss=0.04513, over 4713.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2235, pruned_loss=0.04144, over 972039.04 frames.], batch size: 15, lr: 3.77e-04 +2022-05-05 07:40:47,781 INFO [train.py:715] (7/8) Epoch 5, batch 23300, loss[loss=0.14, simple_loss=0.2122, pruned_loss=0.03385, over 4975.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.04069, over 971864.59 frames.], batch size: 35, lr: 3.77e-04 +2022-05-05 07:41:27,166 INFO [train.py:715] (7/8) Epoch 5, batch 23350, loss[loss=0.1478, simple_loss=0.2158, pruned_loss=0.03988, over 4906.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2222, pruned_loss=0.04055, over 972638.39 frames.], batch size: 17, lr: 3.77e-04 +2022-05-05 07:42:05,801 INFO [train.py:715] (7/8) Epoch 5, batch 23400, loss[loss=0.1449, simple_loss=0.2171, pruned_loss=0.03635, over 4762.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2213, pruned_loss=0.0402, over 972200.33 frames.], batch size: 16, lr: 3.77e-04 +2022-05-05 07:42:44,251 INFO [train.py:715] (7/8) Epoch 5, batch 23450, loss[loss=0.148, simple_loss=0.2054, pruned_loss=0.04536, over 4852.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2213, pruned_loss=0.04032, over 972457.86 frames.], batch size: 34, lr: 3.77e-04 +2022-05-05 07:43:22,953 INFO [train.py:715] (7/8) Epoch 5, batch 23500, loss[loss=0.1662, simple_loss=0.246, pruned_loss=0.04317, over 4655.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2217, pruned_loss=0.04041, over 971460.56 frames.], batch size: 13, lr: 3.77e-04 +2022-05-05 07:44:02,010 INFO [train.py:715] (7/8) Epoch 5, batch 23550, loss[loss=0.1512, simple_loss=0.2325, pruned_loss=0.03493, over 4978.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2214, pruned_loss=0.0405, over 971918.69 frames.], batch size: 15, lr: 3.77e-04 +2022-05-05 07:44:40,889 INFO [train.py:715] (7/8) Epoch 5, batch 23600, loss[loss=0.1903, simple_loss=0.2586, pruned_loss=0.06095, over 4892.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2232, pruned_loss=0.04135, over 972256.11 frames.], batch size: 22, lr: 3.77e-04 +2022-05-05 07:45:19,394 INFO [train.py:715] (7/8) Epoch 5, batch 23650, loss[loss=0.2093, 
simple_loss=0.2702, pruned_loss=0.07416, over 4697.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2228, pruned_loss=0.04119, over 971703.43 frames.], batch size: 15, lr: 3.77e-04 +2022-05-05 07:45:58,898 INFO [train.py:715] (7/8) Epoch 5, batch 23700, loss[loss=0.1591, simple_loss=0.2302, pruned_loss=0.04399, over 4956.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2217, pruned_loss=0.04057, over 972257.56 frames.], batch size: 24, lr: 3.77e-04 +2022-05-05 07:46:37,473 INFO [train.py:715] (7/8) Epoch 5, batch 23750, loss[loss=0.1898, simple_loss=0.2621, pruned_loss=0.05876, over 4987.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2221, pruned_loss=0.04042, over 972785.99 frames.], batch size: 31, lr: 3.77e-04 +2022-05-05 07:47:16,504 INFO [train.py:715] (7/8) Epoch 5, batch 23800, loss[loss=0.1598, simple_loss=0.2313, pruned_loss=0.04412, over 4830.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2215, pruned_loss=0.03988, over 972238.54 frames.], batch size: 26, lr: 3.77e-04 +2022-05-05 07:47:55,208 INFO [train.py:715] (7/8) Epoch 5, batch 23850, loss[loss=0.1248, simple_loss=0.1965, pruned_loss=0.02658, over 4801.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2212, pruned_loss=0.03953, over 972239.97 frames.], batch size: 14, lr: 3.77e-04 +2022-05-05 07:48:34,417 INFO [train.py:715] (7/8) Epoch 5, batch 23900, loss[loss=0.1506, simple_loss=0.2169, pruned_loss=0.04213, over 4862.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2217, pruned_loss=0.03997, over 971901.82 frames.], batch size: 20, lr: 3.77e-04 +2022-05-05 07:49:13,371 INFO [train.py:715] (7/8) Epoch 5, batch 23950, loss[loss=0.1336, simple_loss=0.1884, pruned_loss=0.03939, over 4785.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2209, pruned_loss=0.03939, over 972231.21 frames.], batch size: 17, lr: 3.77e-04 +2022-05-05 07:49:51,794 INFO [train.py:715] (7/8) Epoch 5, batch 24000, loss[loss=0.1534, simple_loss=0.2219, pruned_loss=0.04251, over 4972.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2218, pruned_loss=0.0398, over 972761.03 frames.], batch size: 35, lr: 3.77e-04 +2022-05-05 07:49:51,795 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 07:50:02,184 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.11, simple_loss=0.1955, pruned_loss=0.0123, over 914524.00 frames. 
+2022-05-05 07:50:40,726 INFO [train.py:715] (7/8) Epoch 5, batch 24050, loss[loss=0.2021, simple_loss=0.2639, pruned_loss=0.07016, over 4984.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2216, pruned_loss=0.0403, over 972762.55 frames.], batch size: 25, lr: 3.77e-04 +2022-05-05 07:51:20,434 INFO [train.py:715] (7/8) Epoch 5, batch 24100, loss[loss=0.1337, simple_loss=0.2168, pruned_loss=0.02527, over 4974.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2211, pruned_loss=0.04012, over 973276.43 frames.], batch size: 28, lr: 3.77e-04 +2022-05-05 07:51:59,181 INFO [train.py:715] (7/8) Epoch 5, batch 24150, loss[loss=0.1418, simple_loss=0.2147, pruned_loss=0.03441, over 4755.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2207, pruned_loss=0.03976, over 972192.01 frames.], batch size: 19, lr: 3.77e-04 +2022-05-05 07:52:37,497 INFO [train.py:715] (7/8) Epoch 5, batch 24200, loss[loss=0.1447, simple_loss=0.2024, pruned_loss=0.04346, over 4934.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2204, pruned_loss=0.03974, over 972168.75 frames.], batch size: 23, lr: 3.77e-04 +2022-05-05 07:53:16,810 INFO [train.py:715] (7/8) Epoch 5, batch 24250, loss[loss=0.2031, simple_loss=0.2784, pruned_loss=0.06393, over 4773.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2207, pruned_loss=0.03987, over 972979.95 frames.], batch size: 17, lr: 3.76e-04 +2022-05-05 07:53:55,922 INFO [train.py:715] (7/8) Epoch 5, batch 24300, loss[loss=0.1679, simple_loss=0.2264, pruned_loss=0.05465, over 4915.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2209, pruned_loss=0.03991, over 973083.65 frames.], batch size: 19, lr: 3.76e-04 +2022-05-05 07:54:34,804 INFO [train.py:715] (7/8) Epoch 5, batch 24350, loss[loss=0.152, simple_loss=0.2211, pruned_loss=0.04146, over 4790.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2213, pruned_loss=0.03987, over 972315.68 frames.], batch size: 21, lr: 3.76e-04 +2022-05-05 07:55:13,056 INFO [train.py:715] (7/8) Epoch 5, batch 24400, loss[loss=0.1571, simple_loss=0.2296, pruned_loss=0.04226, over 4878.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2229, pruned_loss=0.04069, over 972141.26 frames.], batch size: 16, lr: 3.76e-04 +2022-05-05 07:55:52,740 INFO [train.py:715] (7/8) Epoch 5, batch 24450, loss[loss=0.1273, simple_loss=0.1988, pruned_loss=0.0279, over 4802.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04047, over 971033.27 frames.], batch size: 12, lr: 3.76e-04 +2022-05-05 07:56:30,706 INFO [train.py:715] (7/8) Epoch 5, batch 24500, loss[loss=0.1241, simple_loss=0.1975, pruned_loss=0.02537, over 4884.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2212, pruned_loss=0.03986, over 970997.51 frames.], batch size: 22, lr: 3.76e-04 +2022-05-05 07:57:09,364 INFO [train.py:715] (7/8) Epoch 5, batch 24550, loss[loss=0.161, simple_loss=0.2281, pruned_loss=0.04693, over 4789.00 frames.], tot_loss[loss=0.151, simple_loss=0.2217, pruned_loss=0.04021, over 971057.88 frames.], batch size: 17, lr: 3.76e-04 +2022-05-05 07:57:48,727 INFO [train.py:715] (7/8) Epoch 5, batch 24600, loss[loss=0.1445, simple_loss=0.2143, pruned_loss=0.03739, over 4973.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.04028, over 970993.18 frames.], batch size: 40, lr: 3.76e-04 +2022-05-05 07:58:27,792 INFO [train.py:715] (7/8) Epoch 5, batch 24650, loss[loss=0.1259, simple_loss=0.2025, pruned_loss=0.02464, over 4936.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2213, pruned_loss=0.03986, over 971844.07 frames.], batch size: 21, lr: 3.76e-04 +2022-05-05 
07:59:06,981 INFO [train.py:715] (7/8) Epoch 5, batch 24700, loss[loss=0.1583, simple_loss=0.238, pruned_loss=0.03928, over 4849.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2218, pruned_loss=0.04003, over 972570.72 frames.], batch size: 20, lr: 3.76e-04 +2022-05-05 07:59:45,115 INFO [train.py:715] (7/8) Epoch 5, batch 24750, loss[loss=0.1252, simple_loss=0.1926, pruned_loss=0.02887, over 4649.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2214, pruned_loss=0.0401, over 971887.45 frames.], batch size: 13, lr: 3.76e-04 +2022-05-05 08:00:24,684 INFO [train.py:715] (7/8) Epoch 5, batch 24800, loss[loss=0.109, simple_loss=0.1821, pruned_loss=0.01795, over 4836.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2218, pruned_loss=0.04024, over 970874.44 frames.], batch size: 13, lr: 3.76e-04 +2022-05-05 08:01:03,113 INFO [train.py:715] (7/8) Epoch 5, batch 24850, loss[loss=0.1436, simple_loss=0.2148, pruned_loss=0.03624, over 4915.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04011, over 971924.15 frames.], batch size: 29, lr: 3.76e-04 +2022-05-05 08:01:41,874 INFO [train.py:715] (7/8) Epoch 5, batch 24900, loss[loss=0.1808, simple_loss=0.2576, pruned_loss=0.05206, over 4884.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2221, pruned_loss=0.04016, over 972081.88 frames.], batch size: 22, lr: 3.76e-04 +2022-05-05 08:02:21,423 INFO [train.py:715] (7/8) Epoch 5, batch 24950, loss[loss=0.1508, simple_loss=0.2183, pruned_loss=0.04162, over 4697.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2214, pruned_loss=0.04005, over 972578.25 frames.], batch size: 15, lr: 3.76e-04 +2022-05-05 08:03:00,493 INFO [train.py:715] (7/8) Epoch 5, batch 25000, loss[loss=0.1637, simple_loss=0.2257, pruned_loss=0.05081, over 4943.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2218, pruned_loss=0.04037, over 972706.22 frames.], batch size: 35, lr: 3.76e-04 +2022-05-05 08:03:39,040 INFO [train.py:715] (7/8) Epoch 5, batch 25050, loss[loss=0.1723, simple_loss=0.2481, pruned_loss=0.04824, over 4779.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2226, pruned_loss=0.04081, over 972636.03 frames.], batch size: 17, lr: 3.76e-04 +2022-05-05 08:04:17,283 INFO [train.py:715] (7/8) Epoch 5, batch 25100, loss[loss=0.1558, simple_loss=0.2244, pruned_loss=0.04359, over 4928.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2226, pruned_loss=0.04079, over 971998.72 frames.], batch size: 21, lr: 3.76e-04 +2022-05-05 08:04:57,547 INFO [train.py:715] (7/8) Epoch 5, batch 25150, loss[loss=0.1359, simple_loss=0.2065, pruned_loss=0.03265, over 4804.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2229, pruned_loss=0.04097, over 972898.67 frames.], batch size: 13, lr: 3.76e-04 +2022-05-05 08:05:35,729 INFO [train.py:715] (7/8) Epoch 5, batch 25200, loss[loss=0.1509, simple_loss=0.2202, pruned_loss=0.04074, over 4868.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2245, pruned_loss=0.04184, over 972560.10 frames.], batch size: 16, lr: 3.76e-04 +2022-05-05 08:06:14,577 INFO [train.py:715] (7/8) Epoch 5, batch 25250, loss[loss=0.1455, simple_loss=0.2189, pruned_loss=0.0361, over 4824.00 frames.], tot_loss[loss=0.1532, simple_loss=0.224, pruned_loss=0.04118, over 972327.60 frames.], batch size: 15, lr: 3.76e-04 +2022-05-05 08:06:53,403 INFO [train.py:715] (7/8) Epoch 5, batch 25300, loss[loss=0.1478, simple_loss=0.2166, pruned_loss=0.03952, over 4899.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2246, pruned_loss=0.04138, over 972671.94 frames.], batch size: 19, lr: 3.75e-04 +2022-05-05 08:07:31,746 INFO 
[train.py:715] (7/8) Epoch 5, batch 25350, loss[loss=0.1489, simple_loss=0.2188, pruned_loss=0.03948, over 4830.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2239, pruned_loss=0.04116, over 973089.73 frames.], batch size: 15, lr: 3.75e-04 +2022-05-05 08:08:10,247 INFO [train.py:715] (7/8) Epoch 5, batch 25400, loss[loss=0.1432, simple_loss=0.2027, pruned_loss=0.04186, over 4953.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2232, pruned_loss=0.04109, over 973118.93 frames.], batch size: 35, lr: 3.75e-04 +2022-05-05 08:08:49,163 INFO [train.py:715] (7/8) Epoch 5, batch 25450, loss[loss=0.1288, simple_loss=0.203, pruned_loss=0.02729, over 4865.00 frames.], tot_loss[loss=0.154, simple_loss=0.2244, pruned_loss=0.04181, over 973352.85 frames.], batch size: 16, lr: 3.75e-04 +2022-05-05 08:09:28,390 INFO [train.py:715] (7/8) Epoch 5, batch 25500, loss[loss=0.148, simple_loss=0.2149, pruned_loss=0.04058, over 4862.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2237, pruned_loss=0.0416, over 972822.63 frames.], batch size: 20, lr: 3.75e-04 +2022-05-05 08:10:07,142 INFO [train.py:715] (7/8) Epoch 5, batch 25550, loss[loss=0.165, simple_loss=0.2354, pruned_loss=0.04731, over 4905.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2245, pruned_loss=0.04198, over 973901.68 frames.], batch size: 19, lr: 3.75e-04 +2022-05-05 08:10:45,634 INFO [train.py:715] (7/8) Epoch 5, batch 25600, loss[loss=0.1435, simple_loss=0.21, pruned_loss=0.03849, over 4860.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2241, pruned_loss=0.04171, over 972786.94 frames.], batch size: 20, lr: 3.75e-04 +2022-05-05 08:11:24,704 INFO [train.py:715] (7/8) Epoch 5, batch 25650, loss[loss=0.1779, simple_loss=0.2449, pruned_loss=0.0555, over 4758.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2238, pruned_loss=0.04186, over 972808.81 frames.], batch size: 19, lr: 3.75e-04 +2022-05-05 08:12:03,094 INFO [train.py:715] (7/8) Epoch 5, batch 25700, loss[loss=0.1297, simple_loss=0.1959, pruned_loss=0.03174, over 4793.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2237, pruned_loss=0.0418, over 972567.27 frames.], batch size: 12, lr: 3.75e-04 +2022-05-05 08:12:41,263 INFO [train.py:715] (7/8) Epoch 5, batch 25750, loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03343, over 4979.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2234, pruned_loss=0.04157, over 972286.44 frames.], batch size: 25, lr: 3.75e-04 +2022-05-05 08:13:20,737 INFO [train.py:715] (7/8) Epoch 5, batch 25800, loss[loss=0.1299, simple_loss=0.1962, pruned_loss=0.03176, over 4794.00 frames.], tot_loss[loss=0.1521, simple_loss=0.222, pruned_loss=0.04107, over 972169.63 frames.], batch size: 13, lr: 3.75e-04 +2022-05-05 08:13:59,833 INFO [train.py:715] (7/8) Epoch 5, batch 25850, loss[loss=0.1669, simple_loss=0.2414, pruned_loss=0.04613, over 4747.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2209, pruned_loss=0.04089, over 971559.39 frames.], batch size: 16, lr: 3.75e-04 +2022-05-05 08:14:38,588 INFO [train.py:715] (7/8) Epoch 5, batch 25900, loss[loss=0.1504, simple_loss=0.2232, pruned_loss=0.03875, over 4939.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2215, pruned_loss=0.04095, over 972232.55 frames.], batch size: 23, lr: 3.75e-04 +2022-05-05 08:15:17,124 INFO [train.py:715] (7/8) Epoch 5, batch 25950, loss[loss=0.146, simple_loss=0.2126, pruned_loss=0.03972, over 4971.00 frames.], tot_loss[loss=0.151, simple_loss=0.2213, pruned_loss=0.04039, over 971794.10 frames.], batch size: 35, lr: 3.75e-04 +2022-05-05 08:15:58,602 INFO [train.py:715] (7/8) Epoch 5, 
batch 26000, loss[loss=0.1448, simple_loss=0.22, pruned_loss=0.03479, over 4847.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2212, pruned_loss=0.04027, over 971743.95 frames.], batch size: 34, lr: 3.75e-04 +2022-05-05 08:16:37,293 INFO [train.py:715] (7/8) Epoch 5, batch 26050, loss[loss=0.1619, simple_loss=0.2316, pruned_loss=0.04605, over 4988.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2208, pruned_loss=0.04007, over 972133.38 frames.], batch size: 15, lr: 3.75e-04 +2022-05-05 08:17:15,759 INFO [train.py:715] (7/8) Epoch 5, batch 26100, loss[loss=0.1656, simple_loss=0.2468, pruned_loss=0.04217, over 4771.00 frames.], tot_loss[loss=0.15, simple_loss=0.2207, pruned_loss=0.0397, over 972368.65 frames.], batch size: 18, lr: 3.75e-04 +2022-05-05 08:17:54,715 INFO [train.py:715] (7/8) Epoch 5, batch 26150, loss[loss=0.1321, simple_loss=0.2179, pruned_loss=0.02322, over 4819.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2203, pruned_loss=0.03941, over 972021.24 frames.], batch size: 26, lr: 3.75e-04 +2022-05-05 08:18:33,048 INFO [train.py:715] (7/8) Epoch 5, batch 26200, loss[loss=0.1552, simple_loss=0.2181, pruned_loss=0.0462, over 4834.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2201, pruned_loss=0.03932, over 972195.94 frames.], batch size: 20, lr: 3.75e-04 +2022-05-05 08:19:12,106 INFO [train.py:715] (7/8) Epoch 5, batch 26250, loss[loss=0.1455, simple_loss=0.2172, pruned_loss=0.03686, over 4810.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.03933, over 972338.06 frames.], batch size: 15, lr: 3.75e-04 +2022-05-05 08:19:51,345 INFO [train.py:715] (7/8) Epoch 5, batch 26300, loss[loss=0.1282, simple_loss=0.2054, pruned_loss=0.02546, over 4873.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2215, pruned_loss=0.03976, over 972858.58 frames.], batch size: 16, lr: 3.75e-04 +2022-05-05 08:20:30,626 INFO [train.py:715] (7/8) Epoch 5, batch 26350, loss[loss=0.1422, simple_loss=0.2188, pruned_loss=0.0328, over 4929.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2213, pruned_loss=0.03956, over 972267.25 frames.], batch size: 21, lr: 3.74e-04 +2022-05-05 08:21:09,423 INFO [train.py:715] (7/8) Epoch 5, batch 26400, loss[loss=0.1624, simple_loss=0.2445, pruned_loss=0.04011, over 4815.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2217, pruned_loss=0.0398, over 971822.59 frames.], batch size: 26, lr: 3.74e-04 +2022-05-05 08:21:48,033 INFO [train.py:715] (7/8) Epoch 5, batch 26450, loss[loss=0.159, simple_loss=0.2197, pruned_loss=0.04918, over 4947.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2206, pruned_loss=0.03997, over 972151.94 frames.], batch size: 21, lr: 3.74e-04 +2022-05-05 08:22:26,955 INFO [train.py:715] (7/8) Epoch 5, batch 26500, loss[loss=0.1431, simple_loss=0.2129, pruned_loss=0.03663, over 4962.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2216, pruned_loss=0.04078, over 972244.07 frames.], batch size: 24, lr: 3.74e-04 +2022-05-05 08:23:06,040 INFO [train.py:715] (7/8) Epoch 5, batch 26550, loss[loss=0.1375, simple_loss=0.2171, pruned_loss=0.029, over 4886.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2212, pruned_loss=0.03992, over 972324.14 frames.], batch size: 22, lr: 3.74e-04 +2022-05-05 08:23:44,737 INFO [train.py:715] (7/8) Epoch 5, batch 26600, loss[loss=0.1723, simple_loss=0.2451, pruned_loss=0.04976, over 4906.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2207, pruned_loss=0.0398, over 972723.93 frames.], batch size: 17, lr: 3.74e-04 +2022-05-05 08:24:24,183 INFO [train.py:715] (7/8) Epoch 5, batch 26650, loss[loss=0.1519, 
simple_loss=0.2295, pruned_loss=0.03718, over 4895.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2215, pruned_loss=0.04044, over 972500.59 frames.], batch size: 22, lr: 3.74e-04 +2022-05-05 08:25:02,984 INFO [train.py:715] (7/8) Epoch 5, batch 26700, loss[loss=0.1383, simple_loss=0.2066, pruned_loss=0.03497, over 4849.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2219, pruned_loss=0.04052, over 972653.53 frames.], batch size: 34, lr: 3.74e-04 +2022-05-05 08:25:41,812 INFO [train.py:715] (7/8) Epoch 5, batch 26750, loss[loss=0.182, simple_loss=0.2451, pruned_loss=0.05941, over 4869.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2224, pruned_loss=0.04037, over 971669.84 frames.], batch size: 16, lr: 3.74e-04 +2022-05-05 08:26:20,204 INFO [train.py:715] (7/8) Epoch 5, batch 26800, loss[loss=0.1369, simple_loss=0.2157, pruned_loss=0.02907, over 4898.00 frames.], tot_loss[loss=0.1523, simple_loss=0.223, pruned_loss=0.04081, over 972591.40 frames.], batch size: 22, lr: 3.74e-04 +2022-05-05 08:26:59,360 INFO [train.py:715] (7/8) Epoch 5, batch 26850, loss[loss=0.147, simple_loss=0.2216, pruned_loss=0.03621, over 4822.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04036, over 972414.19 frames.], batch size: 26, lr: 3.74e-04 +2022-05-05 08:27:38,335 INFO [train.py:715] (7/8) Epoch 5, batch 26900, loss[loss=0.1573, simple_loss=0.2296, pruned_loss=0.0425, over 4974.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.0402, over 972454.60 frames.], batch size: 33, lr: 3.74e-04 +2022-05-05 08:28:17,270 INFO [train.py:715] (7/8) Epoch 5, batch 26950, loss[loss=0.1647, simple_loss=0.2268, pruned_loss=0.05126, over 4850.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04026, over 972387.04 frames.], batch size: 32, lr: 3.74e-04 +2022-05-05 08:28:55,975 INFO [train.py:715] (7/8) Epoch 5, batch 27000, loss[loss=0.1318, simple_loss=0.2047, pruned_loss=0.02941, over 4975.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2222, pruned_loss=0.04023, over 972796.93 frames.], batch size: 28, lr: 3.74e-04 +2022-05-05 08:28:55,976 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 08:29:05,775 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1098, simple_loss=0.195, pruned_loss=0.01232, over 914524.00 frames. 
+2022-05-05 08:29:45,277 INFO [train.py:715] (7/8) Epoch 5, batch 27050, loss[loss=0.1671, simple_loss=0.2377, pruned_loss=0.04828, over 4874.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2217, pruned_loss=0.04056, over 972480.73 frames.], batch size: 16, lr: 3.74e-04 +2022-05-05 08:30:24,754 INFO [train.py:715] (7/8) Epoch 5, batch 27100, loss[loss=0.138, simple_loss=0.2032, pruned_loss=0.0364, over 4769.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2214, pruned_loss=0.04015, over 973188.69 frames.], batch size: 19, lr: 3.74e-04 +2022-05-05 08:31:04,149 INFO [train.py:715] (7/8) Epoch 5, batch 27150, loss[loss=0.1676, simple_loss=0.2456, pruned_loss=0.04476, over 4931.00 frames.], tot_loss[loss=0.151, simple_loss=0.2216, pruned_loss=0.04025, over 973337.17 frames.], batch size: 29, lr: 3.74e-04 +2022-05-05 08:31:42,961 INFO [train.py:715] (7/8) Epoch 5, batch 27200, loss[loss=0.1484, simple_loss=0.2329, pruned_loss=0.03196, over 4886.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.0406, over 973478.53 frames.], batch size: 22, lr: 3.74e-04 +2022-05-05 08:32:22,584 INFO [train.py:715] (7/8) Epoch 5, batch 27250, loss[loss=0.1523, simple_loss=0.2142, pruned_loss=0.04515, over 4749.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2226, pruned_loss=0.04082, over 974336.95 frames.], batch size: 16, lr: 3.74e-04 +2022-05-05 08:33:01,563 INFO [train.py:715] (7/8) Epoch 5, batch 27300, loss[loss=0.1552, simple_loss=0.2221, pruned_loss=0.04416, over 4816.00 frames.], tot_loss[loss=0.153, simple_loss=0.2235, pruned_loss=0.0412, over 973670.99 frames.], batch size: 26, lr: 3.74e-04 +2022-05-05 08:33:40,118 INFO [train.py:715] (7/8) Epoch 5, batch 27350, loss[loss=0.1374, simple_loss=0.2078, pruned_loss=0.03348, over 4904.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2234, pruned_loss=0.04139, over 973563.38 frames.], batch size: 19, lr: 3.74e-04 +2022-05-05 08:34:18,988 INFO [train.py:715] (7/8) Epoch 5, batch 27400, loss[loss=0.1349, simple_loss=0.2106, pruned_loss=0.02963, over 4899.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04166, over 975126.98 frames.], batch size: 17, lr: 3.74e-04 +2022-05-05 08:34:58,263 INFO [train.py:715] (7/8) Epoch 5, batch 27450, loss[loss=0.1514, simple_loss=0.2101, pruned_loss=0.04632, over 4764.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2234, pruned_loss=0.0414, over 974226.64 frames.], batch size: 14, lr: 3.73e-04 +2022-05-05 08:35:38,044 INFO [train.py:715] (7/8) Epoch 5, batch 27500, loss[loss=0.1679, simple_loss=0.2428, pruned_loss=0.04648, over 4788.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2237, pruned_loss=0.0414, over 973630.56 frames.], batch size: 21, lr: 3.73e-04 +2022-05-05 08:36:16,526 INFO [train.py:715] (7/8) Epoch 5, batch 27550, loss[loss=0.1467, simple_loss=0.2155, pruned_loss=0.03895, over 4797.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2233, pruned_loss=0.04124, over 973311.95 frames.], batch size: 21, lr: 3.73e-04 +2022-05-05 08:36:55,897 INFO [train.py:715] (7/8) Epoch 5, batch 27600, loss[loss=0.152, simple_loss=0.2259, pruned_loss=0.03904, over 4904.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04084, over 972519.27 frames.], batch size: 19, lr: 3.73e-04 +2022-05-05 08:37:34,977 INFO [train.py:715] (7/8) Epoch 5, batch 27650, loss[loss=0.1848, simple_loss=0.2559, pruned_loss=0.05686, over 4904.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2228, pruned_loss=0.04099, over 972564.05 frames.], batch size: 39, lr: 3.73e-04 +2022-05-05 08:38:13,252 
INFO [train.py:715] (7/8) Epoch 5, batch 27700, loss[loss=0.1587, simple_loss=0.2362, pruned_loss=0.04065, over 4913.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2225, pruned_loss=0.04053, over 972242.81 frames.], batch size: 18, lr: 3.73e-04 +2022-05-05 08:38:52,829 INFO [train.py:715] (7/8) Epoch 5, batch 27750, loss[loss=0.1994, simple_loss=0.268, pruned_loss=0.06545, over 4743.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2222, pruned_loss=0.04049, over 972164.87 frames.], batch size: 19, lr: 3.73e-04 +2022-05-05 08:39:32,591 INFO [train.py:715] (7/8) Epoch 5, batch 27800, loss[loss=0.1381, simple_loss=0.2136, pruned_loss=0.03125, over 4814.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.04103, over 972557.49 frames.], batch size: 25, lr: 3.73e-04 +2022-05-05 08:40:11,943 INFO [train.py:715] (7/8) Epoch 5, batch 27850, loss[loss=0.1727, simple_loss=0.2402, pruned_loss=0.05265, over 4857.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2225, pruned_loss=0.04088, over 971760.63 frames.], batch size: 30, lr: 3.73e-04 +2022-05-05 08:40:50,652 INFO [train.py:715] (7/8) Epoch 5, batch 27900, loss[loss=0.1474, simple_loss=0.2154, pruned_loss=0.03967, over 4785.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2216, pruned_loss=0.04071, over 971624.97 frames.], batch size: 14, lr: 3.73e-04 +2022-05-05 08:41:29,599 INFO [train.py:715] (7/8) Epoch 5, batch 27950, loss[loss=0.1536, simple_loss=0.2159, pruned_loss=0.04564, over 4766.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2219, pruned_loss=0.04067, over 971015.44 frames.], batch size: 19, lr: 3.73e-04 +2022-05-05 08:42:09,041 INFO [train.py:715] (7/8) Epoch 5, batch 28000, loss[loss=0.1276, simple_loss=0.1928, pruned_loss=0.03122, over 4866.00 frames.], tot_loss[loss=0.152, simple_loss=0.2225, pruned_loss=0.04073, over 971373.38 frames.], batch size: 12, lr: 3.73e-04 +2022-05-05 08:42:47,128 INFO [train.py:715] (7/8) Epoch 5, batch 28050, loss[loss=0.1469, simple_loss=0.211, pruned_loss=0.04141, over 4784.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2224, pruned_loss=0.04034, over 971668.87 frames.], batch size: 14, lr: 3.73e-04 +2022-05-05 08:43:25,854 INFO [train.py:715] (7/8) Epoch 5, batch 28100, loss[loss=0.1561, simple_loss=0.2313, pruned_loss=0.0405, over 4977.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2222, pruned_loss=0.04017, over 972234.79 frames.], batch size: 39, lr: 3.73e-04 +2022-05-05 08:44:04,996 INFO [train.py:715] (7/8) Epoch 5, batch 28150, loss[loss=0.1547, simple_loss=0.2396, pruned_loss=0.03488, over 4895.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2219, pruned_loss=0.03957, over 972030.81 frames.], batch size: 22, lr: 3.73e-04 +2022-05-05 08:44:43,937 INFO [train.py:715] (7/8) Epoch 5, batch 28200, loss[loss=0.1529, simple_loss=0.227, pruned_loss=0.03942, over 4977.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2211, pruned_loss=0.03935, over 972029.77 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:45:22,620 INFO [train.py:715] (7/8) Epoch 5, batch 28250, loss[loss=0.1761, simple_loss=0.2454, pruned_loss=0.05339, over 4782.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03996, over 972303.83 frames.], batch size: 17, lr: 3.73e-04 +2022-05-05 08:46:01,489 INFO [train.py:715] (7/8) Epoch 5, batch 28300, loss[loss=0.1594, simple_loss=0.2376, pruned_loss=0.04058, over 4952.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2224, pruned_loss=0.04042, over 972472.15 frames.], batch size: 35, lr: 3.73e-04 +2022-05-05 08:46:39,906 INFO [train.py:715] 
(7/8) Epoch 5, batch 28350, loss[loss=0.12, simple_loss=0.1788, pruned_loss=0.03063, over 4970.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2227, pruned_loss=0.0409, over 972831.59 frames.], batch size: 15, lr: 3.73e-04 +2022-05-05 08:47:18,554 INFO [train.py:715] (7/8) Epoch 5, batch 28400, loss[loss=0.1784, simple_loss=0.2459, pruned_loss=0.05548, over 4786.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2222, pruned_loss=0.04057, over 973182.97 frames.], batch size: 17, lr: 3.73e-04 +2022-05-05 08:47:57,678 INFO [train.py:715] (7/8) Epoch 5, batch 28450, loss[loss=0.1255, simple_loss=0.1966, pruned_loss=0.02727, over 4779.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2222, pruned_loss=0.04023, over 973303.40 frames.], batch size: 12, lr: 3.73e-04 +2022-05-05 08:48:36,726 INFO [train.py:715] (7/8) Epoch 5, batch 28500, loss[loss=0.1712, simple_loss=0.2367, pruned_loss=0.05287, over 4763.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2219, pruned_loss=0.04031, over 972819.79 frames.], batch size: 19, lr: 3.72e-04 +2022-05-05 08:49:15,937 INFO [train.py:715] (7/8) Epoch 5, batch 28550, loss[loss=0.1628, simple_loss=0.2354, pruned_loss=0.04514, over 4934.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03997, over 972669.43 frames.], batch size: 23, lr: 3.72e-04 +2022-05-05 08:49:54,627 INFO [train.py:715] (7/8) Epoch 5, batch 28600, loss[loss=0.1496, simple_loss=0.2139, pruned_loss=0.04261, over 4684.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2221, pruned_loss=0.04009, over 972876.46 frames.], batch size: 15, lr: 3.72e-04 +2022-05-05 08:50:34,059 INFO [train.py:715] (7/8) Epoch 5, batch 28650, loss[loss=0.1428, simple_loss=0.2133, pruned_loss=0.03617, over 4909.00 frames.], tot_loss[loss=0.151, simple_loss=0.2219, pruned_loss=0.04005, over 972731.06 frames.], batch size: 19, lr: 3.72e-04 +2022-05-05 08:51:12,501 INFO [train.py:715] (7/8) Epoch 5, batch 28700, loss[loss=0.163, simple_loss=0.237, pruned_loss=0.04448, over 4937.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2222, pruned_loss=0.04003, over 972838.17 frames.], batch size: 39, lr: 3.72e-04 +2022-05-05 08:51:51,353 INFO [train.py:715] (7/8) Epoch 5, batch 28750, loss[loss=0.1497, simple_loss=0.2237, pruned_loss=0.03785, over 4916.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2214, pruned_loss=0.03993, over 972664.01 frames.], batch size: 19, lr: 3.72e-04 +2022-05-05 08:52:30,121 INFO [train.py:715] (7/8) Epoch 5, batch 28800, loss[loss=0.1579, simple_loss=0.2339, pruned_loss=0.04092, over 4757.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2223, pruned_loss=0.04017, over 973037.69 frames.], batch size: 16, lr: 3.72e-04 +2022-05-05 08:53:09,038 INFO [train.py:715] (7/8) Epoch 5, batch 28850, loss[loss=0.2164, simple_loss=0.2658, pruned_loss=0.08346, over 4994.00 frames.], tot_loss[loss=0.1508, simple_loss=0.222, pruned_loss=0.0398, over 973576.49 frames.], batch size: 14, lr: 3.72e-04 +2022-05-05 08:53:47,815 INFO [train.py:715] (7/8) Epoch 5, batch 28900, loss[loss=0.1669, simple_loss=0.2373, pruned_loss=0.04821, over 4923.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2223, pruned_loss=0.04045, over 972791.29 frames.], batch size: 18, lr: 3.72e-04 +2022-05-05 08:54:26,497 INFO [train.py:715] (7/8) Epoch 5, batch 28950, loss[loss=0.1507, simple_loss=0.2214, pruned_loss=0.04006, over 4800.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2224, pruned_loss=0.0404, over 972570.20 frames.], batch size: 24, lr: 3.72e-04 +2022-05-05 08:55:05,610 INFO [train.py:715] (7/8) Epoch 5, batch 29000, 
loss[loss=0.1351, simple_loss=0.2099, pruned_loss=0.03015, over 4983.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2237, pruned_loss=0.04058, over 972991.16 frames.], batch size: 28, lr: 3.72e-04 +2022-05-05 08:55:43,853 INFO [train.py:715] (7/8) Epoch 5, batch 29050, loss[loss=0.1495, simple_loss=0.2323, pruned_loss=0.03332, over 4758.00 frames.], tot_loss[loss=0.1515, simple_loss=0.223, pruned_loss=0.03998, over 972532.46 frames.], batch size: 19, lr: 3.72e-04 +2022-05-05 08:56:22,923 INFO [train.py:715] (7/8) Epoch 5, batch 29100, loss[loss=0.1612, simple_loss=0.2305, pruned_loss=0.04596, over 4779.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2226, pruned_loss=0.03979, over 971820.06 frames.], batch size: 17, lr: 3.72e-04 +2022-05-05 08:57:01,739 INFO [train.py:715] (7/8) Epoch 5, batch 29150, loss[loss=0.1223, simple_loss=0.1912, pruned_loss=0.02673, over 4828.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2224, pruned_loss=0.03996, over 972596.99 frames.], batch size: 13, lr: 3.72e-04 +2022-05-05 08:57:40,487 INFO [train.py:715] (7/8) Epoch 5, batch 29200, loss[loss=0.1406, simple_loss=0.2165, pruned_loss=0.0324, over 4837.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2222, pruned_loss=0.04015, over 972874.07 frames.], batch size: 15, lr: 3.72e-04 +2022-05-05 08:58:19,232 INFO [train.py:715] (7/8) Epoch 5, batch 29250, loss[loss=0.1373, simple_loss=0.2003, pruned_loss=0.03712, over 4779.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2216, pruned_loss=0.03996, over 973038.69 frames.], batch size: 14, lr: 3.72e-04 +2022-05-05 08:58:57,799 INFO [train.py:715] (7/8) Epoch 5, batch 29300, loss[loss=0.169, simple_loss=0.2364, pruned_loss=0.05083, over 4879.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2231, pruned_loss=0.0406, over 973344.33 frames.], batch size: 39, lr: 3.72e-04 +2022-05-05 08:59:37,059 INFO [train.py:715] (7/8) Epoch 5, batch 29350, loss[loss=0.1496, simple_loss=0.2322, pruned_loss=0.03352, over 4810.00 frames.], tot_loss[loss=0.151, simple_loss=0.2223, pruned_loss=0.03986, over 973045.50 frames.], batch size: 14, lr: 3.72e-04 +2022-05-05 09:00:15,739 INFO [train.py:715] (7/8) Epoch 5, batch 29400, loss[loss=0.2254, simple_loss=0.2751, pruned_loss=0.08782, over 4961.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2227, pruned_loss=0.03999, over 972106.97 frames.], batch size: 35, lr: 3.72e-04 +2022-05-05 09:00:54,489 INFO [train.py:715] (7/8) Epoch 5, batch 29450, loss[loss=0.1272, simple_loss=0.2024, pruned_loss=0.02604, over 4970.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2227, pruned_loss=0.04018, over 972187.00 frames.], batch size: 25, lr: 3.72e-04 +2022-05-05 09:01:34,120 INFO [train.py:715] (7/8) Epoch 5, batch 29500, loss[loss=0.164, simple_loss=0.2315, pruned_loss=0.04828, over 4973.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2229, pruned_loss=0.04061, over 972666.10 frames.], batch size: 25, lr: 3.72e-04 +2022-05-05 09:02:13,203 INFO [train.py:715] (7/8) Epoch 5, batch 29550, loss[loss=0.1705, simple_loss=0.254, pruned_loss=0.0435, over 4944.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2226, pruned_loss=0.0406, over 972418.34 frames.], batch size: 21, lr: 3.72e-04 +2022-05-05 09:02:52,388 INFO [train.py:715] (7/8) Epoch 5, batch 29600, loss[loss=0.1564, simple_loss=0.221, pruned_loss=0.04591, over 4928.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2228, pruned_loss=0.04089, over 973142.88 frames.], batch size: 29, lr: 3.71e-04 +2022-05-05 09:03:31,059 INFO [train.py:715] (7/8) Epoch 5, batch 29650, loss[loss=0.1589, 
simple_loss=0.2263, pruned_loss=0.04576, over 4993.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2221, pruned_loss=0.04085, over 972205.84 frames.], batch size: 16, lr: 3.71e-04 +2022-05-05 09:04:09,888 INFO [train.py:715] (7/8) Epoch 5, batch 29700, loss[loss=0.1347, simple_loss=0.2179, pruned_loss=0.02575, over 4809.00 frames.], tot_loss[loss=0.1515, simple_loss=0.222, pruned_loss=0.04053, over 972292.43 frames.], batch size: 25, lr: 3.71e-04 +2022-05-05 09:04:48,812 INFO [train.py:715] (7/8) Epoch 5, batch 29750, loss[loss=0.1446, simple_loss=0.2177, pruned_loss=0.03578, over 4858.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2221, pruned_loss=0.04037, over 971711.16 frames.], batch size: 20, lr: 3.71e-04 +2022-05-05 09:05:27,384 INFO [train.py:715] (7/8) Epoch 5, batch 29800, loss[loss=0.1706, simple_loss=0.2357, pruned_loss=0.05278, over 4880.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2223, pruned_loss=0.04042, over 972233.60 frames.], batch size: 22, lr: 3.71e-04 +2022-05-05 09:06:05,620 INFO [train.py:715] (7/8) Epoch 5, batch 29850, loss[loss=0.1173, simple_loss=0.186, pruned_loss=0.0243, over 4752.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2222, pruned_loss=0.04066, over 971696.36 frames.], batch size: 16, lr: 3.71e-04 +2022-05-05 09:06:44,669 INFO [train.py:715] (7/8) Epoch 5, batch 29900, loss[loss=0.1613, simple_loss=0.2346, pruned_loss=0.04401, over 4782.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2229, pruned_loss=0.04065, over 972030.02 frames.], batch size: 18, lr: 3.71e-04 +2022-05-05 09:07:24,015 INFO [train.py:715] (7/8) Epoch 5, batch 29950, loss[loss=0.1393, simple_loss=0.2089, pruned_loss=0.03487, over 4948.00 frames.], tot_loss[loss=0.1517, simple_loss=0.223, pruned_loss=0.04022, over 971706.11 frames.], batch size: 29, lr: 3.71e-04 +2022-05-05 09:08:02,566 INFO [train.py:715] (7/8) Epoch 5, batch 30000, loss[loss=0.1642, simple_loss=0.243, pruned_loss=0.04269, over 4971.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2235, pruned_loss=0.04079, over 972028.25 frames.], batch size: 25, lr: 3.71e-04 +2022-05-05 09:08:02,566 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 09:08:12,296 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.11, simple_loss=0.1953, pruned_loss=0.01241, over 914524.00 frames. 
+2022-05-05 09:08:51,328 INFO [train.py:715] (7/8) Epoch 5, batch 30050, loss[loss=0.1391, simple_loss=0.2103, pruned_loss=0.03394, over 4650.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2231, pruned_loss=0.0407, over 972698.40 frames.], batch size: 13, lr: 3.71e-04 +2022-05-05 09:09:31,492 INFO [train.py:715] (7/8) Epoch 5, batch 30100, loss[loss=0.1405, simple_loss=0.2102, pruned_loss=0.03536, over 4875.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2223, pruned_loss=0.04026, over 972555.45 frames.], batch size: 32, lr: 3.71e-04 +2022-05-05 09:10:10,290 INFO [train.py:715] (7/8) Epoch 5, batch 30150, loss[loss=0.158, simple_loss=0.2266, pruned_loss=0.04471, over 4844.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03997, over 971863.27 frames.], batch size: 26, lr: 3.71e-04 +2022-05-05 09:10:48,820 INFO [train.py:715] (7/8) Epoch 5, batch 30200, loss[loss=0.1516, simple_loss=0.224, pruned_loss=0.03957, over 4833.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2222, pruned_loss=0.04022, over 972395.68 frames.], batch size: 26, lr: 3.71e-04 +2022-05-05 09:11:27,806 INFO [train.py:715] (7/8) Epoch 5, batch 30250, loss[loss=0.1177, simple_loss=0.1878, pruned_loss=0.02376, over 4844.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2227, pruned_loss=0.0404, over 972551.03 frames.], batch size: 20, lr: 3.71e-04 +2022-05-05 09:12:06,781 INFO [train.py:715] (7/8) Epoch 5, batch 30300, loss[loss=0.1464, simple_loss=0.2052, pruned_loss=0.04382, over 4803.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04039, over 972161.69 frames.], batch size: 14, lr: 3.71e-04 +2022-05-05 09:12:45,784 INFO [train.py:715] (7/8) Epoch 5, batch 30350, loss[loss=0.1734, simple_loss=0.2399, pruned_loss=0.05347, over 4793.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03996, over 971480.38 frames.], batch size: 21, lr: 3.71e-04 +2022-05-05 09:13:24,286 INFO [train.py:715] (7/8) Epoch 5, batch 30400, loss[loss=0.1309, simple_loss=0.2098, pruned_loss=0.026, over 4753.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2204, pruned_loss=0.0389, over 971977.17 frames.], batch size: 19, lr: 3.71e-04 +2022-05-05 09:14:03,372 INFO [train.py:715] (7/8) Epoch 5, batch 30450, loss[loss=0.1669, simple_loss=0.2418, pruned_loss=0.04599, over 4986.00 frames.], tot_loss[loss=0.149, simple_loss=0.22, pruned_loss=0.03901, over 972275.68 frames.], batch size: 16, lr: 3.71e-04 +2022-05-05 09:14:42,246 INFO [train.py:715] (7/8) Epoch 5, batch 30500, loss[loss=0.1528, simple_loss=0.2268, pruned_loss=0.03943, over 4830.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2203, pruned_loss=0.03899, over 972162.35 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:15:20,923 INFO [train.py:715] (7/8) Epoch 5, batch 30550, loss[loss=0.18, simple_loss=0.2415, pruned_loss=0.05924, over 4827.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2208, pruned_loss=0.03976, over 971652.28 frames.], batch size: 15, lr: 3.71e-04 +2022-05-05 09:15:58,927 INFO [train.py:715] (7/8) Epoch 5, batch 30600, loss[loss=0.136, simple_loss=0.1989, pruned_loss=0.03658, over 4856.00 frames.], tot_loss[loss=0.1506, simple_loss=0.221, pruned_loss=0.04003, over 972171.28 frames.], batch size: 13, lr: 3.71e-04 +2022-05-05 09:16:37,778 INFO [train.py:715] (7/8) Epoch 5, batch 30650, loss[loss=0.1691, simple_loss=0.2482, pruned_loss=0.04504, over 4963.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2216, pruned_loss=0.04012, over 972485.40 frames.], batch size: 24, lr: 3.71e-04 +2022-05-05 09:17:16,917 INFO 
[train.py:715] (7/8) Epoch 5, batch 30700, loss[loss=0.1413, simple_loss=0.2075, pruned_loss=0.0376, over 4828.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2214, pruned_loss=0.0398, over 972944.27 frames.], batch size: 15, lr: 3.70e-04 +2022-05-05 09:17:55,174 INFO [train.py:715] (7/8) Epoch 5, batch 30750, loss[loss=0.1666, simple_loss=0.2347, pruned_loss=0.04923, over 4967.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2217, pruned_loss=0.03948, over 972957.57 frames.], batch size: 14, lr: 3.70e-04 +2022-05-05 09:18:33,968 INFO [train.py:715] (7/8) Epoch 5, batch 30800, loss[loss=0.1408, simple_loss=0.2018, pruned_loss=0.03988, over 4748.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2223, pruned_loss=0.03997, over 972956.62 frames.], batch size: 16, lr: 3.70e-04 +2022-05-05 09:19:12,985 INFO [train.py:715] (7/8) Epoch 5, batch 30850, loss[loss=0.1632, simple_loss=0.2318, pruned_loss=0.04731, over 4692.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2223, pruned_loss=0.04029, over 973709.83 frames.], batch size: 15, lr: 3.70e-04 +2022-05-05 09:19:51,003 INFO [train.py:715] (7/8) Epoch 5, batch 30900, loss[loss=0.1581, simple_loss=0.2266, pruned_loss=0.04479, over 4942.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2221, pruned_loss=0.04015, over 972829.60 frames.], batch size: 35, lr: 3.70e-04 +2022-05-05 09:20:29,874 INFO [train.py:715] (7/8) Epoch 5, batch 30950, loss[loss=0.1257, simple_loss=0.1976, pruned_loss=0.02694, over 4784.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2218, pruned_loss=0.03976, over 972897.13 frames.], batch size: 14, lr: 3.70e-04 +2022-05-05 09:21:09,514 INFO [train.py:715] (7/8) Epoch 5, batch 31000, loss[loss=0.1352, simple_loss=0.1925, pruned_loss=0.03895, over 4788.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2227, pruned_loss=0.04053, over 973496.66 frames.], batch size: 12, lr: 3.70e-04 +2022-05-05 09:21:48,975 INFO [train.py:715] (7/8) Epoch 5, batch 31050, loss[loss=0.1529, simple_loss=0.2354, pruned_loss=0.03522, over 4865.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2221, pruned_loss=0.04051, over 973315.92 frames.], batch size: 20, lr: 3.70e-04 +2022-05-05 09:22:27,591 INFO [train.py:715] (7/8) Epoch 5, batch 31100, loss[loss=0.1947, simple_loss=0.265, pruned_loss=0.06224, over 4902.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04067, over 973229.68 frames.], batch size: 17, lr: 3.70e-04 +2022-05-05 09:23:06,677 INFO [train.py:715] (7/8) Epoch 5, batch 31150, loss[loss=0.1542, simple_loss=0.2151, pruned_loss=0.04667, over 4776.00 frames.], tot_loss[loss=0.152, simple_loss=0.2224, pruned_loss=0.04084, over 971379.30 frames.], batch size: 14, lr: 3.70e-04 +2022-05-05 09:23:45,592 INFO [train.py:715] (7/8) Epoch 5, batch 31200, loss[loss=0.1359, simple_loss=0.2031, pruned_loss=0.03433, over 4883.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2229, pruned_loss=0.0411, over 971326.36 frames.], batch size: 16, lr: 3.70e-04 +2022-05-05 09:24:24,057 INFO [train.py:715] (7/8) Epoch 5, batch 31250, loss[loss=0.1628, simple_loss=0.2395, pruned_loss=0.04304, over 4749.00 frames.], tot_loss[loss=0.152, simple_loss=0.2222, pruned_loss=0.04086, over 971880.32 frames.], batch size: 19, lr: 3.70e-04 +2022-05-05 09:25:02,647 INFO [train.py:715] (7/8) Epoch 5, batch 31300, loss[loss=0.1658, simple_loss=0.2326, pruned_loss=0.04948, over 4799.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2223, pruned_loss=0.04064, over 971504.90 frames.], batch size: 24, lr: 3.70e-04 +2022-05-05 09:25:41,536 INFO [train.py:715] (7/8) 
Epoch 5, batch 31350, loss[loss=0.1403, simple_loss=0.2067, pruned_loss=0.03696, over 4986.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2221, pruned_loss=0.04029, over 972122.76 frames.], batch size: 26, lr: 3.70e-04 +2022-05-05 09:26:20,317 INFO [train.py:715] (7/8) Epoch 5, batch 31400, loss[loss=0.165, simple_loss=0.2326, pruned_loss=0.04871, over 4874.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2216, pruned_loss=0.04006, over 971876.14 frames.], batch size: 22, lr: 3.70e-04 +2022-05-05 09:26:59,039 INFO [train.py:715] (7/8) Epoch 5, batch 31450, loss[loss=0.1338, simple_loss=0.2047, pruned_loss=0.03139, over 4802.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2208, pruned_loss=0.03991, over 972308.76 frames.], batch size: 21, lr: 3.70e-04 +2022-05-05 09:27:37,865 INFO [train.py:715] (7/8) Epoch 5, batch 31500, loss[loss=0.1588, simple_loss=0.2331, pruned_loss=0.04224, over 4893.00 frames.], tot_loss[loss=0.1504, simple_loss=0.221, pruned_loss=0.03985, over 973617.61 frames.], batch size: 17, lr: 3.70e-04 +2022-05-05 09:28:16,785 INFO [train.py:715] (7/8) Epoch 5, batch 31550, loss[loss=0.1654, simple_loss=0.2391, pruned_loss=0.0458, over 4701.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2214, pruned_loss=0.04007, over 972521.64 frames.], batch size: 15, lr: 3.70e-04 +2022-05-05 09:28:55,561 INFO [train.py:715] (7/8) Epoch 5, batch 31600, loss[loss=0.1473, simple_loss=0.2266, pruned_loss=0.03404, over 4823.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2218, pruned_loss=0.04014, over 972576.14 frames.], batch size: 25, lr: 3.70e-04 +2022-05-05 09:29:34,422 INFO [train.py:715] (7/8) Epoch 5, batch 31650, loss[loss=0.1203, simple_loss=0.1911, pruned_loss=0.02476, over 4927.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03938, over 972432.03 frames.], batch size: 29, lr: 3.70e-04 +2022-05-05 09:30:13,323 INFO [train.py:715] (7/8) Epoch 5, batch 31700, loss[loss=0.1265, simple_loss=0.1988, pruned_loss=0.02714, over 4847.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03893, over 972149.50 frames.], batch size: 30, lr: 3.70e-04 +2022-05-05 09:30:52,062 INFO [train.py:715] (7/8) Epoch 5, batch 31750, loss[loss=0.1606, simple_loss=0.2293, pruned_loss=0.0459, over 4887.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2206, pruned_loss=0.03944, over 972164.67 frames.], batch size: 16, lr: 3.70e-04 +2022-05-05 09:31:31,172 INFO [train.py:715] (7/8) Epoch 5, batch 31800, loss[loss=0.1459, simple_loss=0.2253, pruned_loss=0.03324, over 4881.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.03956, over 973112.62 frames.], batch size: 22, lr: 3.69e-04 +2022-05-05 09:32:09,903 INFO [train.py:715] (7/8) Epoch 5, batch 31850, loss[loss=0.144, simple_loss=0.2242, pruned_loss=0.03192, over 4795.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2214, pruned_loss=0.03919, over 973594.81 frames.], batch size: 24, lr: 3.69e-04 +2022-05-05 09:32:49,447 INFO [train.py:715] (7/8) Epoch 5, batch 31900, loss[loss=0.1774, simple_loss=0.2421, pruned_loss=0.0564, over 4934.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03922, over 973735.62 frames.], batch size: 39, lr: 3.69e-04 +2022-05-05 09:33:28,130 INFO [train.py:715] (7/8) Epoch 5, batch 31950, loss[loss=0.1615, simple_loss=0.2328, pruned_loss=0.04506, over 4770.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2215, pruned_loss=0.03938, over 973823.24 frames.], batch size: 18, lr: 3.69e-04 +2022-05-05 09:34:06,672 INFO [train.py:715] (7/8) Epoch 5, batch 32000, 
loss[loss=0.1629, simple_loss=0.2381, pruned_loss=0.04389, over 4960.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.03963, over 972239.26 frames.], batch size: 39, lr: 3.69e-04 +2022-05-05 09:34:45,045 INFO [train.py:715] (7/8) Epoch 5, batch 32050, loss[loss=0.1248, simple_loss=0.2067, pruned_loss=0.02145, over 4833.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2218, pruned_loss=0.03996, over 971887.26 frames.], batch size: 26, lr: 3.69e-04 +2022-05-05 09:35:24,094 INFO [train.py:715] (7/8) Epoch 5, batch 32100, loss[loss=0.1603, simple_loss=0.2317, pruned_loss=0.04451, over 4786.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2216, pruned_loss=0.03972, over 972400.35 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:36:02,964 INFO [train.py:715] (7/8) Epoch 5, batch 32150, loss[loss=0.1282, simple_loss=0.2018, pruned_loss=0.02729, over 4801.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2204, pruned_loss=0.03904, over 972196.03 frames.], batch size: 25, lr: 3.69e-04 +2022-05-05 09:36:41,522 INFO [train.py:715] (7/8) Epoch 5, batch 32200, loss[loss=0.1368, simple_loss=0.2053, pruned_loss=0.03417, over 4970.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2205, pruned_loss=0.03895, over 972510.78 frames.], batch size: 14, lr: 3.69e-04 +2022-05-05 09:37:20,074 INFO [train.py:715] (7/8) Epoch 5, batch 32250, loss[loss=0.154, simple_loss=0.2205, pruned_loss=0.04381, over 4696.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2208, pruned_loss=0.03926, over 972064.86 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:37:59,208 INFO [train.py:715] (7/8) Epoch 5, batch 32300, loss[loss=0.1385, simple_loss=0.2111, pruned_loss=0.03294, over 4731.00 frames.], tot_loss[loss=0.15, simple_loss=0.2212, pruned_loss=0.03939, over 971730.92 frames.], batch size: 12, lr: 3.69e-04 +2022-05-05 09:38:37,803 INFO [train.py:715] (7/8) Epoch 5, batch 32350, loss[loss=0.1632, simple_loss=0.2254, pruned_loss=0.05056, over 4980.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03938, over 972150.24 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:39:16,505 INFO [train.py:715] (7/8) Epoch 5, batch 32400, loss[loss=0.1676, simple_loss=0.2134, pruned_loss=0.06092, over 4764.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2211, pruned_loss=0.04003, over 971766.23 frames.], batch size: 12, lr: 3.69e-04 +2022-05-05 09:39:55,116 INFO [train.py:715] (7/8) Epoch 5, batch 32450, loss[loss=0.1374, simple_loss=0.2072, pruned_loss=0.03382, over 4780.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2211, pruned_loss=0.04011, over 972412.73 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:40:33,913 INFO [train.py:715] (7/8) Epoch 5, batch 32500, loss[loss=0.133, simple_loss=0.2014, pruned_loss=0.03226, over 4825.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2211, pruned_loss=0.03981, over 971940.42 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:41:13,466 INFO [train.py:715] (7/8) Epoch 5, batch 32550, loss[loss=0.1547, simple_loss=0.2237, pruned_loss=0.04285, over 4783.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2207, pruned_loss=0.03981, over 972298.26 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:41:51,931 INFO [train.py:715] (7/8) Epoch 5, batch 32600, loss[loss=0.1524, simple_loss=0.2285, pruned_loss=0.03813, over 4695.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2204, pruned_loss=0.03964, over 972938.22 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:42:30,726 INFO [train.py:715] (7/8) Epoch 5, batch 32650, loss[loss=0.1448, 
simple_loss=0.2243, pruned_loss=0.03265, over 4873.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2211, pruned_loss=0.04004, over 972471.58 frames.], batch size: 20, lr: 3.69e-04 +2022-05-05 09:43:09,271 INFO [train.py:715] (7/8) Epoch 5, batch 32700, loss[loss=0.1402, simple_loss=0.2147, pruned_loss=0.03291, over 4784.00 frames.], tot_loss[loss=0.1507, simple_loss=0.221, pruned_loss=0.04014, over 972416.04 frames.], batch size: 17, lr: 3.69e-04 +2022-05-05 09:43:47,571 INFO [train.py:715] (7/8) Epoch 5, batch 32750, loss[loss=0.1504, simple_loss=0.2198, pruned_loss=0.04053, over 4936.00 frames.], tot_loss[loss=0.151, simple_loss=0.2214, pruned_loss=0.04028, over 971731.74 frames.], batch size: 21, lr: 3.69e-04 +2022-05-05 09:44:26,286 INFO [train.py:715] (7/8) Epoch 5, batch 32800, loss[loss=0.1464, simple_loss=0.2323, pruned_loss=0.03025, over 4889.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04027, over 970932.86 frames.], batch size: 19, lr: 3.69e-04 +2022-05-05 09:45:05,106 INFO [train.py:715] (7/8) Epoch 5, batch 32850, loss[loss=0.1469, simple_loss=0.2324, pruned_loss=0.03066, over 4832.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2226, pruned_loss=0.04065, over 970727.40 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:45:44,050 INFO [train.py:715] (7/8) Epoch 5, batch 32900, loss[loss=0.1489, simple_loss=0.2121, pruned_loss=0.04281, over 4971.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2226, pruned_loss=0.04093, over 970691.01 frames.], batch size: 15, lr: 3.69e-04 +2022-05-05 09:46:22,921 INFO [train.py:715] (7/8) Epoch 5, batch 32950, loss[loss=0.151, simple_loss=0.2245, pruned_loss=0.03876, over 4788.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2216, pruned_loss=0.03992, over 970788.58 frames.], batch size: 13, lr: 3.68e-04 +2022-05-05 09:47:01,972 INFO [train.py:715] (7/8) Epoch 5, batch 33000, loss[loss=0.1537, simple_loss=0.2199, pruned_loss=0.04375, over 4988.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.0397, over 971726.39 frames.], batch size: 14, lr: 3.68e-04 +2022-05-05 09:47:01,973 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 09:47:11,685 INFO [train.py:742] (7/8) Epoch 5, validation: loss=0.1099, simple_loss=0.1951, pruned_loss=0.01236, over 914524.00 frames. 
+2022-05-05 09:47:50,704 INFO [train.py:715] (7/8) Epoch 5, batch 33050, loss[loss=0.1387, simple_loss=0.2244, pruned_loss=0.02653, over 4948.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2216, pruned_loss=0.03959, over 972081.58 frames.], batch size: 21, lr: 3.68e-04 +2022-05-05 09:48:29,615 INFO [train.py:715] (7/8) Epoch 5, batch 33100, loss[loss=0.1657, simple_loss=0.2213, pruned_loss=0.055, over 4788.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2216, pruned_loss=0.0399, over 972460.94 frames.], batch size: 12, lr: 3.68e-04 +2022-05-05 09:49:07,621 INFO [train.py:715] (7/8) Epoch 5, batch 33150, loss[loss=0.1127, simple_loss=0.1816, pruned_loss=0.0219, over 4977.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03959, over 972499.84 frames.], batch size: 25, lr: 3.68e-04 +2022-05-05 09:49:46,216 INFO [train.py:715] (7/8) Epoch 5, batch 33200, loss[loss=0.1435, simple_loss=0.2092, pruned_loss=0.03889, over 4646.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2212, pruned_loss=0.03958, over 972129.07 frames.], batch size: 13, lr: 3.68e-04 +2022-05-05 09:50:25,079 INFO [train.py:715] (7/8) Epoch 5, batch 33250, loss[loss=0.1602, simple_loss=0.2327, pruned_loss=0.0439, over 4992.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2221, pruned_loss=0.04016, over 972207.60 frames.], batch size: 33, lr: 3.68e-04 +2022-05-05 09:51:03,569 INFO [train.py:715] (7/8) Epoch 5, batch 33300, loss[loss=0.1542, simple_loss=0.2185, pruned_loss=0.04496, over 4717.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04029, over 972100.48 frames.], batch size: 15, lr: 3.68e-04 +2022-05-05 09:51:41,937 INFO [train.py:715] (7/8) Epoch 5, batch 33350, loss[loss=0.1457, simple_loss=0.2136, pruned_loss=0.03885, over 4746.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2217, pruned_loss=0.04057, over 971921.63 frames.], batch size: 16, lr: 3.68e-04 +2022-05-05 09:52:21,209 INFO [train.py:715] (7/8) Epoch 5, batch 33400, loss[loss=0.1313, simple_loss=0.1932, pruned_loss=0.0347, over 4770.00 frames.], tot_loss[loss=0.152, simple_loss=0.2224, pruned_loss=0.04083, over 972038.41 frames.], batch size: 12, lr: 3.68e-04 +2022-05-05 09:52:59,900 INFO [train.py:715] (7/8) Epoch 5, batch 33450, loss[loss=0.1441, simple_loss=0.2163, pruned_loss=0.03589, over 4979.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2231, pruned_loss=0.04113, over 972159.42 frames.], batch size: 15, lr: 3.68e-04 +2022-05-05 09:53:38,251 INFO [train.py:715] (7/8) Epoch 5, batch 33500, loss[loss=0.1697, simple_loss=0.2288, pruned_loss=0.05528, over 4873.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2226, pruned_loss=0.04096, over 971437.80 frames.], batch size: 32, lr: 3.68e-04 +2022-05-05 09:54:16,985 INFO [train.py:715] (7/8) Epoch 5, batch 33550, loss[loss=0.1084, simple_loss=0.1895, pruned_loss=0.01366, over 4942.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04044, over 971514.64 frames.], batch size: 29, lr: 3.68e-04 +2022-05-05 09:54:55,688 INFO [train.py:715] (7/8) Epoch 5, batch 33600, loss[loss=0.1342, simple_loss=0.2149, pruned_loss=0.02676, over 4794.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2227, pruned_loss=0.04084, over 972020.91 frames.], batch size: 24, lr: 3.68e-04 +2022-05-05 09:55:34,354 INFO [train.py:715] (7/8) Epoch 5, batch 33650, loss[loss=0.1582, simple_loss=0.2241, pruned_loss=0.0462, over 4986.00 frames.], tot_loss[loss=0.152, simple_loss=0.2227, pruned_loss=0.04065, over 971900.93 frames.], batch size: 33, lr: 3.68e-04 +2022-05-05 09:56:12,631 INFO 
[train.py:715] (7/8) Epoch 5, batch 33700, loss[loss=0.1572, simple_loss=0.2257, pruned_loss=0.04439, over 4986.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2219, pruned_loss=0.04052, over 972238.50 frames.], batch size: 14, lr: 3.68e-04 +2022-05-05 09:56:51,511 INFO [train.py:715] (7/8) Epoch 5, batch 33750, loss[loss=0.1164, simple_loss=0.1868, pruned_loss=0.02305, over 4863.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.03986, over 971600.98 frames.], batch size: 16, lr: 3.68e-04 +2022-05-05 09:57:30,156 INFO [train.py:715] (7/8) Epoch 5, batch 33800, loss[loss=0.1942, simple_loss=0.261, pruned_loss=0.06375, over 4987.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2214, pruned_loss=0.04023, over 972223.79 frames.], batch size: 25, lr: 3.68e-04 +2022-05-05 09:58:09,140 INFO [train.py:715] (7/8) Epoch 5, batch 33850, loss[loss=0.1336, simple_loss=0.2009, pruned_loss=0.03318, over 4697.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2205, pruned_loss=0.04002, over 971254.59 frames.], batch size: 15, lr: 3.68e-04 +2022-05-05 09:58:47,624 INFO [train.py:715] (7/8) Epoch 5, batch 33900, loss[loss=0.1272, simple_loss=0.207, pruned_loss=0.02368, over 4892.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2201, pruned_loss=0.0393, over 971307.56 frames.], batch size: 19, lr: 3.68e-04 +2022-05-05 09:59:25,960 INFO [train.py:715] (7/8) Epoch 5, batch 33950, loss[loss=0.1996, simple_loss=0.2674, pruned_loss=0.06592, over 4952.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2213, pruned_loss=0.03923, over 972232.66 frames.], batch size: 15, lr: 3.68e-04 +2022-05-05 10:00:06,988 INFO [train.py:715] (7/8) Epoch 5, batch 34000, loss[loss=0.1142, simple_loss=0.1927, pruned_loss=0.01785, over 4874.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2208, pruned_loss=0.03893, over 971714.90 frames.], batch size: 22, lr: 3.68e-04 +2022-05-05 10:00:45,229 INFO [train.py:715] (7/8) Epoch 5, batch 34050, loss[loss=0.1423, simple_loss=0.2038, pruned_loss=0.04045, over 4817.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2209, pruned_loss=0.03894, over 971389.16 frames.], batch size: 15, lr: 3.67e-04 +2022-05-05 10:01:23,920 INFO [train.py:715] (7/8) Epoch 5, batch 34100, loss[loss=0.1695, simple_loss=0.2442, pruned_loss=0.04739, over 4906.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2212, pruned_loss=0.03924, over 971113.80 frames.], batch size: 39, lr: 3.67e-04 +2022-05-05 10:02:02,745 INFO [train.py:715] (7/8) Epoch 5, batch 34150, loss[loss=0.1535, simple_loss=0.226, pruned_loss=0.04047, over 4985.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2214, pruned_loss=0.03966, over 971099.22 frames.], batch size: 14, lr: 3.67e-04 +2022-05-05 10:02:41,106 INFO [train.py:715] (7/8) Epoch 5, batch 34200, loss[loss=0.1633, simple_loss=0.2304, pruned_loss=0.04813, over 4840.00 frames.], tot_loss[loss=0.15, simple_loss=0.2212, pruned_loss=0.0394, over 971883.48 frames.], batch size: 30, lr: 3.67e-04 +2022-05-05 10:03:20,095 INFO [train.py:715] (7/8) Epoch 5, batch 34250, loss[loss=0.1537, simple_loss=0.2228, pruned_loss=0.04232, over 4957.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2208, pruned_loss=0.03984, over 971787.59 frames.], batch size: 39, lr: 3.67e-04 +2022-05-05 10:03:58,246 INFO [train.py:715] (7/8) Epoch 5, batch 34300, loss[loss=0.1139, simple_loss=0.1852, pruned_loss=0.02133, over 4816.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2208, pruned_loss=0.03971, over 971125.51 frames.], batch size: 26, lr: 3.67e-04 +2022-05-05 10:04:36,908 INFO [train.py:715] (7/8) 
Epoch 5, batch 34350, loss[loss=0.1454, simple_loss=0.2177, pruned_loss=0.03657, over 4916.00 frames.], tot_loss[loss=0.151, simple_loss=0.2214, pruned_loss=0.04031, over 971951.77 frames.], batch size: 18, lr: 3.67e-04 +2022-05-05 10:05:14,794 INFO [train.py:715] (7/8) Epoch 5, batch 34400, loss[loss=0.1392, simple_loss=0.2178, pruned_loss=0.03026, over 4752.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2213, pruned_loss=0.04007, over 971383.97 frames.], batch size: 16, lr: 3.67e-04 +2022-05-05 10:05:53,762 INFO [train.py:715] (7/8) Epoch 5, batch 34450, loss[loss=0.1754, simple_loss=0.2532, pruned_loss=0.04877, over 4793.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2222, pruned_loss=0.04041, over 971743.96 frames.], batch size: 24, lr: 3.67e-04 +2022-05-05 10:06:32,733 INFO [train.py:715] (7/8) Epoch 5, batch 34500, loss[loss=0.1178, simple_loss=0.1867, pruned_loss=0.02447, over 4872.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2226, pruned_loss=0.04043, over 971879.36 frames.], batch size: 16, lr: 3.67e-04 +2022-05-05 10:07:11,201 INFO [train.py:715] (7/8) Epoch 5, batch 34550, loss[loss=0.1311, simple_loss=0.208, pruned_loss=0.0271, over 4976.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2225, pruned_loss=0.04063, over 971731.39 frames.], batch size: 25, lr: 3.67e-04 +2022-05-05 10:07:49,949 INFO [train.py:715] (7/8) Epoch 5, batch 34600, loss[loss=0.2097, simple_loss=0.2599, pruned_loss=0.07977, over 4782.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2228, pruned_loss=0.04108, over 971674.43 frames.], batch size: 14, lr: 3.67e-04 +2022-05-05 10:08:28,653 INFO [train.py:715] (7/8) Epoch 5, batch 34650, loss[loss=0.1406, simple_loss=0.2203, pruned_loss=0.0305, over 4748.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2233, pruned_loss=0.04126, over 971506.73 frames.], batch size: 16, lr: 3.67e-04 +2022-05-05 10:09:07,579 INFO [train.py:715] (7/8) Epoch 5, batch 34700, loss[loss=0.1534, simple_loss=0.2359, pruned_loss=0.03546, over 4924.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2231, pruned_loss=0.04083, over 972056.00 frames.], batch size: 19, lr: 3.67e-04 +2022-05-05 10:09:44,908 INFO [train.py:715] (7/8) Epoch 5, batch 34750, loss[loss=0.161, simple_loss=0.231, pruned_loss=0.0455, over 4794.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2232, pruned_loss=0.04099, over 971445.12 frames.], batch size: 21, lr: 3.67e-04 +2022-05-05 10:10:21,599 INFO [train.py:715] (7/8) Epoch 5, batch 34800, loss[loss=0.1611, simple_loss=0.2227, pruned_loss=0.04976, over 4779.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2218, pruned_loss=0.04023, over 969234.54 frames.], batch size: 14, lr: 3.67e-04 +2022-05-05 10:11:11,227 INFO [train.py:715] (7/8) Epoch 6, batch 0, loss[loss=0.1602, simple_loss=0.2341, pruned_loss=0.04322, over 4955.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2341, pruned_loss=0.04322, over 4955.00 frames.], batch size: 35, lr: 3.46e-04 +2022-05-05 10:11:50,191 INFO [train.py:715] (7/8) Epoch 6, batch 50, loss[loss=0.1421, simple_loss=0.2215, pruned_loss=0.0313, over 4804.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2224, pruned_loss=0.04129, over 219309.51 frames.], batch size: 25, lr: 3.46e-04 +2022-05-05 10:12:29,113 INFO [train.py:715] (7/8) Epoch 6, batch 100, loss[loss=0.1421, simple_loss=0.2191, pruned_loss=0.03258, over 4870.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2215, pruned_loss=0.04116, over 387038.25 frames.], batch size: 16, lr: 3.46e-04 +2022-05-05 10:13:08,353 INFO [train.py:715] (7/8) Epoch 6, batch 150, loss[loss=0.1394, 
simple_loss=0.2173, pruned_loss=0.03071, over 4888.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2204, pruned_loss=0.04013, over 517180.02 frames.], batch size: 22, lr: 3.46e-04 +2022-05-05 10:13:47,634 INFO [train.py:715] (7/8) Epoch 6, batch 200, loss[loss=0.1287, simple_loss=0.2009, pruned_loss=0.0282, over 4829.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2205, pruned_loss=0.03992, over 618241.60 frames.], batch size: 13, lr: 3.45e-04 +2022-05-05 10:14:26,642 INFO [train.py:715] (7/8) Epoch 6, batch 250, loss[loss=0.1449, simple_loss=0.2132, pruned_loss=0.03831, over 4857.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2208, pruned_loss=0.04029, over 696849.68 frames.], batch size: 20, lr: 3.45e-04 +2022-05-05 10:15:05,460 INFO [train.py:715] (7/8) Epoch 6, batch 300, loss[loss=0.1307, simple_loss=0.2021, pruned_loss=0.02968, over 4903.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2218, pruned_loss=0.0408, over 758649.62 frames.], batch size: 19, lr: 3.45e-04 +2022-05-05 10:15:44,447 INFO [train.py:715] (7/8) Epoch 6, batch 350, loss[loss=0.1743, simple_loss=0.2428, pruned_loss=0.05294, over 4862.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2207, pruned_loss=0.04022, over 806581.86 frames.], batch size: 32, lr: 3.45e-04 +2022-05-05 10:16:23,656 INFO [train.py:715] (7/8) Epoch 6, batch 400, loss[loss=0.1506, simple_loss=0.2384, pruned_loss=0.03137, over 4818.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2217, pruned_loss=0.04006, over 843130.45 frames.], batch size: 25, lr: 3.45e-04 +2022-05-05 10:17:02,412 INFO [train.py:715] (7/8) Epoch 6, batch 450, loss[loss=0.1448, simple_loss=0.2163, pruned_loss=0.03665, over 4829.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2225, pruned_loss=0.04043, over 871482.07 frames.], batch size: 26, lr: 3.45e-04 +2022-05-05 10:17:41,008 INFO [train.py:715] (7/8) Epoch 6, batch 500, loss[loss=0.1453, simple_loss=0.2111, pruned_loss=0.03979, over 4912.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2228, pruned_loss=0.04067, over 893961.79 frames.], batch size: 19, lr: 3.45e-04 +2022-05-05 10:18:20,500 INFO [train.py:715] (7/8) Epoch 6, batch 550, loss[loss=0.1318, simple_loss=0.2047, pruned_loss=0.02942, over 4905.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2215, pruned_loss=0.03994, over 911098.21 frames.], batch size: 17, lr: 3.45e-04 +2022-05-05 10:18:59,384 INFO [train.py:715] (7/8) Epoch 6, batch 600, loss[loss=0.1422, simple_loss=0.2251, pruned_loss=0.0296, over 4817.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.0399, over 924313.41 frames.], batch size: 27, lr: 3.45e-04 +2022-05-05 10:19:38,403 INFO [train.py:715] (7/8) Epoch 6, batch 650, loss[loss=0.1791, simple_loss=0.2637, pruned_loss=0.04729, over 4790.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03987, over 934649.23 frames.], batch size: 21, lr: 3.45e-04 +2022-05-05 10:20:17,489 INFO [train.py:715] (7/8) Epoch 6, batch 700, loss[loss=0.151, simple_loss=0.2175, pruned_loss=0.04224, over 4985.00 frames.], tot_loss[loss=0.151, simple_loss=0.2218, pruned_loss=0.04008, over 943109.24 frames.], batch size: 15, lr: 3.45e-04 +2022-05-05 10:20:57,108 INFO [train.py:715] (7/8) Epoch 6, batch 750, loss[loss=0.1902, simple_loss=0.258, pruned_loss=0.06121, over 4927.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2219, pruned_loss=0.04044, over 948931.43 frames.], batch size: 39, lr: 3.45e-04 +2022-05-05 10:21:35,859 INFO [train.py:715] (7/8) Epoch 6, batch 800, loss[loss=0.1411, simple_loss=0.2276, pruned_loss=0.02728, over 
4805.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2215, pruned_loss=0.03999, over 954004.03 frames.], batch size: 21, lr: 3.45e-04 +2022-05-05 10:22:14,570 INFO [train.py:715] (7/8) Epoch 6, batch 850, loss[loss=0.1449, simple_loss=0.2239, pruned_loss=0.03301, over 4920.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2212, pruned_loss=0.03979, over 957794.40 frames.], batch size: 18, lr: 3.45e-04 +2022-05-05 10:22:54,104 INFO [train.py:715] (7/8) Epoch 6, batch 900, loss[loss=0.1202, simple_loss=0.1853, pruned_loss=0.02754, over 4756.00 frames.], tot_loss[loss=0.1495, simple_loss=0.22, pruned_loss=0.03946, over 960678.31 frames.], batch size: 12, lr: 3.45e-04 +2022-05-05 10:23:33,398 INFO [train.py:715] (7/8) Epoch 6, batch 950, loss[loss=0.1681, simple_loss=0.2416, pruned_loss=0.04731, over 4978.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03965, over 962884.58 frames.], batch size: 26, lr: 3.45e-04 +2022-05-05 10:24:12,121 INFO [train.py:715] (7/8) Epoch 6, batch 1000, loss[loss=0.1533, simple_loss=0.2269, pruned_loss=0.03984, over 4707.00 frames.], tot_loss[loss=0.151, simple_loss=0.2209, pruned_loss=0.04051, over 964758.14 frames.], batch size: 15, lr: 3.45e-04 +2022-05-05 10:24:51,184 INFO [train.py:715] (7/8) Epoch 6, batch 1050, loss[loss=0.1439, simple_loss=0.214, pruned_loss=0.03688, over 4850.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2206, pruned_loss=0.04016, over 966205.07 frames.], batch size: 15, lr: 3.45e-04 +2022-05-05 10:25:30,703 INFO [train.py:715] (7/8) Epoch 6, batch 1100, loss[loss=0.1605, simple_loss=0.2354, pruned_loss=0.04282, over 4848.00 frames.], tot_loss[loss=0.15, simple_loss=0.2204, pruned_loss=0.03981, over 967639.91 frames.], batch size: 20, lr: 3.45e-04 +2022-05-05 10:26:09,925 INFO [train.py:715] (7/8) Epoch 6, batch 1150, loss[loss=0.1547, simple_loss=0.2253, pruned_loss=0.042, over 4944.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2198, pruned_loss=0.03962, over 968451.19 frames.], batch size: 23, lr: 3.45e-04 +2022-05-05 10:26:48,495 INFO [train.py:715] (7/8) Epoch 6, batch 1200, loss[loss=0.1235, simple_loss=0.1951, pruned_loss=0.02591, over 4784.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2201, pruned_loss=0.03957, over 969623.16 frames.], batch size: 12, lr: 3.45e-04 +2022-05-05 10:27:28,194 INFO [train.py:715] (7/8) Epoch 6, batch 1250, loss[loss=0.1512, simple_loss=0.2198, pruned_loss=0.0413, over 4850.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2202, pruned_loss=0.03968, over 970698.23 frames.], batch size: 32, lr: 3.45e-04 +2022-05-05 10:28:07,471 INFO [train.py:715] (7/8) Epoch 6, batch 1300, loss[loss=0.1378, simple_loss=0.2115, pruned_loss=0.03206, over 4939.00 frames.], tot_loss[loss=0.15, simple_loss=0.2205, pruned_loss=0.03978, over 970817.39 frames.], batch size: 18, lr: 3.45e-04 +2022-05-05 10:28:46,064 INFO [train.py:715] (7/8) Epoch 6, batch 1350, loss[loss=0.1459, simple_loss=0.2204, pruned_loss=0.03569, over 4873.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2203, pruned_loss=0.03958, over 971449.94 frames.], batch size: 20, lr: 3.45e-04 +2022-05-05 10:29:24,992 INFO [train.py:715] (7/8) Epoch 6, batch 1400, loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03227, over 4769.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2191, pruned_loss=0.03919, over 970914.66 frames.], batch size: 12, lr: 3.45e-04 +2022-05-05 10:30:04,139 INFO [train.py:715] (7/8) Epoch 6, batch 1450, loss[loss=0.1393, simple_loss=0.2099, pruned_loss=0.03435, over 4831.00 frames.], tot_loss[loss=0.1485, 
simple_loss=0.2191, pruned_loss=0.03897, over 970526.67 frames.], batch size: 15, lr: 3.44e-04 +2022-05-05 10:30:42,812 INFO [train.py:715] (7/8) Epoch 6, batch 1500, loss[loss=0.1389, simple_loss=0.2068, pruned_loss=0.03549, over 4833.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.03845, over 970823.94 frames.], batch size: 30, lr: 3.44e-04 +2022-05-05 10:31:21,216 INFO [train.py:715] (7/8) Epoch 6, batch 1550, loss[loss=0.1534, simple_loss=0.231, pruned_loss=0.03791, over 4818.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2203, pruned_loss=0.03939, over 972410.00 frames.], batch size: 26, lr: 3.44e-04 +2022-05-05 10:32:00,472 INFO [train.py:715] (7/8) Epoch 6, batch 1600, loss[loss=0.2138, simple_loss=0.2858, pruned_loss=0.07095, over 4884.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03925, over 972780.91 frames.], batch size: 16, lr: 3.44e-04 +2022-05-05 10:32:40,007 INFO [train.py:715] (7/8) Epoch 6, batch 1650, loss[loss=0.1455, simple_loss=0.2124, pruned_loss=0.03924, over 4985.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.03897, over 972379.09 frames.], batch size: 35, lr: 3.44e-04 +2022-05-05 10:33:18,412 INFO [train.py:715] (7/8) Epoch 6, batch 1700, loss[loss=0.1425, simple_loss=0.2159, pruned_loss=0.03452, over 4863.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2201, pruned_loss=0.03907, over 973244.29 frames.], batch size: 32, lr: 3.44e-04 +2022-05-05 10:33:57,728 INFO [train.py:715] (7/8) Epoch 6, batch 1750, loss[loss=0.1381, simple_loss=0.2124, pruned_loss=0.03194, over 4892.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03965, over 972782.08 frames.], batch size: 22, lr: 3.44e-04 +2022-05-05 10:34:37,329 INFO [train.py:715] (7/8) Epoch 6, batch 1800, loss[loss=0.1605, simple_loss=0.2211, pruned_loss=0.04999, over 4806.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2199, pruned_loss=0.03938, over 973282.24 frames.], batch size: 12, lr: 3.44e-04 +2022-05-05 10:35:16,402 INFO [train.py:715] (7/8) Epoch 6, batch 1850, loss[loss=0.163, simple_loss=0.2263, pruned_loss=0.04988, over 4980.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2211, pruned_loss=0.04018, over 972641.45 frames.], batch size: 33, lr: 3.44e-04 +2022-05-05 10:35:54,729 INFO [train.py:715] (7/8) Epoch 6, batch 1900, loss[loss=0.1339, simple_loss=0.2069, pruned_loss=0.03045, over 4872.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2202, pruned_loss=0.03952, over 973175.84 frames.], batch size: 22, lr: 3.44e-04 +2022-05-05 10:36:34,277 INFO [train.py:715] (7/8) Epoch 6, batch 1950, loss[loss=0.1477, simple_loss=0.2267, pruned_loss=0.03434, over 4859.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2197, pruned_loss=0.03939, over 972973.80 frames.], batch size: 20, lr: 3.44e-04 +2022-05-05 10:37:13,033 INFO [train.py:715] (7/8) Epoch 6, batch 2000, loss[loss=0.1412, simple_loss=0.2178, pruned_loss=0.0323, over 4812.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2198, pruned_loss=0.03932, over 972748.81 frames.], batch size: 14, lr: 3.44e-04 +2022-05-05 10:37:52,082 INFO [train.py:715] (7/8) Epoch 6, batch 2050, loss[loss=0.1387, simple_loss=0.2146, pruned_loss=0.03144, over 4965.00 frames.], tot_loss[loss=0.1495, simple_loss=0.22, pruned_loss=0.03944, over 973309.72 frames.], batch size: 24, lr: 3.44e-04 +2022-05-05 10:38:30,927 INFO [train.py:715] (7/8) Epoch 6, batch 2100, loss[loss=0.1379, simple_loss=0.2185, pruned_loss=0.02862, over 4980.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, 
pruned_loss=0.03921, over 973287.56 frames.], batch size: 25, lr: 3.44e-04 +2022-05-05 10:39:10,115 INFO [train.py:715] (7/8) Epoch 6, batch 2150, loss[loss=0.1813, simple_loss=0.2546, pruned_loss=0.05398, over 4740.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03919, over 972370.31 frames.], batch size: 16, lr: 3.44e-04 +2022-05-05 10:39:49,072 INFO [train.py:715] (7/8) Epoch 6, batch 2200, loss[loss=0.1109, simple_loss=0.1926, pruned_loss=0.01458, over 4929.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2208, pruned_loss=0.03911, over 972073.69 frames.], batch size: 29, lr: 3.44e-04 +2022-05-05 10:40:27,527 INFO [train.py:715] (7/8) Epoch 6, batch 2250, loss[loss=0.1713, simple_loss=0.2452, pruned_loss=0.04869, over 4945.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2212, pruned_loss=0.03946, over 972224.29 frames.], batch size: 21, lr: 3.44e-04 +2022-05-05 10:41:06,875 INFO [train.py:715] (7/8) Epoch 6, batch 2300, loss[loss=0.1753, simple_loss=0.2404, pruned_loss=0.05512, over 4698.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03923, over 971453.45 frames.], batch size: 15, lr: 3.44e-04 +2022-05-05 10:41:45,979 INFO [train.py:715] (7/8) Epoch 6, batch 2350, loss[loss=0.1428, simple_loss=0.2139, pruned_loss=0.03584, over 4823.00 frames.], tot_loss[loss=0.1504, simple_loss=0.221, pruned_loss=0.03987, over 971651.55 frames.], batch size: 27, lr: 3.44e-04 +2022-05-05 10:42:24,703 INFO [train.py:715] (7/8) Epoch 6, batch 2400, loss[loss=0.1678, simple_loss=0.2322, pruned_loss=0.0517, over 4790.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2212, pruned_loss=0.03988, over 972361.65 frames.], batch size: 18, lr: 3.44e-04 +2022-05-05 10:43:03,443 INFO [train.py:715] (7/8) Epoch 6, batch 2450, loss[loss=0.1498, simple_loss=0.2175, pruned_loss=0.0411, over 4753.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2204, pruned_loss=0.03961, over 972574.93 frames.], batch size: 19, lr: 3.44e-04 +2022-05-05 10:43:42,678 INFO [train.py:715] (7/8) Epoch 6, batch 2500, loss[loss=0.1818, simple_loss=0.2408, pruned_loss=0.06133, over 4979.00 frames.], tot_loss[loss=0.1494, simple_loss=0.22, pruned_loss=0.03937, over 971830.48 frames.], batch size: 39, lr: 3.44e-04 +2022-05-05 10:44:21,858 INFO [train.py:715] (7/8) Epoch 6, batch 2550, loss[loss=0.1583, simple_loss=0.2269, pruned_loss=0.04488, over 4760.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2189, pruned_loss=0.03897, over 971991.89 frames.], batch size: 19, lr: 3.44e-04 +2022-05-05 10:45:00,767 INFO [train.py:715] (7/8) Epoch 6, batch 2600, loss[loss=0.1441, simple_loss=0.2263, pruned_loss=0.0309, over 4924.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2196, pruned_loss=0.03961, over 972611.78 frames.], batch size: 18, lr: 3.44e-04 +2022-05-05 10:45:40,392 INFO [train.py:715] (7/8) Epoch 6, batch 2650, loss[loss=0.1581, simple_loss=0.2393, pruned_loss=0.03846, over 4740.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2186, pruned_loss=0.03826, over 972254.02 frames.], batch size: 16, lr: 3.43e-04 +2022-05-05 10:46:19,982 INFO [train.py:715] (7/8) Epoch 6, batch 2700, loss[loss=0.1613, simple_loss=0.2283, pruned_loss=0.04711, over 4863.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2193, pruned_loss=0.03849, over 971042.11 frames.], batch size: 38, lr: 3.43e-04 +2022-05-05 10:46:58,102 INFO [train.py:715] (7/8) Epoch 6, batch 2750, loss[loss=0.1516, simple_loss=0.2125, pruned_loss=0.04533, over 4770.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03892, over 970660.78 
frames.], batch size: 19, lr: 3.43e-04 +2022-05-05 10:47:37,114 INFO [train.py:715] (7/8) Epoch 6, batch 2800, loss[loss=0.1469, simple_loss=0.2135, pruned_loss=0.04021, over 4930.00 frames.], tot_loss[loss=0.149, simple_loss=0.2199, pruned_loss=0.03908, over 971356.43 frames.], batch size: 29, lr: 3.43e-04 +2022-05-05 10:48:16,474 INFO [train.py:715] (7/8) Epoch 6, batch 2850, loss[loss=0.1521, simple_loss=0.2281, pruned_loss=0.038, over 4924.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2205, pruned_loss=0.03985, over 971325.51 frames.], batch size: 29, lr: 3.43e-04 +2022-05-05 10:48:55,297 INFO [train.py:715] (7/8) Epoch 6, batch 2900, loss[loss=0.1516, simple_loss=0.2288, pruned_loss=0.03724, over 4959.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2204, pruned_loss=0.03967, over 971059.53 frames.], batch size: 35, lr: 3.43e-04 +2022-05-05 10:49:33,638 INFO [train.py:715] (7/8) Epoch 6, batch 2950, loss[loss=0.1416, simple_loss=0.2184, pruned_loss=0.03239, over 4894.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2205, pruned_loss=0.03942, over 970747.53 frames.], batch size: 39, lr: 3.43e-04 +2022-05-05 10:50:12,865 INFO [train.py:715] (7/8) Epoch 6, batch 3000, loss[loss=0.1567, simple_loss=0.2268, pruned_loss=0.04334, over 4974.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.0395, over 970747.19 frames.], batch size: 24, lr: 3.43e-04 +2022-05-05 10:50:12,866 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 10:50:22,539 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1095, simple_loss=0.1945, pruned_loss=0.01223, over 914524.00 frames. +2022-05-05 10:51:02,173 INFO [train.py:715] (7/8) Epoch 6, batch 3050, loss[loss=0.1527, simple_loss=0.2257, pruned_loss=0.03987, over 4952.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.03999, over 969970.07 frames.], batch size: 21, lr: 3.43e-04 +2022-05-05 10:51:41,565 INFO [train.py:715] (7/8) Epoch 6, batch 3100, loss[loss=0.1952, simple_loss=0.2543, pruned_loss=0.06809, over 4900.00 frames.], tot_loss[loss=0.1512, simple_loss=0.222, pruned_loss=0.04014, over 971024.56 frames.], batch size: 39, lr: 3.43e-04 +2022-05-05 10:52:20,131 INFO [train.py:715] (7/8) Epoch 6, batch 3150, loss[loss=0.1574, simple_loss=0.2267, pruned_loss=0.04405, over 4791.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2225, pruned_loss=0.04039, over 971638.39 frames.], batch size: 13, lr: 3.43e-04 +2022-05-05 10:52:58,782 INFO [train.py:715] (7/8) Epoch 6, batch 3200, loss[loss=0.1581, simple_loss=0.2289, pruned_loss=0.04364, over 4740.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2223, pruned_loss=0.04001, over 971912.03 frames.], batch size: 16, lr: 3.43e-04 +2022-05-05 10:53:38,609 INFO [train.py:715] (7/8) Epoch 6, batch 3250, loss[loss=0.128, simple_loss=0.2001, pruned_loss=0.02789, over 4850.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2219, pruned_loss=0.04001, over 973111.10 frames.], batch size: 13, lr: 3.43e-04 +2022-05-05 10:54:17,333 INFO [train.py:715] (7/8) Epoch 6, batch 3300, loss[loss=0.145, simple_loss=0.2174, pruned_loss=0.03633, over 4839.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2228, pruned_loss=0.04003, over 973137.72 frames.], batch size: 32, lr: 3.43e-04 +2022-05-05 10:54:55,861 INFO [train.py:715] (7/8) Epoch 6, batch 3350, loss[loss=0.1525, simple_loss=0.2349, pruned_loss=0.03509, over 4861.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2227, pruned_loss=0.03998, over 973067.50 frames.], batch size: 20, lr: 3.43e-04 +2022-05-05 10:55:35,258 INFO [train.py:715] 
(7/8) Epoch 6, batch 3400, loss[loss=0.1521, simple_loss=0.2326, pruned_loss=0.03581, over 4933.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2227, pruned_loss=0.0402, over 973328.15 frames.], batch size: 21, lr: 3.43e-04 +2022-05-05 10:56:14,431 INFO [train.py:715] (7/8) Epoch 6, batch 3450, loss[loss=0.1561, simple_loss=0.2212, pruned_loss=0.04548, over 4852.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2216, pruned_loss=0.03996, over 972955.68 frames.], batch size: 32, lr: 3.43e-04 +2022-05-05 10:56:52,541 INFO [train.py:715] (7/8) Epoch 6, batch 3500, loss[loss=0.1132, simple_loss=0.1875, pruned_loss=0.01947, over 4817.00 frames.], tot_loss[loss=0.151, simple_loss=0.2218, pruned_loss=0.04016, over 972676.10 frames.], batch size: 13, lr: 3.43e-04 +2022-05-05 10:57:31,368 INFO [train.py:715] (7/8) Epoch 6, batch 3550, loss[loss=0.1521, simple_loss=0.2228, pruned_loss=0.04068, over 4820.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.03978, over 972355.74 frames.], batch size: 26, lr: 3.43e-04 +2022-05-05 10:58:10,832 INFO [train.py:715] (7/8) Epoch 6, batch 3600, loss[loss=0.1499, simple_loss=0.2196, pruned_loss=0.0401, over 4811.00 frames.], tot_loss[loss=0.15, simple_loss=0.2213, pruned_loss=0.03938, over 971666.84 frames.], batch size: 13, lr: 3.43e-04 +2022-05-05 10:58:49,771 INFO [train.py:715] (7/8) Epoch 6, batch 3650, loss[loss=0.1494, simple_loss=0.2272, pruned_loss=0.03575, over 4829.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.03978, over 971079.29 frames.], batch size: 27, lr: 3.43e-04 +2022-05-05 10:59:27,965 INFO [train.py:715] (7/8) Epoch 6, batch 3700, loss[loss=0.1416, simple_loss=0.215, pruned_loss=0.03411, over 4862.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.03968, over 971273.39 frames.], batch size: 22, lr: 3.43e-04 +2022-05-05 11:00:07,249 INFO [train.py:715] (7/8) Epoch 6, batch 3750, loss[loss=0.134, simple_loss=0.1961, pruned_loss=0.0359, over 4916.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03963, over 972165.45 frames.], batch size: 23, lr: 3.43e-04 +2022-05-05 11:00:46,317 INFO [train.py:715] (7/8) Epoch 6, batch 3800, loss[loss=0.1382, simple_loss=0.2109, pruned_loss=0.03272, over 4935.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2203, pruned_loss=0.03909, over 972287.55 frames.], batch size: 23, lr: 3.43e-04 +2022-05-05 11:01:24,439 INFO [train.py:715] (7/8) Epoch 6, batch 3850, loss[loss=0.1518, simple_loss=0.2278, pruned_loss=0.03786, over 4884.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.03952, over 973152.64 frames.], batch size: 16, lr: 3.43e-04 +2022-05-05 11:02:03,351 INFO [train.py:715] (7/8) Epoch 6, batch 3900, loss[loss=0.1344, simple_loss=0.2059, pruned_loss=0.0314, over 4832.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2208, pruned_loss=0.03928, over 971993.34 frames.], batch size: 26, lr: 3.42e-04 +2022-05-05 11:02:42,645 INFO [train.py:715] (7/8) Epoch 6, batch 3950, loss[loss=0.1437, simple_loss=0.2144, pruned_loss=0.03648, over 4887.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03935, over 971377.41 frames.], batch size: 19, lr: 3.42e-04 +2022-05-05 11:03:21,708 INFO [train.py:715] (7/8) Epoch 6, batch 4000, loss[loss=0.1286, simple_loss=0.1926, pruned_loss=0.03227, over 4988.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2211, pruned_loss=0.03963, over 971449.62 frames.], batch size: 14, lr: 3.42e-04 +2022-05-05 11:04:00,015 INFO [train.py:715] (7/8) Epoch 6, batch 4050, loss[loss=0.1221, 
simple_loss=0.1946, pruned_loss=0.02476, over 4844.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2213, pruned_loss=0.03956, over 971293.78 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:04:39,114 INFO [train.py:715] (7/8) Epoch 6, batch 4100, loss[loss=0.1394, simple_loss=0.2147, pruned_loss=0.0321, over 4842.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2203, pruned_loss=0.03952, over 971800.94 frames.], batch size: 32, lr: 3.42e-04 +2022-05-05 11:05:17,851 INFO [train.py:715] (7/8) Epoch 6, batch 4150, loss[loss=0.1272, simple_loss=0.1991, pruned_loss=0.02761, over 4876.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2211, pruned_loss=0.03999, over 972238.88 frames.], batch size: 32, lr: 3.42e-04 +2022-05-05 11:05:56,005 INFO [train.py:715] (7/8) Epoch 6, batch 4200, loss[loss=0.1597, simple_loss=0.2427, pruned_loss=0.03838, over 4787.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.0396, over 972784.52 frames.], batch size: 18, lr: 3.42e-04 +2022-05-05 11:06:34,726 INFO [train.py:715] (7/8) Epoch 6, batch 4250, loss[loss=0.1523, simple_loss=0.219, pruned_loss=0.04277, over 4775.00 frames.], tot_loss[loss=0.1508, simple_loss=0.222, pruned_loss=0.03985, over 972568.99 frames.], batch size: 18, lr: 3.42e-04 +2022-05-05 11:07:13,788 INFO [train.py:715] (7/8) Epoch 6, batch 4300, loss[loss=0.1439, simple_loss=0.2106, pruned_loss=0.03861, over 4861.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2218, pruned_loss=0.03964, over 973537.54 frames.], batch size: 20, lr: 3.42e-04 +2022-05-05 11:07:52,584 INFO [train.py:715] (7/8) Epoch 6, batch 4350, loss[loss=0.1615, simple_loss=0.2357, pruned_loss=0.0437, over 4706.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.03887, over 973716.24 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:08:30,487 INFO [train.py:715] (7/8) Epoch 6, batch 4400, loss[loss=0.1187, simple_loss=0.1923, pruned_loss=0.02248, over 4840.00 frames.], tot_loss[loss=0.15, simple_loss=0.2213, pruned_loss=0.03936, over 973487.87 frames.], batch size: 13, lr: 3.42e-04 +2022-05-05 11:09:08,943 INFO [train.py:715] (7/8) Epoch 6, batch 4450, loss[loss=0.1565, simple_loss=0.2336, pruned_loss=0.03972, over 4920.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2214, pruned_loss=0.03946, over 973138.79 frames.], batch size: 29, lr: 3.42e-04 +2022-05-05 11:09:48,073 INFO [train.py:715] (7/8) Epoch 6, batch 4500, loss[loss=0.1564, simple_loss=0.2268, pruned_loss=0.04302, over 4850.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2217, pruned_loss=0.03928, over 973237.07 frames.], batch size: 30, lr: 3.42e-04 +2022-05-05 11:10:26,353 INFO [train.py:715] (7/8) Epoch 6, batch 4550, loss[loss=0.1154, simple_loss=0.1962, pruned_loss=0.01733, over 4811.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2212, pruned_loss=0.03917, over 973241.71 frames.], batch size: 12, lr: 3.42e-04 +2022-05-05 11:11:04,823 INFO [train.py:715] (7/8) Epoch 6, batch 4600, loss[loss=0.1759, simple_loss=0.2373, pruned_loss=0.0573, over 4831.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2215, pruned_loss=0.03935, over 973340.97 frames.], batch size: 30, lr: 3.42e-04 +2022-05-05 11:11:44,225 INFO [train.py:715] (7/8) Epoch 6, batch 4650, loss[loss=0.1656, simple_loss=0.2253, pruned_loss=0.05295, over 4840.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2204, pruned_loss=0.03905, over 972491.44 frames.], batch size: 32, lr: 3.42e-04 +2022-05-05 11:12:23,353 INFO [train.py:715] (7/8) Epoch 6, batch 4700, loss[loss=0.1761, simple_loss=0.2479, pruned_loss=0.0522, 
over 4692.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2209, pruned_loss=0.03918, over 972034.51 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:13:01,630 INFO [train.py:715] (7/8) Epoch 6, batch 4750, loss[loss=0.1627, simple_loss=0.2255, pruned_loss=0.0499, over 4794.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03842, over 973506.91 frames.], batch size: 24, lr: 3.42e-04 +2022-05-05 11:13:40,646 INFO [train.py:715] (7/8) Epoch 6, batch 4800, loss[loss=0.1055, simple_loss=0.1772, pruned_loss=0.01689, over 4736.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03868, over 972785.04 frames.], batch size: 12, lr: 3.42e-04 +2022-05-05 11:14:19,738 INFO [train.py:715] (7/8) Epoch 6, batch 4850, loss[loss=0.1601, simple_loss=0.2282, pruned_loss=0.04601, over 4984.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03951, over 973606.34 frames.], batch size: 14, lr: 3.42e-04 +2022-05-05 11:14:58,283 INFO [train.py:715] (7/8) Epoch 6, batch 4900, loss[loss=0.1794, simple_loss=0.2387, pruned_loss=0.06004, over 4838.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2211, pruned_loss=0.04007, over 972986.84 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:15:37,165 INFO [train.py:715] (7/8) Epoch 6, batch 4950, loss[loss=0.1781, simple_loss=0.2418, pruned_loss=0.05722, over 4889.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2217, pruned_loss=0.04051, over 972506.91 frames.], batch size: 16, lr: 3.42e-04 +2022-05-05 11:16:16,919 INFO [train.py:715] (7/8) Epoch 6, batch 5000, loss[loss=0.1797, simple_loss=0.2474, pruned_loss=0.05601, over 4942.00 frames.], tot_loss[loss=0.1518, simple_loss=0.222, pruned_loss=0.04081, over 972460.01 frames.], batch size: 23, lr: 3.42e-04 +2022-05-05 11:16:55,991 INFO [train.py:715] (7/8) Epoch 6, batch 5050, loss[loss=0.186, simple_loss=0.2393, pruned_loss=0.06635, over 4826.00 frames.], tot_loss[loss=0.1513, simple_loss=0.222, pruned_loss=0.04033, over 973814.99 frames.], batch size: 15, lr: 3.42e-04 +2022-05-05 11:17:34,331 INFO [train.py:715] (7/8) Epoch 6, batch 5100, loss[loss=0.1399, simple_loss=0.2186, pruned_loss=0.03065, over 4950.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.03988, over 973836.07 frames.], batch size: 24, lr: 3.42e-04 +2022-05-05 11:18:13,254 INFO [train.py:715] (7/8) Epoch 6, batch 5150, loss[loss=0.1372, simple_loss=0.2031, pruned_loss=0.03567, over 4845.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2213, pruned_loss=0.03996, over 973103.15 frames.], batch size: 30, lr: 3.41e-04 +2022-05-05 11:18:52,358 INFO [train.py:715] (7/8) Epoch 6, batch 5200, loss[loss=0.1465, simple_loss=0.2172, pruned_loss=0.03791, over 4694.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2196, pruned_loss=0.03903, over 972200.19 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:19:30,494 INFO [train.py:715] (7/8) Epoch 6, batch 5250, loss[loss=0.1354, simple_loss=0.2029, pruned_loss=0.03392, over 4992.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.03885, over 972353.82 frames.], batch size: 16, lr: 3.41e-04 +2022-05-05 11:20:09,578 INFO [train.py:715] (7/8) Epoch 6, batch 5300, loss[loss=0.1865, simple_loss=0.2537, pruned_loss=0.05967, over 4965.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2194, pruned_loss=0.03886, over 972400.03 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:20:48,894 INFO [train.py:715] (7/8) Epoch 6, batch 5350, loss[loss=0.1352, simple_loss=0.2215, pruned_loss=0.02447, over 4881.00 frames.], 
tot_loss[loss=0.1483, simple_loss=0.2192, pruned_loss=0.03869, over 972396.83 frames.], batch size: 22, lr: 3.41e-04 +2022-05-05 11:21:27,942 INFO [train.py:715] (7/8) Epoch 6, batch 5400, loss[loss=0.1443, simple_loss=0.2206, pruned_loss=0.03403, over 4948.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2206, pruned_loss=0.03942, over 972953.35 frames.], batch size: 21, lr: 3.41e-04 +2022-05-05 11:22:06,520 INFO [train.py:715] (7/8) Epoch 6, batch 5450, loss[loss=0.144, simple_loss=0.2057, pruned_loss=0.04118, over 4847.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.03929, over 972693.22 frames.], batch size: 13, lr: 3.41e-04 +2022-05-05 11:22:45,315 INFO [train.py:715] (7/8) Epoch 6, batch 5500, loss[loss=0.1575, simple_loss=0.2154, pruned_loss=0.04983, over 4876.00 frames.], tot_loss[loss=0.149, simple_loss=0.2195, pruned_loss=0.03925, over 973031.06 frames.], batch size: 22, lr: 3.41e-04 +2022-05-05 11:23:24,193 INFO [train.py:715] (7/8) Epoch 6, batch 5550, loss[loss=0.1592, simple_loss=0.2384, pruned_loss=0.04003, over 4935.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2195, pruned_loss=0.03895, over 973400.45 frames.], batch size: 23, lr: 3.41e-04 +2022-05-05 11:24:02,781 INFO [train.py:715] (7/8) Epoch 6, batch 5600, loss[loss=0.1746, simple_loss=0.2528, pruned_loss=0.04817, over 4879.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2199, pruned_loss=0.03935, over 973358.11 frames.], batch size: 16, lr: 3.41e-04 +2022-05-05 11:24:42,275 INFO [train.py:715] (7/8) Epoch 6, batch 5650, loss[loss=0.1387, simple_loss=0.2075, pruned_loss=0.03492, over 4908.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2206, pruned_loss=0.03953, over 973029.85 frames.], batch size: 23, lr: 3.41e-04 +2022-05-05 11:25:21,627 INFO [train.py:715] (7/8) Epoch 6, batch 5700, loss[loss=0.1601, simple_loss=0.2272, pruned_loss=0.04652, over 4930.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2211, pruned_loss=0.03964, over 973213.67 frames.], batch size: 18, lr: 3.41e-04 +2022-05-05 11:26:00,237 INFO [train.py:715] (7/8) Epoch 6, batch 5750, loss[loss=0.1627, simple_loss=0.2289, pruned_loss=0.04825, over 4891.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.03924, over 973142.85 frames.], batch size: 32, lr: 3.41e-04 +2022-05-05 11:26:38,643 INFO [train.py:715] (7/8) Epoch 6, batch 5800, loss[loss=0.1447, simple_loss=0.2129, pruned_loss=0.03826, over 4770.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2205, pruned_loss=0.03946, over 972157.29 frames.], batch size: 18, lr: 3.41e-04 +2022-05-05 11:27:17,537 INFO [train.py:715] (7/8) Epoch 6, batch 5850, loss[loss=0.1788, simple_loss=0.2617, pruned_loss=0.04799, over 4816.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2193, pruned_loss=0.03859, over 971930.39 frames.], batch size: 25, lr: 3.41e-04 +2022-05-05 11:27:56,993 INFO [train.py:715] (7/8) Epoch 6, batch 5900, loss[loss=0.1318, simple_loss=0.2078, pruned_loss=0.0279, over 4951.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03827, over 972082.91 frames.], batch size: 24, lr: 3.41e-04 +2022-05-05 11:28:34,913 INFO [train.py:715] (7/8) Epoch 6, batch 5950, loss[loss=0.1639, simple_loss=0.2341, pruned_loss=0.04684, over 4903.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2193, pruned_loss=0.03818, over 972008.70 frames.], batch size: 17, lr: 3.41e-04 +2022-05-05 11:29:14,285 INFO [train.py:715] (7/8) Epoch 6, batch 6000, loss[loss=0.1488, simple_loss=0.2204, pruned_loss=0.03866, over 4964.00 frames.], tot_loss[loss=0.1484, 
simple_loss=0.2194, pruned_loss=0.03872, over 973621.38 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:29:14,286 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 11:29:24,855 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1095, simple_loss=0.1945, pruned_loss=0.01229, over 914524.00 frames. +2022-05-05 11:30:04,468 INFO [train.py:715] (7/8) Epoch 6, batch 6050, loss[loss=0.1553, simple_loss=0.2269, pruned_loss=0.04182, over 4987.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.03895, over 974514.67 frames.], batch size: 25, lr: 3.41e-04 +2022-05-05 11:30:43,726 INFO [train.py:715] (7/8) Epoch 6, batch 6100, loss[loss=0.1681, simple_loss=0.2299, pruned_loss=0.05314, over 4771.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.03929, over 973340.91 frames.], batch size: 17, lr: 3.41e-04 +2022-05-05 11:31:23,119 INFO [train.py:715] (7/8) Epoch 6, batch 6150, loss[loss=0.1613, simple_loss=0.2294, pruned_loss=0.04658, over 4842.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2199, pruned_loss=0.03918, over 973259.21 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:32:01,617 INFO [train.py:715] (7/8) Epoch 6, batch 6200, loss[loss=0.1393, simple_loss=0.2053, pruned_loss=0.03659, over 4835.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2203, pruned_loss=0.03948, over 973040.43 frames.], batch size: 15, lr: 3.41e-04 +2022-05-05 11:32:40,936 INFO [train.py:715] (7/8) Epoch 6, batch 6250, loss[loss=0.1489, simple_loss=0.2218, pruned_loss=0.03805, over 4948.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2199, pruned_loss=0.03954, over 972724.53 frames.], batch size: 29, lr: 3.41e-04 +2022-05-05 11:33:20,233 INFO [train.py:715] (7/8) Epoch 6, batch 6300, loss[loss=0.163, simple_loss=0.234, pruned_loss=0.04603, over 4769.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2191, pruned_loss=0.03875, over 971731.03 frames.], batch size: 17, lr: 3.41e-04 +2022-05-05 11:33:58,706 INFO [train.py:715] (7/8) Epoch 6, batch 6350, loss[loss=0.1414, simple_loss=0.2028, pruned_loss=0.03995, over 4795.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2196, pruned_loss=0.03896, over 972149.71 frames.], batch size: 12, lr: 3.41e-04 +2022-05-05 11:34:37,341 INFO [train.py:715] (7/8) Epoch 6, batch 6400, loss[loss=0.1433, simple_loss=0.2103, pruned_loss=0.03817, over 4698.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2208, pruned_loss=0.03973, over 971618.77 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:35:16,566 INFO [train.py:715] (7/8) Epoch 6, batch 6450, loss[loss=0.1904, simple_loss=0.2567, pruned_loss=0.0621, over 4956.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.03988, over 972153.95 frames.], batch size: 21, lr: 3.40e-04 +2022-05-05 11:35:55,386 INFO [train.py:715] (7/8) Epoch 6, batch 6500, loss[loss=0.1402, simple_loss=0.214, pruned_loss=0.03325, over 4955.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2212, pruned_loss=0.04015, over 972174.63 frames.], batch size: 24, lr: 3.40e-04 +2022-05-05 11:36:33,970 INFO [train.py:715] (7/8) Epoch 6, batch 6550, loss[loss=0.1242, simple_loss=0.1972, pruned_loss=0.02555, over 4913.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2201, pruned_loss=0.03962, over 973006.58 frames.], batch size: 35, lr: 3.40e-04 +2022-05-05 11:37:12,775 INFO [train.py:715] (7/8) Epoch 6, batch 6600, loss[loss=0.1444, simple_loss=0.2183, pruned_loss=0.0352, over 4925.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2203, pruned_loss=0.03942, over 972639.11 frames.], batch size: 23, 
lr: 3.40e-04 +2022-05-05 11:37:52,972 INFO [train.py:715] (7/8) Epoch 6, batch 6650, loss[loss=0.1157, simple_loss=0.1893, pruned_loss=0.02107, over 4845.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2203, pruned_loss=0.03931, over 972378.92 frames.], batch size: 13, lr: 3.40e-04 +2022-05-05 11:38:31,784 INFO [train.py:715] (7/8) Epoch 6, batch 6700, loss[loss=0.1744, simple_loss=0.2324, pruned_loss=0.05821, over 4917.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2211, pruned_loss=0.03981, over 972379.98 frames.], batch size: 18, lr: 3.40e-04 +2022-05-05 11:39:10,523 INFO [train.py:715] (7/8) Epoch 6, batch 6750, loss[loss=0.1384, simple_loss=0.21, pruned_loss=0.03344, over 4825.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2219, pruned_loss=0.04031, over 972335.94 frames.], batch size: 25, lr: 3.40e-04 +2022-05-05 11:39:49,805 INFO [train.py:715] (7/8) Epoch 6, batch 6800, loss[loss=0.1587, simple_loss=0.2379, pruned_loss=0.03973, over 4901.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2214, pruned_loss=0.03998, over 972416.01 frames.], batch size: 22, lr: 3.40e-04 +2022-05-05 11:40:28,790 INFO [train.py:715] (7/8) Epoch 6, batch 6850, loss[loss=0.1513, simple_loss=0.2183, pruned_loss=0.04214, over 4840.00 frames.], tot_loss[loss=0.151, simple_loss=0.222, pruned_loss=0.04003, over 972822.22 frames.], batch size: 34, lr: 3.40e-04 +2022-05-05 11:41:06,842 INFO [train.py:715] (7/8) Epoch 6, batch 6900, loss[loss=0.15, simple_loss=0.2262, pruned_loss=0.03691, over 4889.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2221, pruned_loss=0.03987, over 972483.02 frames.], batch size: 19, lr: 3.40e-04 +2022-05-05 11:41:45,914 INFO [train.py:715] (7/8) Epoch 6, batch 6950, loss[loss=0.1476, simple_loss=0.2175, pruned_loss=0.03885, over 4936.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03913, over 972585.42 frames.], batch size: 23, lr: 3.40e-04 +2022-05-05 11:42:25,622 INFO [train.py:715] (7/8) Epoch 6, batch 7000, loss[loss=0.1474, simple_loss=0.2202, pruned_loss=0.03727, over 4784.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2209, pruned_loss=0.03921, over 972553.15 frames.], batch size: 14, lr: 3.40e-04 +2022-05-05 11:43:04,219 INFO [train.py:715] (7/8) Epoch 6, batch 7050, loss[loss=0.1607, simple_loss=0.2249, pruned_loss=0.04824, over 4759.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2203, pruned_loss=0.03918, over 972301.29 frames.], batch size: 16, lr: 3.40e-04 +2022-05-05 11:43:42,732 INFO [train.py:715] (7/8) Epoch 6, batch 7100, loss[loss=0.1524, simple_loss=0.2247, pruned_loss=0.0401, over 4911.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2214, pruned_loss=0.03986, over 972267.44 frames.], batch size: 19, lr: 3.40e-04 +2022-05-05 11:44:25,535 INFO [train.py:715] (7/8) Epoch 6, batch 7150, loss[loss=0.1156, simple_loss=0.189, pruned_loss=0.02108, over 4757.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03926, over 972217.73 frames.], batch size: 19, lr: 3.40e-04 +2022-05-05 11:45:04,231 INFO [train.py:715] (7/8) Epoch 6, batch 7200, loss[loss=0.1208, simple_loss=0.195, pruned_loss=0.02334, over 4888.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2205, pruned_loss=0.03944, over 972201.78 frames.], batch size: 16, lr: 3.40e-04 +2022-05-05 11:45:42,694 INFO [train.py:715] (7/8) Epoch 6, batch 7250, loss[loss=0.114, simple_loss=0.1855, pruned_loss=0.02127, over 4837.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2198, pruned_loss=0.03878, over 972844.28 frames.], batch size: 30, lr: 3.40e-04 +2022-05-05 11:46:21,450 
INFO [train.py:715] (7/8) Epoch 6, batch 7300, loss[loss=0.1678, simple_loss=0.2395, pruned_loss=0.04807, over 4815.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2196, pruned_loss=0.03847, over 972921.02 frames.], batch size: 24, lr: 3.40e-04 +2022-05-05 11:47:01,053 INFO [train.py:715] (7/8) Epoch 6, batch 7350, loss[loss=0.1784, simple_loss=0.2482, pruned_loss=0.05434, over 4761.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2195, pruned_loss=0.03895, over 972938.63 frames.], batch size: 19, lr: 3.40e-04 +2022-05-05 11:47:38,860 INFO [train.py:715] (7/8) Epoch 6, batch 7400, loss[loss=0.127, simple_loss=0.1982, pruned_loss=0.02793, over 4775.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2196, pruned_loss=0.03828, over 972914.95 frames.], batch size: 12, lr: 3.40e-04 +2022-05-05 11:48:18,377 INFO [train.py:715] (7/8) Epoch 6, batch 7450, loss[loss=0.1417, simple_loss=0.22, pruned_loss=0.0317, over 4879.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2204, pruned_loss=0.03844, over 973388.34 frames.], batch size: 16, lr: 3.40e-04 +2022-05-05 11:48:56,993 INFO [train.py:715] (7/8) Epoch 6, batch 7500, loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02971, over 4787.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2204, pruned_loss=0.03854, over 973453.37 frames.], batch size: 18, lr: 3.40e-04 +2022-05-05 11:49:35,695 INFO [train.py:715] (7/8) Epoch 6, batch 7550, loss[loss=0.181, simple_loss=0.243, pruned_loss=0.05944, over 4813.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2204, pruned_loss=0.03859, over 973328.79 frames.], batch size: 26, lr: 3.40e-04 +2022-05-05 11:50:14,634 INFO [train.py:715] (7/8) Epoch 6, batch 7600, loss[loss=0.1297, simple_loss=0.1964, pruned_loss=0.03147, over 4873.00 frames.], tot_loss[loss=0.1493, simple_loss=0.221, pruned_loss=0.03876, over 972665.39 frames.], batch size: 22, lr: 3.40e-04 +2022-05-05 11:50:53,764 INFO [train.py:715] (7/8) Epoch 6, batch 7650, loss[loss=0.1448, simple_loss=0.207, pruned_loss=0.04129, over 4843.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2198, pruned_loss=0.03831, over 972545.70 frames.], batch size: 15, lr: 3.40e-04 +2022-05-05 11:51:33,381 INFO [train.py:715] (7/8) Epoch 6, batch 7700, loss[loss=0.1587, simple_loss=0.2299, pruned_loss=0.04376, over 4908.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2191, pruned_loss=0.03809, over 972452.42 frames.], batch size: 22, lr: 3.39e-04 +2022-05-05 11:52:11,594 INFO [train.py:715] (7/8) Epoch 6, batch 7750, loss[loss=0.1578, simple_loss=0.2303, pruned_loss=0.0427, over 4855.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2199, pruned_loss=0.03793, over 972898.68 frames.], batch size: 20, lr: 3.39e-04 +2022-05-05 11:52:51,084 INFO [train.py:715] (7/8) Epoch 6, batch 7800, loss[loss=0.1443, simple_loss=0.2178, pruned_loss=0.03542, over 4805.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2208, pruned_loss=0.03812, over 973321.23 frames.], batch size: 21, lr: 3.39e-04 +2022-05-05 11:53:30,017 INFO [train.py:715] (7/8) Epoch 6, batch 7850, loss[loss=0.1622, simple_loss=0.2258, pruned_loss=0.04929, over 4809.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2213, pruned_loss=0.03889, over 972379.73 frames.], batch size: 12, lr: 3.39e-04 +2022-05-05 11:54:08,585 INFO [train.py:715] (7/8) Epoch 6, batch 7900, loss[loss=0.1256, simple_loss=0.2024, pruned_loss=0.02439, over 4946.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2211, pruned_loss=0.03907, over 972083.24 frames.], batch size: 21, lr: 3.39e-04 +2022-05-05 11:54:47,344 INFO [train.py:715] (7/8) Epoch 6, batch 
7950, loss[loss=0.137, simple_loss=0.2044, pruned_loss=0.03485, over 4984.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2205, pruned_loss=0.03889, over 972322.27 frames.], batch size: 31, lr: 3.39e-04 +2022-05-05 11:55:26,527 INFO [train.py:715] (7/8) Epoch 6, batch 8000, loss[loss=0.1686, simple_loss=0.2216, pruned_loss=0.05783, over 4854.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2199, pruned_loss=0.03854, over 972138.30 frames.], batch size: 32, lr: 3.39e-04 +2022-05-05 11:56:05,893 INFO [train.py:715] (7/8) Epoch 6, batch 8050, loss[loss=0.1712, simple_loss=0.2335, pruned_loss=0.05439, over 4988.00 frames.], tot_loss[loss=0.149, simple_loss=0.2202, pruned_loss=0.03894, over 971918.54 frames.], batch size: 31, lr: 3.39e-04 +2022-05-05 11:56:43,894 INFO [train.py:715] (7/8) Epoch 6, batch 8100, loss[loss=0.1559, simple_loss=0.232, pruned_loss=0.03992, over 4802.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2208, pruned_loss=0.03922, over 971970.85 frames.], batch size: 25, lr: 3.39e-04 +2022-05-05 11:57:22,886 INFO [train.py:715] (7/8) Epoch 6, batch 8150, loss[loss=0.1217, simple_loss=0.19, pruned_loss=0.0267, over 4811.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03944, over 971703.39 frames.], batch size: 12, lr: 3.39e-04 +2022-05-05 11:58:01,961 INFO [train.py:715] (7/8) Epoch 6, batch 8200, loss[loss=0.1764, simple_loss=0.2444, pruned_loss=0.05422, over 4776.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2216, pruned_loss=0.03977, over 971892.75 frames.], batch size: 14, lr: 3.39e-04 +2022-05-05 11:58:41,279 INFO [train.py:715] (7/8) Epoch 6, batch 8250, loss[loss=0.1331, simple_loss=0.2034, pruned_loss=0.03139, over 4898.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2216, pruned_loss=0.03957, over 972198.76 frames.], batch size: 19, lr: 3.39e-04 +2022-05-05 11:59:19,576 INFO [train.py:715] (7/8) Epoch 6, batch 8300, loss[loss=0.1135, simple_loss=0.1832, pruned_loss=0.02192, over 4820.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2209, pruned_loss=0.03941, over 972661.27 frames.], batch size: 26, lr: 3.39e-04 +2022-05-05 11:59:58,764 INFO [train.py:715] (7/8) Epoch 6, batch 8350, loss[loss=0.1588, simple_loss=0.2272, pruned_loss=0.04523, over 4964.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2212, pruned_loss=0.03932, over 972607.85 frames.], batch size: 35, lr: 3.39e-04 +2022-05-05 12:00:37,621 INFO [train.py:715] (7/8) Epoch 6, batch 8400, loss[loss=0.1628, simple_loss=0.2381, pruned_loss=0.04379, over 4984.00 frames.], tot_loss[loss=0.151, simple_loss=0.2222, pruned_loss=0.03991, over 972090.35 frames.], batch size: 28, lr: 3.39e-04 +2022-05-05 12:01:15,842 INFO [train.py:715] (7/8) Epoch 6, batch 8450, loss[loss=0.1572, simple_loss=0.2245, pruned_loss=0.04496, over 4844.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.0391, over 971628.96 frames.], batch size: 15, lr: 3.39e-04 +2022-05-05 12:01:54,984 INFO [train.py:715] (7/8) Epoch 6, batch 8500, loss[loss=0.1466, simple_loss=0.2192, pruned_loss=0.03703, over 4979.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2202, pruned_loss=0.03912, over 970358.55 frames.], batch size: 25, lr: 3.39e-04 +2022-05-05 12:02:33,547 INFO [train.py:715] (7/8) Epoch 6, batch 8550, loss[loss=0.2025, simple_loss=0.2704, pruned_loss=0.06725, over 4962.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2208, pruned_loss=0.03916, over 970922.94 frames.], batch size: 24, lr: 3.39e-04 +2022-05-05 12:03:12,440 INFO [train.py:715] (7/8) Epoch 6, batch 8600, loss[loss=0.141, 
simple_loss=0.2163, pruned_loss=0.03283, over 4938.00 frames.], tot_loss[loss=0.149, simple_loss=0.2203, pruned_loss=0.03881, over 971891.73 frames.], batch size: 39, lr: 3.39e-04 +2022-05-05 12:03:50,311 INFO [train.py:715] (7/8) Epoch 6, batch 8650, loss[loss=0.1584, simple_loss=0.2329, pruned_loss=0.04194, over 4927.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2215, pruned_loss=0.03951, over 971654.43 frames.], batch size: 29, lr: 3.39e-04 +2022-05-05 12:04:29,734 INFO [train.py:715] (7/8) Epoch 6, batch 8700, loss[loss=0.1579, simple_loss=0.2213, pruned_loss=0.04728, over 4987.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2218, pruned_loss=0.03992, over 971747.40 frames.], batch size: 14, lr: 3.39e-04 +2022-05-05 12:05:08,432 INFO [train.py:715] (7/8) Epoch 6, batch 8750, loss[loss=0.1632, simple_loss=0.2378, pruned_loss=0.04429, over 4816.00 frames.], tot_loss[loss=0.15, simple_loss=0.2212, pruned_loss=0.03942, over 971675.45 frames.], batch size: 26, lr: 3.39e-04 +2022-05-05 12:05:46,863 INFO [train.py:715] (7/8) Epoch 6, batch 8800, loss[loss=0.1489, simple_loss=0.2134, pruned_loss=0.04223, over 4726.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2211, pruned_loss=0.03972, over 971935.83 frames.], batch size: 16, lr: 3.39e-04 +2022-05-05 12:06:25,684 INFO [train.py:715] (7/8) Epoch 6, batch 8850, loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.03018, over 4758.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2211, pruned_loss=0.03971, over 971574.96 frames.], batch size: 12, lr: 3.39e-04 +2022-05-05 12:07:04,757 INFO [train.py:715] (7/8) Epoch 6, batch 8900, loss[loss=0.1341, simple_loss=0.2087, pruned_loss=0.02975, over 4852.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2205, pruned_loss=0.03953, over 971056.72 frames.], batch size: 20, lr: 3.39e-04 +2022-05-05 12:07:44,001 INFO [train.py:715] (7/8) Epoch 6, batch 8950, loss[loss=0.1368, simple_loss=0.2108, pruned_loss=0.0314, over 4949.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03937, over 971245.10 frames.], batch size: 29, lr: 3.38e-04 +2022-05-05 12:08:22,491 INFO [train.py:715] (7/8) Epoch 6, batch 9000, loss[loss=0.1368, simple_loss=0.2022, pruned_loss=0.03572, over 4825.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2201, pruned_loss=0.03955, over 971821.68 frames.], batch size: 27, lr: 3.38e-04 +2022-05-05 12:08:22,491 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 12:08:35,891 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1094, simple_loss=0.1946, pruned_loss=0.01213, over 914524.00 frames. 
+2022-05-05 12:09:14,898 INFO [train.py:715] (7/8) Epoch 6, batch 9050, loss[loss=0.1614, simple_loss=0.2279, pruned_loss=0.04749, over 4927.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2206, pruned_loss=0.03994, over 972436.16 frames.], batch size: 23, lr: 3.38e-04 +2022-05-05 12:09:53,934 INFO [train.py:715] (7/8) Epoch 6, batch 9100, loss[loss=0.1234, simple_loss=0.1972, pruned_loss=0.0248, over 4787.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2199, pruned_loss=0.03956, over 972396.58 frames.], batch size: 18, lr: 3.38e-04 +2022-05-05 12:10:33,370 INFO [train.py:715] (7/8) Epoch 6, batch 9150, loss[loss=0.1525, simple_loss=0.2198, pruned_loss=0.04256, over 4928.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2198, pruned_loss=0.03959, over 972633.84 frames.], batch size: 29, lr: 3.38e-04 +2022-05-05 12:11:11,398 INFO [train.py:715] (7/8) Epoch 6, batch 9200, loss[loss=0.1816, simple_loss=0.2472, pruned_loss=0.05795, over 4687.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2191, pruned_loss=0.03897, over 972329.49 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:11:50,804 INFO [train.py:715] (7/8) Epoch 6, batch 9250, loss[loss=0.1351, simple_loss=0.2163, pruned_loss=0.02696, over 4982.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2194, pruned_loss=0.0387, over 972628.90 frames.], batch size: 28, lr: 3.38e-04 +2022-05-05 12:12:29,885 INFO [train.py:715] (7/8) Epoch 6, batch 9300, loss[loss=0.1312, simple_loss=0.2146, pruned_loss=0.02392, over 4971.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03837, over 972977.41 frames.], batch size: 25, lr: 3.38e-04 +2022-05-05 12:13:08,399 INFO [train.py:715] (7/8) Epoch 6, batch 9350, loss[loss=0.1484, simple_loss=0.2188, pruned_loss=0.039, over 4959.00 frames.], tot_loss[loss=0.148, simple_loss=0.2187, pruned_loss=0.03865, over 971827.55 frames.], batch size: 24, lr: 3.38e-04 +2022-05-05 12:13:47,634 INFO [train.py:715] (7/8) Epoch 6, batch 9400, loss[loss=0.1464, simple_loss=0.2149, pruned_loss=0.03899, over 4922.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2202, pruned_loss=0.03918, over 971862.76 frames.], batch size: 23, lr: 3.38e-04 +2022-05-05 12:14:26,440 INFO [train.py:715] (7/8) Epoch 6, batch 9450, loss[loss=0.1417, simple_loss=0.2083, pruned_loss=0.03753, over 4771.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2192, pruned_loss=0.03907, over 971610.70 frames.], batch size: 14, lr: 3.38e-04 +2022-05-05 12:15:05,767 INFO [train.py:715] (7/8) Epoch 6, batch 9500, loss[loss=0.1583, simple_loss=0.2259, pruned_loss=0.04533, over 4819.00 frames.], tot_loss[loss=0.149, simple_loss=0.2195, pruned_loss=0.03927, over 972212.64 frames.], batch size: 26, lr: 3.38e-04 +2022-05-05 12:15:44,434 INFO [train.py:715] (7/8) Epoch 6, batch 9550, loss[loss=0.1679, simple_loss=0.2388, pruned_loss=0.04851, over 4782.00 frames.], tot_loss[loss=0.1492, simple_loss=0.22, pruned_loss=0.03915, over 972510.97 frames.], batch size: 17, lr: 3.38e-04 +2022-05-05 12:16:23,404 INFO [train.py:715] (7/8) Epoch 6, batch 9600, loss[loss=0.1412, simple_loss=0.2186, pruned_loss=0.03187, over 4815.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2205, pruned_loss=0.03936, over 971360.09 frames.], batch size: 27, lr: 3.38e-04 +2022-05-05 12:17:02,130 INFO [train.py:715] (7/8) Epoch 6, batch 9650, loss[loss=0.222, simple_loss=0.2884, pruned_loss=0.07784, over 4901.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2204, pruned_loss=0.03932, over 972000.42 frames.], batch size: 19, lr: 3.38e-04 +2022-05-05 12:17:40,454 INFO 
[train.py:715] (7/8) Epoch 6, batch 9700, loss[loss=0.1116, simple_loss=0.1838, pruned_loss=0.0197, over 4759.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03937, over 972285.21 frames.], batch size: 19, lr: 3.38e-04 +2022-05-05 12:18:19,758 INFO [train.py:715] (7/8) Epoch 6, batch 9750, loss[loss=0.1357, simple_loss=0.2018, pruned_loss=0.03485, over 4939.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03955, over 972108.36 frames.], batch size: 23, lr: 3.38e-04 +2022-05-05 12:18:59,480 INFO [train.py:715] (7/8) Epoch 6, batch 9800, loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03204, over 4784.00 frames.], tot_loss[loss=0.1498, simple_loss=0.221, pruned_loss=0.03934, over 973531.75 frames.], batch size: 14, lr: 3.38e-04 +2022-05-05 12:19:39,850 INFO [train.py:715] (7/8) Epoch 6, batch 9850, loss[loss=0.1984, simple_loss=0.2612, pruned_loss=0.06783, over 4759.00 frames.], tot_loss[loss=0.149, simple_loss=0.2201, pruned_loss=0.03898, over 972799.33 frames.], batch size: 16, lr: 3.38e-04 +2022-05-05 12:20:18,998 INFO [train.py:715] (7/8) Epoch 6, batch 9900, loss[loss=0.1427, simple_loss=0.2211, pruned_loss=0.03218, over 4776.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2198, pruned_loss=0.03891, over 973070.51 frames.], batch size: 14, lr: 3.38e-04 +2022-05-05 12:20:59,140 INFO [train.py:715] (7/8) Epoch 6, batch 9950, loss[loss=0.1489, simple_loss=0.2235, pruned_loss=0.03715, over 4821.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03926, over 972245.73 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:21:39,158 INFO [train.py:715] (7/8) Epoch 6, batch 10000, loss[loss=0.1536, simple_loss=0.2256, pruned_loss=0.04079, over 4907.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.03968, over 972674.16 frames.], batch size: 39, lr: 3.38e-04 +2022-05-05 12:22:17,402 INFO [train.py:715] (7/8) Epoch 6, batch 10050, loss[loss=0.1565, simple_loss=0.2229, pruned_loss=0.045, over 4854.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2214, pruned_loss=0.03944, over 972560.87 frames.], batch size: 20, lr: 3.38e-04 +2022-05-05 12:22:56,777 INFO [train.py:715] (7/8) Epoch 6, batch 10100, loss[loss=0.1465, simple_loss=0.2232, pruned_loss=0.03496, over 4768.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2211, pruned_loss=0.03951, over 973129.15 frames.], batch size: 14, lr: 3.38e-04 +2022-05-05 12:23:34,995 INFO [train.py:715] (7/8) Epoch 6, batch 10150, loss[loss=0.1552, simple_loss=0.2337, pruned_loss=0.03839, over 4866.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2211, pruned_loss=0.03926, over 972802.91 frames.], batch size: 20, lr: 3.38e-04 +2022-05-05 12:24:14,028 INFO [train.py:715] (7/8) Epoch 6, batch 10200, loss[loss=0.1262, simple_loss=0.2042, pruned_loss=0.02407, over 4686.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2208, pruned_loss=0.03934, over 971941.22 frames.], batch size: 15, lr: 3.38e-04 +2022-05-05 12:24:52,553 INFO [train.py:715] (7/8) Epoch 6, batch 10250, loss[loss=0.1254, simple_loss=0.1976, pruned_loss=0.02663, over 4947.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03925, over 972070.17 frames.], batch size: 21, lr: 3.37e-04 +2022-05-05 12:25:31,644 INFO [train.py:715] (7/8) Epoch 6, batch 10300, loss[loss=0.1675, simple_loss=0.2228, pruned_loss=0.05607, over 4785.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2202, pruned_loss=0.03912, over 971575.51 frames.], batch size: 17, lr: 3.37e-04 +2022-05-05 12:26:10,145 INFO [train.py:715] (7/8) Epoch 6, 
batch 10350, loss[loss=0.1633, simple_loss=0.2389, pruned_loss=0.04383, over 4805.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03944, over 971316.90 frames.], batch size: 21, lr: 3.37e-04 +2022-05-05 12:26:49,279 INFO [train.py:715] (7/8) Epoch 6, batch 10400, loss[loss=0.1339, simple_loss=0.2154, pruned_loss=0.02621, over 4765.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.03966, over 971697.13 frames.], batch size: 19, lr: 3.37e-04 +2022-05-05 12:27:27,708 INFO [train.py:715] (7/8) Epoch 6, batch 10450, loss[loss=0.1549, simple_loss=0.2259, pruned_loss=0.04194, over 4692.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2207, pruned_loss=0.03944, over 971381.67 frames.], batch size: 15, lr: 3.37e-04 +2022-05-05 12:28:06,361 INFO [train.py:715] (7/8) Epoch 6, batch 10500, loss[loss=0.1434, simple_loss=0.2143, pruned_loss=0.03628, over 4802.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2207, pruned_loss=0.03904, over 971261.94 frames.], batch size: 24, lr: 3.37e-04 +2022-05-05 12:28:45,429 INFO [train.py:715] (7/8) Epoch 6, batch 10550, loss[loss=0.1446, simple_loss=0.2152, pruned_loss=0.03702, over 4859.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2212, pruned_loss=0.03896, over 971228.51 frames.], batch size: 32, lr: 3.37e-04 +2022-05-05 12:29:23,700 INFO [train.py:715] (7/8) Epoch 6, batch 10600, loss[loss=0.1266, simple_loss=0.1975, pruned_loss=0.02785, over 4981.00 frames.], tot_loss[loss=0.15, simple_loss=0.2222, pruned_loss=0.03888, over 972271.12 frames.], batch size: 25, lr: 3.37e-04 +2022-05-05 12:30:02,908 INFO [train.py:715] (7/8) Epoch 6, batch 10650, loss[loss=0.1582, simple_loss=0.2381, pruned_loss=0.03918, over 4876.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2222, pruned_loss=0.03934, over 972749.78 frames.], batch size: 16, lr: 3.37e-04 +2022-05-05 12:30:41,620 INFO [train.py:715] (7/8) Epoch 6, batch 10700, loss[loss=0.1496, simple_loss=0.2248, pruned_loss=0.0372, over 4839.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2219, pruned_loss=0.03925, over 973500.06 frames.], batch size: 26, lr: 3.37e-04 +2022-05-05 12:31:20,569 INFO [train.py:715] (7/8) Epoch 6, batch 10750, loss[loss=0.1567, simple_loss=0.225, pruned_loss=0.04417, over 4846.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2208, pruned_loss=0.03884, over 973479.93 frames.], batch size: 34, lr: 3.37e-04 +2022-05-05 12:31:59,032 INFO [train.py:715] (7/8) Epoch 6, batch 10800, loss[loss=0.167, simple_loss=0.2312, pruned_loss=0.05138, over 4915.00 frames.], tot_loss[loss=0.1494, simple_loss=0.221, pruned_loss=0.03892, over 973117.45 frames.], batch size: 39, lr: 3.37e-04 +2022-05-05 12:32:37,568 INFO [train.py:715] (7/8) Epoch 6, batch 10850, loss[loss=0.1675, simple_loss=0.2316, pruned_loss=0.05175, over 4798.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2197, pruned_loss=0.03859, over 973016.67 frames.], batch size: 24, lr: 3.37e-04 +2022-05-05 12:33:15,994 INFO [train.py:715] (7/8) Epoch 6, batch 10900, loss[loss=0.1507, simple_loss=0.2175, pruned_loss=0.04196, over 4992.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2202, pruned_loss=0.03912, over 972380.02 frames.], batch size: 16, lr: 3.37e-04 +2022-05-05 12:33:54,115 INFO [train.py:715] (7/8) Epoch 6, batch 10950, loss[loss=0.1349, simple_loss=0.2117, pruned_loss=0.02902, over 4857.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2195, pruned_loss=0.039, over 971144.45 frames.], batch size: 20, lr: 3.37e-04 +2022-05-05 12:34:33,268 INFO [train.py:715] (7/8) Epoch 6, batch 11000, 
loss[loss=0.1764, simple_loss=0.2377, pruned_loss=0.05761, over 4936.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2189, pruned_loss=0.03864, over 971820.02 frames.], batch size: 35, lr: 3.37e-04 +2022-05-05 12:35:11,625 INFO [train.py:715] (7/8) Epoch 6, batch 11050, loss[loss=0.1541, simple_loss=0.2263, pruned_loss=0.04101, over 4944.00 frames.], tot_loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03858, over 972249.85 frames.], batch size: 21, lr: 3.37e-04 +2022-05-05 12:35:50,639 INFO [train.py:715] (7/8) Epoch 6, batch 11100, loss[loss=0.146, simple_loss=0.2119, pruned_loss=0.04009, over 4911.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.03845, over 972789.77 frames.], batch size: 23, lr: 3.37e-04 +2022-05-05 12:36:29,030 INFO [train.py:715] (7/8) Epoch 6, batch 11150, loss[loss=0.1311, simple_loss=0.2005, pruned_loss=0.03084, over 4973.00 frames.], tot_loss[loss=0.149, simple_loss=0.2199, pruned_loss=0.03902, over 973182.74 frames.], batch size: 14, lr: 3.37e-04 +2022-05-05 12:37:07,406 INFO [train.py:715] (7/8) Epoch 6, batch 11200, loss[loss=0.1885, simple_loss=0.2421, pruned_loss=0.06745, over 4951.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2197, pruned_loss=0.03891, over 972364.67 frames.], batch size: 39, lr: 3.37e-04 +2022-05-05 12:37:45,842 INFO [train.py:715] (7/8) Epoch 6, batch 11250, loss[loss=0.1449, simple_loss=0.2063, pruned_loss=0.04178, over 4851.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2197, pruned_loss=0.03906, over 972411.91 frames.], batch size: 32, lr: 3.37e-04 +2022-05-05 12:38:24,403 INFO [train.py:715] (7/8) Epoch 6, batch 11300, loss[loss=0.1251, simple_loss=0.2027, pruned_loss=0.0238, over 4819.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2194, pruned_loss=0.03861, over 972686.72 frames.], batch size: 27, lr: 3.37e-04 +2022-05-05 12:39:03,682 INFO [train.py:715] (7/8) Epoch 6, batch 11350, loss[loss=0.2314, simple_loss=0.2956, pruned_loss=0.08365, over 4962.00 frames.], tot_loss[loss=0.149, simple_loss=0.2196, pruned_loss=0.03919, over 972555.67 frames.], batch size: 24, lr: 3.37e-04 +2022-05-05 12:39:42,622 INFO [train.py:715] (7/8) Epoch 6, batch 11400, loss[loss=0.1599, simple_loss=0.2295, pruned_loss=0.04516, over 4818.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2195, pruned_loss=0.03914, over 973345.32 frames.], batch size: 25, lr: 3.37e-04 +2022-05-05 12:40:21,681 INFO [train.py:715] (7/8) Epoch 6, batch 11450, loss[loss=0.1515, simple_loss=0.224, pruned_loss=0.03946, over 4986.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2199, pruned_loss=0.03954, over 972300.74 frames.], batch size: 25, lr: 3.37e-04 +2022-05-05 12:40:59,948 INFO [train.py:715] (7/8) Epoch 6, batch 11500, loss[loss=0.1327, simple_loss=0.2062, pruned_loss=0.02956, over 4824.00 frames.], tot_loss[loss=0.1496, simple_loss=0.22, pruned_loss=0.03959, over 972540.06 frames.], batch size: 25, lr: 3.37e-04 +2022-05-05 12:41:38,298 INFO [train.py:715] (7/8) Epoch 6, batch 11550, loss[loss=0.1655, simple_loss=0.2354, pruned_loss=0.04782, over 4785.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2209, pruned_loss=0.03975, over 972393.78 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:42:17,677 INFO [train.py:715] (7/8) Epoch 6, batch 11600, loss[loss=0.1791, simple_loss=0.2559, pruned_loss=0.05111, over 4984.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03945, over 972858.64 frames.], batch size: 28, lr: 3.36e-04 +2022-05-05 12:42:56,131 INFO [train.py:715] (7/8) Epoch 6, batch 11650, loss[loss=0.1388, 
simple_loss=0.1993, pruned_loss=0.03917, over 4749.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2209, pruned_loss=0.03907, over 972713.99 frames.], batch size: 19, lr: 3.36e-04 +2022-05-05 12:43:34,997 INFO [train.py:715] (7/8) Epoch 6, batch 11700, loss[loss=0.1483, simple_loss=0.2202, pruned_loss=0.0382, over 4785.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03836, over 972498.51 frames.], batch size: 14, lr: 3.36e-04 +2022-05-05 12:44:13,934 INFO [train.py:715] (7/8) Epoch 6, batch 11750, loss[loss=0.1401, simple_loss=0.212, pruned_loss=0.0341, over 4873.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2203, pruned_loss=0.03849, over 972801.30 frames.], batch size: 22, lr: 3.36e-04 +2022-05-05 12:44:53,167 INFO [train.py:715] (7/8) Epoch 6, batch 11800, loss[loss=0.1128, simple_loss=0.1827, pruned_loss=0.02146, over 4745.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2201, pruned_loss=0.03789, over 972761.90 frames.], batch size: 12, lr: 3.36e-04 +2022-05-05 12:45:31,847 INFO [train.py:715] (7/8) Epoch 6, batch 11850, loss[loss=0.1638, simple_loss=0.2347, pruned_loss=0.04643, over 4975.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2199, pruned_loss=0.03824, over 972685.82 frames.], batch size: 14, lr: 3.36e-04 +2022-05-05 12:46:10,416 INFO [train.py:715] (7/8) Epoch 6, batch 11900, loss[loss=0.1279, simple_loss=0.2004, pruned_loss=0.02768, over 4760.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03864, over 972444.25 frames.], batch size: 19, lr: 3.36e-04 +2022-05-05 12:46:49,723 INFO [train.py:715] (7/8) Epoch 6, batch 11950, loss[loss=0.1725, simple_loss=0.2388, pruned_loss=0.05309, over 4966.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03845, over 972608.71 frames.], batch size: 28, lr: 3.36e-04 +2022-05-05 12:47:28,222 INFO [train.py:715] (7/8) Epoch 6, batch 12000, loss[loss=0.1456, simple_loss=0.2185, pruned_loss=0.03631, over 4813.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03818, over 972087.77 frames.], batch size: 25, lr: 3.36e-04 +2022-05-05 12:47:28,223 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 12:47:37,945 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1091, simple_loss=0.1942, pruned_loss=0.01199, over 914524.00 frames. 
+2022-05-05 12:48:16,699 INFO [train.py:715] (7/8) Epoch 6, batch 12050, loss[loss=0.1545, simple_loss=0.23, pruned_loss=0.03953, over 4835.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03849, over 972392.01 frames.], batch size: 27, lr: 3.36e-04 +2022-05-05 12:48:56,375 INFO [train.py:715] (7/8) Epoch 6, batch 12100, loss[loss=0.1918, simple_loss=0.2596, pruned_loss=0.06202, over 4773.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2201, pruned_loss=0.03868, over 972318.00 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:49:35,321 INFO [train.py:715] (7/8) Epoch 6, batch 12150, loss[loss=0.1677, simple_loss=0.2408, pruned_loss=0.04727, over 4949.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2203, pruned_loss=0.0389, over 972263.79 frames.], batch size: 23, lr: 3.36e-04 +2022-05-05 12:50:14,106 INFO [train.py:715] (7/8) Epoch 6, batch 12200, loss[loss=0.1554, simple_loss=0.2361, pruned_loss=0.03731, over 4863.00 frames.], tot_loss[loss=0.1496, simple_loss=0.221, pruned_loss=0.0391, over 972575.97 frames.], batch size: 20, lr: 3.36e-04 +2022-05-05 12:50:53,315 INFO [train.py:715] (7/8) Epoch 6, batch 12250, loss[loss=0.1936, simple_loss=0.259, pruned_loss=0.06413, over 4824.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2212, pruned_loss=0.03917, over 972141.62 frames.], batch size: 25, lr: 3.36e-04 +2022-05-05 12:51:32,108 INFO [train.py:715] (7/8) Epoch 6, batch 12300, loss[loss=0.164, simple_loss=0.2325, pruned_loss=0.04777, over 4890.00 frames.], tot_loss[loss=0.15, simple_loss=0.2214, pruned_loss=0.03923, over 972946.57 frames.], batch size: 16, lr: 3.36e-04 +2022-05-05 12:52:11,887 INFO [train.py:715] (7/8) Epoch 6, batch 12350, loss[loss=0.1443, simple_loss=0.2148, pruned_loss=0.03694, over 4984.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2216, pruned_loss=0.03947, over 972268.71 frames.], batch size: 14, lr: 3.36e-04 +2022-05-05 12:52:50,511 INFO [train.py:715] (7/8) Epoch 6, batch 12400, loss[loss=0.1475, simple_loss=0.2142, pruned_loss=0.04038, over 4908.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2212, pruned_loss=0.03957, over 972939.31 frames.], batch size: 23, lr: 3.36e-04 +2022-05-05 12:53:29,626 INFO [train.py:715] (7/8) Epoch 6, batch 12450, loss[loss=0.1524, simple_loss=0.2234, pruned_loss=0.04071, over 4812.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2207, pruned_loss=0.03901, over 973020.56 frames.], batch size: 15, lr: 3.36e-04 +2022-05-05 12:54:08,744 INFO [train.py:715] (7/8) Epoch 6, batch 12500, loss[loss=0.158, simple_loss=0.2257, pruned_loss=0.04514, over 4980.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2205, pruned_loss=0.03929, over 972996.22 frames.], batch size: 31, lr: 3.36e-04 +2022-05-05 12:54:47,050 INFO [train.py:715] (7/8) Epoch 6, batch 12550, loss[loss=0.1537, simple_loss=0.2235, pruned_loss=0.04195, over 4975.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2209, pruned_loss=0.03939, over 971850.16 frames.], batch size: 28, lr: 3.36e-04 +2022-05-05 12:55:26,407 INFO [train.py:715] (7/8) Epoch 6, batch 12600, loss[loss=0.1552, simple_loss=0.2225, pruned_loss=0.0439, over 4807.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03949, over 971544.03 frames.], batch size: 13, lr: 3.36e-04 +2022-05-05 12:56:05,096 INFO [train.py:715] (7/8) Epoch 6, batch 12650, loss[loss=0.1373, simple_loss=0.2072, pruned_loss=0.03368, over 4776.00 frames.], tot_loss[loss=0.1505, simple_loss=0.221, pruned_loss=0.04, over 972132.16 frames.], batch size: 18, lr: 3.36e-04 +2022-05-05 12:56:43,911 INFO 
[train.py:715] (7/8) Epoch 6, batch 12700, loss[loss=0.1432, simple_loss=0.2207, pruned_loss=0.03289, over 4802.00 frames.], tot_loss[loss=0.15, simple_loss=0.2209, pruned_loss=0.03959, over 971899.11 frames.], batch size: 21, lr: 3.36e-04 +2022-05-05 12:57:22,046 INFO [train.py:715] (7/8) Epoch 6, batch 12750, loss[loss=0.155, simple_loss=0.2305, pruned_loss=0.03971, over 4806.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03928, over 972111.77 frames.], batch size: 26, lr: 3.36e-04 +2022-05-05 12:58:01,015 INFO [train.py:715] (7/8) Epoch 6, batch 12800, loss[loss=0.1432, simple_loss=0.2138, pruned_loss=0.03633, over 4811.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.0398, over 972253.40 frames.], batch size: 25, lr: 3.36e-04 +2022-05-05 12:58:39,733 INFO [train.py:715] (7/8) Epoch 6, batch 12850, loss[loss=0.1519, simple_loss=0.2234, pruned_loss=0.04021, over 4861.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2212, pruned_loss=0.03968, over 972131.16 frames.], batch size: 32, lr: 3.35e-04 +2022-05-05 12:59:18,383 INFO [train.py:715] (7/8) Epoch 6, batch 12900, loss[loss=0.1373, simple_loss=0.2115, pruned_loss=0.03151, over 4761.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.0396, over 971460.84 frames.], batch size: 14, lr: 3.35e-04 +2022-05-05 12:59:58,337 INFO [train.py:715] (7/8) Epoch 6, batch 12950, loss[loss=0.1396, simple_loss=0.2131, pruned_loss=0.03304, over 4793.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2206, pruned_loss=0.03958, over 971786.50 frames.], batch size: 24, lr: 3.35e-04 +2022-05-05 13:00:37,482 INFO [train.py:715] (7/8) Epoch 6, batch 13000, loss[loss=0.1259, simple_loss=0.1968, pruned_loss=0.02756, over 4785.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2205, pruned_loss=0.03955, over 971809.74 frames.], batch size: 17, lr: 3.35e-04 +2022-05-05 13:01:16,472 INFO [train.py:715] (7/8) Epoch 6, batch 13050, loss[loss=0.1534, simple_loss=0.2321, pruned_loss=0.03732, over 4949.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2209, pruned_loss=0.0398, over 970793.46 frames.], batch size: 21, lr: 3.35e-04 +2022-05-05 13:01:54,768 INFO [train.py:715] (7/8) Epoch 6, batch 13100, loss[loss=0.1228, simple_loss=0.2008, pruned_loss=0.02244, over 4705.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2211, pruned_loss=0.03974, over 971275.00 frames.], batch size: 15, lr: 3.35e-04 +2022-05-05 13:02:34,348 INFO [train.py:715] (7/8) Epoch 6, batch 13150, loss[loss=0.1437, simple_loss=0.2292, pruned_loss=0.02912, over 4879.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03938, over 971960.05 frames.], batch size: 22, lr: 3.35e-04 +2022-05-05 13:03:12,923 INFO [train.py:715] (7/8) Epoch 6, batch 13200, loss[loss=0.1395, simple_loss=0.2172, pruned_loss=0.03088, over 4975.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2214, pruned_loss=0.03995, over 972469.88 frames.], batch size: 15, lr: 3.35e-04 +2022-05-05 13:03:51,763 INFO [train.py:715] (7/8) Epoch 6, batch 13250, loss[loss=0.1806, simple_loss=0.2558, pruned_loss=0.05272, over 4756.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2215, pruned_loss=0.04018, over 972103.09 frames.], batch size: 14, lr: 3.35e-04 +2022-05-05 13:04:30,642 INFO [train.py:715] (7/8) Epoch 6, batch 13300, loss[loss=0.1475, simple_loss=0.219, pruned_loss=0.03799, over 4779.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2201, pruned_loss=0.03937, over 972203.95 frames.], batch size: 14, lr: 3.35e-04 +2022-05-05 13:05:09,757 INFO [train.py:715] (7/8) Epoch 
6, batch 13350, loss[loss=0.1673, simple_loss=0.2523, pruned_loss=0.04114, over 4982.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2199, pruned_loss=0.03889, over 971927.98 frames.], batch size: 24, lr: 3.35e-04 +2022-05-05 13:05:48,885 INFO [train.py:715] (7/8) Epoch 6, batch 13400, loss[loss=0.149, simple_loss=0.2169, pruned_loss=0.04058, over 4872.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2189, pruned_loss=0.03834, over 971795.35 frames.], batch size: 32, lr: 3.35e-04 +2022-05-05 13:06:27,484 INFO [train.py:715] (7/8) Epoch 6, batch 13450, loss[loss=0.1814, simple_loss=0.2605, pruned_loss=0.05113, over 4903.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2205, pruned_loss=0.03912, over 972005.21 frames.], batch size: 39, lr: 3.35e-04 +2022-05-05 13:07:07,012 INFO [train.py:715] (7/8) Epoch 6, batch 13500, loss[loss=0.1528, simple_loss=0.2233, pruned_loss=0.04119, over 4981.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2207, pruned_loss=0.03893, over 972657.87 frames.], batch size: 27, lr: 3.35e-04 +2022-05-05 13:07:45,024 INFO [train.py:715] (7/8) Epoch 6, batch 13550, loss[loss=0.1702, simple_loss=0.2311, pruned_loss=0.05468, over 4811.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2202, pruned_loss=0.03936, over 971924.51 frames.], batch size: 14, lr: 3.35e-04 +2022-05-05 13:08:23,967 INFO [train.py:715] (7/8) Epoch 6, batch 13600, loss[loss=0.1395, simple_loss=0.2157, pruned_loss=0.03163, over 4978.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2204, pruned_loss=0.0396, over 972529.95 frames.], batch size: 25, lr: 3.35e-04 +2022-05-05 13:09:03,111 INFO [train.py:715] (7/8) Epoch 6, batch 13650, loss[loss=0.1471, simple_loss=0.2279, pruned_loss=0.03319, over 4974.00 frames.], tot_loss[loss=0.149, simple_loss=0.2197, pruned_loss=0.03912, over 973057.66 frames.], batch size: 24, lr: 3.35e-04 +2022-05-05 13:09:42,436 INFO [train.py:715] (7/8) Epoch 6, batch 13700, loss[loss=0.1492, simple_loss=0.2148, pruned_loss=0.04181, over 4987.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2197, pruned_loss=0.0389, over 973530.03 frames.], batch size: 28, lr: 3.35e-04 +2022-05-05 13:10:21,544 INFO [train.py:715] (7/8) Epoch 6, batch 13750, loss[loss=0.1462, simple_loss=0.223, pruned_loss=0.03475, over 4791.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2197, pruned_loss=0.03879, over 973977.34 frames.], batch size: 17, lr: 3.35e-04 +2022-05-05 13:11:00,144 INFO [train.py:715] (7/8) Epoch 6, batch 13800, loss[loss=0.1829, simple_loss=0.2318, pruned_loss=0.06693, over 4856.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2207, pruned_loss=0.04006, over 974643.16 frames.], batch size: 20, lr: 3.35e-04 +2022-05-05 13:11:40,115 INFO [train.py:715] (7/8) Epoch 6, batch 13850, loss[loss=0.1376, simple_loss=0.218, pruned_loss=0.02858, over 4757.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2204, pruned_loss=0.03965, over 974190.52 frames.], batch size: 19, lr: 3.35e-04 +2022-05-05 13:12:18,448 INFO [train.py:715] (7/8) Epoch 6, batch 13900, loss[loss=0.1405, simple_loss=0.2224, pruned_loss=0.02931, over 4862.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2203, pruned_loss=0.0393, over 973662.68 frames.], batch size: 20, lr: 3.35e-04 +2022-05-05 13:12:57,460 INFO [train.py:715] (7/8) Epoch 6, batch 13950, loss[loss=0.151, simple_loss=0.2266, pruned_loss=0.03772, over 4874.00 frames.], tot_loss[loss=0.1489, simple_loss=0.22, pruned_loss=0.03888, over 972753.51 frames.], batch size: 22, lr: 3.35e-04 +2022-05-05 13:13:36,063 INFO [train.py:715] (7/8) Epoch 6, batch 14000, 
loss[loss=0.1558, simple_loss=0.2174, pruned_loss=0.04714, over 4980.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2207, pruned_loss=0.03911, over 973063.12 frames.], batch size: 31, lr: 3.35e-04 +2022-05-05 13:14:15,110 INFO [train.py:715] (7/8) Epoch 6, batch 14050, loss[loss=0.157, simple_loss=0.2172, pruned_loss=0.0484, over 4778.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2204, pruned_loss=0.03861, over 972966.31 frames.], batch size: 14, lr: 3.35e-04 +2022-05-05 13:14:53,529 INFO [train.py:715] (7/8) Epoch 6, batch 14100, loss[loss=0.142, simple_loss=0.2288, pruned_loss=0.02755, over 4779.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2208, pruned_loss=0.03867, over 972638.61 frames.], batch size: 17, lr: 3.35e-04 +2022-05-05 13:15:32,017 INFO [train.py:715] (7/8) Epoch 6, batch 14150, loss[loss=0.1253, simple_loss=0.1997, pruned_loss=0.02543, over 4983.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2204, pruned_loss=0.03853, over 972640.74 frames.], batch size: 28, lr: 3.35e-04 +2022-05-05 13:16:11,449 INFO [train.py:715] (7/8) Epoch 6, batch 14200, loss[loss=0.2024, simple_loss=0.2638, pruned_loss=0.07045, over 4834.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2202, pruned_loss=0.03875, over 972552.92 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:16:50,086 INFO [train.py:715] (7/8) Epoch 6, batch 14250, loss[loss=0.1267, simple_loss=0.1985, pruned_loss=0.02742, over 4765.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2204, pruned_loss=0.03887, over 971981.48 frames.], batch size: 19, lr: 3.34e-04 +2022-05-05 13:17:29,126 INFO [train.py:715] (7/8) Epoch 6, batch 14300, loss[loss=0.2107, simple_loss=0.2659, pruned_loss=0.07772, over 4890.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2202, pruned_loss=0.03848, over 972589.25 frames.], batch size: 16, lr: 3.34e-04 +2022-05-05 13:18:07,580 INFO [train.py:715] (7/8) Epoch 6, batch 14350, loss[loss=0.1359, simple_loss=0.2029, pruned_loss=0.0344, over 4742.00 frames.], tot_loss[loss=0.1496, simple_loss=0.221, pruned_loss=0.03907, over 971117.60 frames.], batch size: 19, lr: 3.34e-04 +2022-05-05 13:18:47,508 INFO [train.py:715] (7/8) Epoch 6, batch 14400, loss[loss=0.1614, simple_loss=0.2383, pruned_loss=0.0422, over 4947.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2202, pruned_loss=0.03912, over 971567.27 frames.], batch size: 21, lr: 3.34e-04 +2022-05-05 13:19:25,857 INFO [train.py:715] (7/8) Epoch 6, batch 14450, loss[loss=0.187, simple_loss=0.2538, pruned_loss=0.06006, over 4894.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2197, pruned_loss=0.03867, over 971608.32 frames.], batch size: 17, lr: 3.34e-04 +2022-05-05 13:20:04,246 INFO [train.py:715] (7/8) Epoch 6, batch 14500, loss[loss=0.1434, simple_loss=0.2144, pruned_loss=0.03619, over 4755.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2194, pruned_loss=0.03875, over 972023.31 frames.], batch size: 19, lr: 3.34e-04 +2022-05-05 13:20:43,935 INFO [train.py:715] (7/8) Epoch 6, batch 14550, loss[loss=0.1615, simple_loss=0.2425, pruned_loss=0.04019, over 4808.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.03883, over 971810.14 frames.], batch size: 21, lr: 3.34e-04 +2022-05-05 13:21:22,653 INFO [train.py:715] (7/8) Epoch 6, batch 14600, loss[loss=0.1329, simple_loss=0.1965, pruned_loss=0.03462, over 4749.00 frames.], tot_loss[loss=0.15, simple_loss=0.221, pruned_loss=0.03946, over 971940.91 frames.], batch size: 16, lr: 3.34e-04 +2022-05-05 13:22:01,119 INFO [train.py:715] (7/8) Epoch 6, batch 14650, loss[loss=0.1562, 
simple_loss=0.2241, pruned_loss=0.04412, over 4785.00 frames.], tot_loss[loss=0.1498, simple_loss=0.221, pruned_loss=0.03931, over 971863.73 frames.], batch size: 17, lr: 3.34e-04 +2022-05-05 13:22:40,130 INFO [train.py:715] (7/8) Epoch 6, batch 14700, loss[loss=0.1518, simple_loss=0.2278, pruned_loss=0.03787, over 4652.00 frames.], tot_loss[loss=0.149, simple_loss=0.22, pruned_loss=0.03903, over 971416.13 frames.], batch size: 13, lr: 3.34e-04 +2022-05-05 13:23:19,674 INFO [train.py:715] (7/8) Epoch 6, batch 14750, loss[loss=0.1574, simple_loss=0.2312, pruned_loss=0.04183, over 4782.00 frames.], tot_loss[loss=0.148, simple_loss=0.219, pruned_loss=0.03848, over 971121.59 frames.], batch size: 17, lr: 3.34e-04 +2022-05-05 13:23:57,831 INFO [train.py:715] (7/8) Epoch 6, batch 14800, loss[loss=0.1341, simple_loss=0.2153, pruned_loss=0.0264, over 4841.00 frames.], tot_loss[loss=0.149, simple_loss=0.22, pruned_loss=0.03898, over 970430.49 frames.], batch size: 15, lr: 3.34e-04 +2022-05-05 13:24:35,994 INFO [train.py:715] (7/8) Epoch 6, batch 14850, loss[loss=0.1374, simple_loss=0.2101, pruned_loss=0.03235, over 4794.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2209, pruned_loss=0.0393, over 970279.08 frames.], batch size: 24, lr: 3.34e-04 +2022-05-05 13:25:15,104 INFO [train.py:715] (7/8) Epoch 6, batch 14900, loss[loss=0.15, simple_loss=0.2213, pruned_loss=0.03931, over 4805.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2204, pruned_loss=0.03913, over 970001.84 frames.], batch size: 26, lr: 3.34e-04 +2022-05-05 13:25:53,358 INFO [train.py:715] (7/8) Epoch 6, batch 14950, loss[loss=0.1492, simple_loss=0.2244, pruned_loss=0.03702, over 4955.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03924, over 970802.38 frames.], batch size: 39, lr: 3.34e-04 +2022-05-05 13:26:32,024 INFO [train.py:715] (7/8) Epoch 6, batch 15000, loss[loss=0.1226, simple_loss=0.1912, pruned_loss=0.02693, over 4988.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2207, pruned_loss=0.03912, over 971016.34 frames.], batch size: 28, lr: 3.34e-04 +2022-05-05 13:26:32,025 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 13:26:41,819 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1091, simple_loss=0.1941, pruned_loss=0.01202, over 914524.00 frames. 
+2022-05-05 13:27:20,601 INFO [train.py:715] (7/8) Epoch 6, batch 15050, loss[loss=0.1739, simple_loss=0.2393, pruned_loss=0.05425, over 4836.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2208, pruned_loss=0.03933, over 971234.04 frames.], batch size: 30, lr: 3.34e-04 +2022-05-05 13:27:59,352 INFO [train.py:715] (7/8) Epoch 6, batch 15100, loss[loss=0.1543, simple_loss=0.2275, pruned_loss=0.04059, over 4867.00 frames.], tot_loss[loss=0.1487, simple_loss=0.22, pruned_loss=0.03866, over 971045.63 frames.], batch size: 38, lr: 3.34e-04 +2022-05-05 13:28:41,260 INFO [train.py:715] (7/8) Epoch 6, batch 15150, loss[loss=0.1823, simple_loss=0.2555, pruned_loss=0.0545, over 4826.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03887, over 971287.78 frames.], batch size: 26, lr: 3.34e-04 +2022-05-05 13:29:19,830 INFO [train.py:715] (7/8) Epoch 6, batch 15200, loss[loss=0.1385, simple_loss=0.2158, pruned_loss=0.0306, over 4819.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2195, pruned_loss=0.03895, over 971870.25 frames.], batch size: 25, lr: 3.34e-04 +2022-05-05 13:29:58,373 INFO [train.py:715] (7/8) Epoch 6, batch 15250, loss[loss=0.1703, simple_loss=0.2313, pruned_loss=0.05466, over 4844.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2201, pruned_loss=0.03906, over 971799.08 frames.], batch size: 20, lr: 3.34e-04 +2022-05-05 13:30:37,906 INFO [train.py:715] (7/8) Epoch 6, batch 15300, loss[loss=0.1399, simple_loss=0.2153, pruned_loss=0.03226, over 4933.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2202, pruned_loss=0.03905, over 971962.70 frames.], batch size: 29, lr: 3.34e-04 +2022-05-05 13:31:15,932 INFO [train.py:715] (7/8) Epoch 6, batch 15350, loss[loss=0.1298, simple_loss=0.2038, pruned_loss=0.02786, over 4849.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2192, pruned_loss=0.03808, over 972521.22 frames.], batch size: 20, lr: 3.34e-04 +2022-05-05 13:31:54,940 INFO [train.py:715] (7/8) Epoch 6, batch 15400, loss[loss=0.1339, simple_loss=0.2087, pruned_loss=0.02952, over 4799.00 frames.], tot_loss[loss=0.148, simple_loss=0.2192, pruned_loss=0.03842, over 972459.27 frames.], batch size: 13, lr: 3.34e-04 +2022-05-05 13:32:33,863 INFO [train.py:715] (7/8) Epoch 6, batch 15450, loss[loss=0.1256, simple_loss=0.1996, pruned_loss=0.02583, over 4887.00 frames.], tot_loss[loss=0.148, simple_loss=0.219, pruned_loss=0.03847, over 971888.86 frames.], batch size: 22, lr: 3.34e-04 +2022-05-05 13:33:13,327 INFO [train.py:715] (7/8) Epoch 6, batch 15500, loss[loss=0.182, simple_loss=0.2587, pruned_loss=0.05264, over 4898.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2206, pruned_loss=0.03899, over 971671.98 frames.], batch size: 18, lr: 3.34e-04 +2022-05-05 13:33:51,506 INFO [train.py:715] (7/8) Epoch 6, batch 15550, loss[loss=0.1337, simple_loss=0.209, pruned_loss=0.02919, over 4987.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2205, pruned_loss=0.03858, over 972383.62 frames.], batch size: 28, lr: 3.33e-04 +2022-05-05 13:34:30,401 INFO [train.py:715] (7/8) Epoch 6, batch 15600, loss[loss=0.1832, simple_loss=0.2521, pruned_loss=0.05719, over 4757.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2203, pruned_loss=0.03865, over 971441.40 frames.], batch size: 19, lr: 3.33e-04 +2022-05-05 13:35:09,325 INFO [train.py:715] (7/8) Epoch 6, batch 15650, loss[loss=0.1435, simple_loss=0.207, pruned_loss=0.04006, over 4904.00 frames.], tot_loss[loss=0.149, simple_loss=0.2202, pruned_loss=0.03892, over 971484.51 frames.], batch size: 17, lr: 3.33e-04 +2022-05-05 13:35:47,371 INFO 
[train.py:715] (7/8) Epoch 6, batch 15700, loss[loss=0.1234, simple_loss=0.2037, pruned_loss=0.02157, over 4659.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2192, pruned_loss=0.03848, over 972119.96 frames.], batch size: 13, lr: 3.33e-04 +2022-05-05 13:36:26,051 INFO [train.py:715] (7/8) Epoch 6, batch 15750, loss[loss=0.158, simple_loss=0.2164, pruned_loss=0.04977, over 4965.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2197, pruned_loss=0.0387, over 972804.90 frames.], batch size: 24, lr: 3.33e-04 +2022-05-05 13:37:04,793 INFO [train.py:715] (7/8) Epoch 6, batch 15800, loss[loss=0.1492, simple_loss=0.221, pruned_loss=0.03871, over 4768.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03831, over 973531.73 frames.], batch size: 12, lr: 3.33e-04 +2022-05-05 13:37:43,838 INFO [train.py:715] (7/8) Epoch 6, batch 15850, loss[loss=0.1353, simple_loss=0.2018, pruned_loss=0.03441, over 4844.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2192, pruned_loss=0.03844, over 973926.53 frames.], batch size: 13, lr: 3.33e-04 +2022-05-05 13:38:22,281 INFO [train.py:715] (7/8) Epoch 6, batch 15900, loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03227, over 4759.00 frames.], tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.03809, over 973643.10 frames.], batch size: 14, lr: 3.33e-04 +2022-05-05 13:39:00,649 INFO [train.py:715] (7/8) Epoch 6, batch 15950, loss[loss=0.1474, simple_loss=0.2159, pruned_loss=0.03946, over 4923.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.03877, over 972797.78 frames.], batch size: 29, lr: 3.33e-04 +2022-05-05 13:39:39,974 INFO [train.py:715] (7/8) Epoch 6, batch 16000, loss[loss=0.1692, simple_loss=0.2301, pruned_loss=0.05418, over 4816.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2202, pruned_loss=0.039, over 973046.90 frames.], batch size: 13, lr: 3.33e-04 +2022-05-05 13:40:18,429 INFO [train.py:715] (7/8) Epoch 6, batch 16050, loss[loss=0.1588, simple_loss=0.22, pruned_loss=0.04885, over 4874.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2209, pruned_loss=0.03932, over 973323.00 frames.], batch size: 32, lr: 3.33e-04 +2022-05-05 13:40:56,905 INFO [train.py:715] (7/8) Epoch 6, batch 16100, loss[loss=0.128, simple_loss=0.2071, pruned_loss=0.02443, over 4917.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.03843, over 972013.20 frames.], batch size: 18, lr: 3.33e-04 +2022-05-05 13:41:35,298 INFO [train.py:715] (7/8) Epoch 6, batch 16150, loss[loss=0.1708, simple_loss=0.224, pruned_loss=0.05884, over 4837.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2204, pruned_loss=0.03901, over 971523.99 frames.], batch size: 15, lr: 3.33e-04 +2022-05-05 13:42:14,797 INFO [train.py:715] (7/8) Epoch 6, batch 16200, loss[loss=0.1553, simple_loss=0.2307, pruned_loss=0.03994, over 4794.00 frames.], tot_loss[loss=0.1486, simple_loss=0.22, pruned_loss=0.03857, over 970116.62 frames.], batch size: 24, lr: 3.33e-04 +2022-05-05 13:42:53,108 INFO [train.py:715] (7/8) Epoch 6, batch 16250, loss[loss=0.1821, simple_loss=0.2521, pruned_loss=0.05605, over 4759.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2212, pruned_loss=0.03948, over 970745.84 frames.], batch size: 19, lr: 3.33e-04 +2022-05-05 13:43:31,725 INFO [train.py:715] (7/8) Epoch 6, batch 16300, loss[loss=0.1476, simple_loss=0.2183, pruned_loss=0.03842, over 4811.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2207, pruned_loss=0.03894, over 970872.71 frames.], batch size: 21, lr: 3.33e-04 +2022-05-05 13:44:11,205 INFO [train.py:715] (7/8) Epoch 6, 
batch 16350, loss[loss=0.1859, simple_loss=0.2503, pruned_loss=0.06077, over 4927.00 frames.], tot_loss[loss=0.15, simple_loss=0.2216, pruned_loss=0.03922, over 970003.55 frames.], batch size: 23, lr: 3.33e-04 +2022-05-05 13:44:49,507 INFO [train.py:715] (7/8) Epoch 6, batch 16400, loss[loss=0.1403, simple_loss=0.212, pruned_loss=0.03426, over 4926.00 frames.], tot_loss[loss=0.1505, simple_loss=0.2219, pruned_loss=0.03952, over 970683.33 frames.], batch size: 39, lr: 3.33e-04 +2022-05-05 13:45:28,823 INFO [train.py:715] (7/8) Epoch 6, batch 16450, loss[loss=0.1589, simple_loss=0.2182, pruned_loss=0.0498, over 4803.00 frames.], tot_loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03946, over 971885.77 frames.], batch size: 25, lr: 3.33e-04 +2022-05-05 13:46:07,627 INFO [train.py:715] (7/8) Epoch 6, batch 16500, loss[loss=0.1668, simple_loss=0.2394, pruned_loss=0.04705, over 4970.00 frames.], tot_loss[loss=0.1506, simple_loss=0.2213, pruned_loss=0.0399, over 971987.74 frames.], batch size: 24, lr: 3.33e-04 +2022-05-05 13:46:46,581 INFO [train.py:715] (7/8) Epoch 6, batch 16550, loss[loss=0.1524, simple_loss=0.2187, pruned_loss=0.04312, over 4871.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2211, pruned_loss=0.03967, over 971393.62 frames.], batch size: 32, lr: 3.33e-04 +2022-05-05 13:47:24,413 INFO [train.py:715] (7/8) Epoch 6, batch 16600, loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.03899, over 4965.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2209, pruned_loss=0.03959, over 973012.82 frames.], batch size: 15, lr: 3.33e-04 +2022-05-05 13:48:03,152 INFO [train.py:715] (7/8) Epoch 6, batch 16650, loss[loss=0.1997, simple_loss=0.264, pruned_loss=0.0677, over 4844.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2217, pruned_loss=0.03941, over 971909.54 frames.], batch size: 30, lr: 3.33e-04 +2022-05-05 13:48:42,815 INFO [train.py:715] (7/8) Epoch 6, batch 16700, loss[loss=0.1526, simple_loss=0.2148, pruned_loss=0.04516, over 4828.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2217, pruned_loss=0.03952, over 971367.74 frames.], batch size: 13, lr: 3.33e-04 +2022-05-05 13:49:21,221 INFO [train.py:715] (7/8) Epoch 6, batch 16750, loss[loss=0.1462, simple_loss=0.2229, pruned_loss=0.03474, over 4785.00 frames.], tot_loss[loss=0.1507, simple_loss=0.2222, pruned_loss=0.03953, over 971743.29 frames.], batch size: 17, lr: 3.33e-04 +2022-05-05 13:50:00,118 INFO [train.py:715] (7/8) Epoch 6, batch 16800, loss[loss=0.1484, simple_loss=0.2165, pruned_loss=0.04013, over 4913.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2211, pruned_loss=0.03918, over 972009.22 frames.], batch size: 22, lr: 3.33e-04 +2022-05-05 13:50:39,327 INFO [train.py:715] (7/8) Epoch 6, batch 16850, loss[loss=0.1529, simple_loss=0.2232, pruned_loss=0.04132, over 4770.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2197, pruned_loss=0.03885, over 971615.05 frames.], batch size: 17, lr: 3.33e-04 +2022-05-05 13:51:19,120 INFO [train.py:715] (7/8) Epoch 6, batch 16900, loss[loss=0.1897, simple_loss=0.2564, pruned_loss=0.06153, over 4844.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2213, pruned_loss=0.03973, over 971413.34 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 13:51:57,175 INFO [train.py:715] (7/8) Epoch 6, batch 16950, loss[loss=0.1521, simple_loss=0.2158, pruned_loss=0.04421, over 4804.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.03968, over 971561.69 frames.], batch size: 21, lr: 3.32e-04 +2022-05-05 13:52:36,229 INFO [train.py:715] (7/8) Epoch 6, batch 17000, 
loss[loss=0.1395, simple_loss=0.2091, pruned_loss=0.03493, over 4904.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2209, pruned_loss=0.03948, over 971611.31 frames.], batch size: 17, lr: 3.32e-04 +2022-05-05 13:53:15,747 INFO [train.py:715] (7/8) Epoch 6, batch 17050, loss[loss=0.1657, simple_loss=0.231, pruned_loss=0.05014, over 4982.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2201, pruned_loss=0.03936, over 972707.94 frames.], batch size: 14, lr: 3.32e-04 +2022-05-05 13:53:53,903 INFO [train.py:715] (7/8) Epoch 6, batch 17100, loss[loss=0.172, simple_loss=0.2487, pruned_loss=0.04765, over 4827.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2205, pruned_loss=0.03939, over 972476.02 frames.], batch size: 26, lr: 3.32e-04 +2022-05-05 13:54:32,774 INFO [train.py:715] (7/8) Epoch 6, batch 17150, loss[loss=0.1351, simple_loss=0.2152, pruned_loss=0.02754, over 4957.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2204, pruned_loss=0.03941, over 972337.32 frames.], batch size: 24, lr: 3.32e-04 +2022-05-05 13:55:11,750 INFO [train.py:715] (7/8) Epoch 6, batch 17200, loss[loss=0.1746, simple_loss=0.2403, pruned_loss=0.0545, over 4990.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03941, over 972963.40 frames.], batch size: 28, lr: 3.32e-04 +2022-05-05 13:55:51,116 INFO [train.py:715] (7/8) Epoch 6, batch 17250, loss[loss=0.1521, simple_loss=0.2287, pruned_loss=0.03778, over 4787.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2207, pruned_loss=0.03944, over 972797.90 frames.], batch size: 17, lr: 3.32e-04 +2022-05-05 13:56:29,075 INFO [train.py:715] (7/8) Epoch 6, batch 17300, loss[loss=0.1533, simple_loss=0.2352, pruned_loss=0.03574, over 4803.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2206, pruned_loss=0.03914, over 973452.36 frames.], batch size: 24, lr: 3.32e-04 +2022-05-05 13:57:07,890 INFO [train.py:715] (7/8) Epoch 6, batch 17350, loss[loss=0.1453, simple_loss=0.2203, pruned_loss=0.03518, over 4987.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2194, pruned_loss=0.03917, over 973687.84 frames.], batch size: 25, lr: 3.32e-04 +2022-05-05 13:57:47,278 INFO [train.py:715] (7/8) Epoch 6, batch 17400, loss[loss=0.1578, simple_loss=0.2291, pruned_loss=0.04325, over 4819.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2191, pruned_loss=0.03876, over 972852.09 frames.], batch size: 27, lr: 3.32e-04 +2022-05-05 13:58:26,213 INFO [train.py:715] (7/8) Epoch 6, batch 17450, loss[loss=0.1967, simple_loss=0.2649, pruned_loss=0.06422, over 4897.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2194, pruned_loss=0.03884, over 973255.98 frames.], batch size: 16, lr: 3.32e-04 +2022-05-05 13:59:04,829 INFO [train.py:715] (7/8) Epoch 6, batch 17500, loss[loss=0.1492, simple_loss=0.2165, pruned_loss=0.04096, over 4900.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2199, pruned_loss=0.03911, over 973635.06 frames.], batch size: 39, lr: 3.32e-04 +2022-05-05 13:59:43,982 INFO [train.py:715] (7/8) Epoch 6, batch 17550, loss[loss=0.1492, simple_loss=0.2162, pruned_loss=0.04112, over 4891.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2207, pruned_loss=0.03936, over 973911.57 frames.], batch size: 19, lr: 3.32e-04 +2022-05-05 14:00:23,862 INFO [train.py:715] (7/8) Epoch 6, batch 17600, loss[loss=0.13, simple_loss=0.1894, pruned_loss=0.03533, over 4705.00 frames.], tot_loss[loss=0.1484, simple_loss=0.219, pruned_loss=0.03888, over 973648.29 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 14:01:01,422 INFO [train.py:715] (7/8) Epoch 6, batch 17650, loss[loss=0.1684, 
simple_loss=0.2396, pruned_loss=0.04857, over 4863.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2195, pruned_loss=0.0387, over 973301.86 frames.], batch size: 16, lr: 3.32e-04 +2022-05-05 14:01:40,866 INFO [train.py:715] (7/8) Epoch 6, batch 17700, loss[loss=0.1605, simple_loss=0.2368, pruned_loss=0.04204, over 4768.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2193, pruned_loss=0.0388, over 973183.67 frames.], batch size: 14, lr: 3.32e-04 +2022-05-05 14:02:20,250 INFO [train.py:715] (7/8) Epoch 6, batch 17750, loss[loss=0.1709, simple_loss=0.2429, pruned_loss=0.04944, over 4865.00 frames.], tot_loss[loss=0.148, simple_loss=0.2188, pruned_loss=0.03858, over 973611.28 frames.], batch size: 16, lr: 3.32e-04 +2022-05-05 14:02:58,609 INFO [train.py:715] (7/8) Epoch 6, batch 17800, loss[loss=0.1526, simple_loss=0.2248, pruned_loss=0.04022, over 4804.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.0383, over 974107.71 frames.], batch size: 25, lr: 3.32e-04 +2022-05-05 14:03:37,541 INFO [train.py:715] (7/8) Epoch 6, batch 17850, loss[loss=0.15, simple_loss=0.2226, pruned_loss=0.03877, over 4802.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03839, over 973998.51 frames.], batch size: 17, lr: 3.32e-04 +2022-05-05 14:04:16,748 INFO [train.py:715] (7/8) Epoch 6, batch 17900, loss[loss=0.1727, simple_loss=0.2327, pruned_loss=0.05634, over 4773.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2199, pruned_loss=0.03835, over 974347.55 frames.], batch size: 14, lr: 3.32e-04 +2022-05-05 14:04:56,309 INFO [train.py:715] (7/8) Epoch 6, batch 17950, loss[loss=0.1384, simple_loss=0.2155, pruned_loss=0.03068, over 4965.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03817, over 974234.73 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 14:05:34,139 INFO [train.py:715] (7/8) Epoch 6, batch 18000, loss[loss=0.1709, simple_loss=0.2431, pruned_loss=0.04941, over 4859.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2198, pruned_loss=0.03821, over 973504.90 frames.], batch size: 30, lr: 3.32e-04 +2022-05-05 14:05:34,140 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 14:05:43,884 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1087, simple_loss=0.1939, pruned_loss=0.0118, over 914524.00 frames. 
+2022-05-05 14:06:22,338 INFO [train.py:715] (7/8) Epoch 6, batch 18050, loss[loss=0.1473, simple_loss=0.2214, pruned_loss=0.03665, over 4873.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.03872, over 972901.14 frames.], batch size: 22, lr: 3.32e-04 +2022-05-05 14:07:01,820 INFO [train.py:715] (7/8) Epoch 6, batch 18100, loss[loss=0.1432, simple_loss=0.212, pruned_loss=0.03719, over 4902.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2199, pruned_loss=0.03885, over 973590.09 frames.], batch size: 17, lr: 3.32e-04 +2022-05-05 14:07:41,269 INFO [train.py:715] (7/8) Epoch 6, batch 18150, loss[loss=0.1309, simple_loss=0.2025, pruned_loss=0.0296, over 4724.00 frames.], tot_loss[loss=0.1502, simple_loss=0.221, pruned_loss=0.03963, over 972947.25 frames.], batch size: 15, lr: 3.32e-04 +2022-05-05 14:08:19,363 INFO [train.py:715] (7/8) Epoch 6, batch 18200, loss[loss=0.1246, simple_loss=0.2051, pruned_loss=0.02201, over 4750.00 frames.], tot_loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03963, over 973302.15 frames.], batch size: 19, lr: 3.32e-04 +2022-05-05 14:08:58,862 INFO [train.py:715] (7/8) Epoch 6, batch 18250, loss[loss=0.1389, simple_loss=0.2063, pruned_loss=0.03575, over 4857.00 frames.], tot_loss[loss=0.1491, simple_loss=0.22, pruned_loss=0.03903, over 972483.56 frames.], batch size: 20, lr: 3.31e-04 +2022-05-05 14:09:38,213 INFO [train.py:715] (7/8) Epoch 6, batch 18300, loss[loss=0.156, simple_loss=0.2298, pruned_loss=0.04105, over 4897.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2211, pruned_loss=0.03919, over 971908.79 frames.], batch size: 18, lr: 3.31e-04 +2022-05-05 14:10:17,262 INFO [train.py:715] (7/8) Epoch 6, batch 18350, loss[loss=0.1559, simple_loss=0.2311, pruned_loss=0.04036, over 4759.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2219, pruned_loss=0.0394, over 971032.53 frames.], batch size: 19, lr: 3.31e-04 +2022-05-05 14:10:55,593 INFO [train.py:715] (7/8) Epoch 6, batch 18400, loss[loss=0.1495, simple_loss=0.2254, pruned_loss=0.03681, over 4733.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2214, pruned_loss=0.03934, over 970782.98 frames.], batch size: 16, lr: 3.31e-04 +2022-05-05 14:11:34,890 INFO [train.py:715] (7/8) Epoch 6, batch 18450, loss[loss=0.1434, simple_loss=0.2175, pruned_loss=0.03462, over 4859.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2201, pruned_loss=0.03864, over 971247.25 frames.], batch size: 20, lr: 3.31e-04 +2022-05-05 14:12:14,319 INFO [train.py:715] (7/8) Epoch 6, batch 18500, loss[loss=0.1281, simple_loss=0.2012, pruned_loss=0.02748, over 4958.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2197, pruned_loss=0.03804, over 972619.09 frames.], batch size: 24, lr: 3.31e-04 +2022-05-05 14:12:52,317 INFO [train.py:715] (7/8) Epoch 6, batch 18550, loss[loss=0.1402, simple_loss=0.209, pruned_loss=0.03577, over 4952.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2196, pruned_loss=0.03809, over 972695.53 frames.], batch size: 24, lr: 3.31e-04 +2022-05-05 14:13:31,753 INFO [train.py:715] (7/8) Epoch 6, batch 18600, loss[loss=0.1348, simple_loss=0.2007, pruned_loss=0.03444, over 4874.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2204, pruned_loss=0.03853, over 973054.88 frames.], batch size: 32, lr: 3.31e-04 +2022-05-05 14:14:10,848 INFO [train.py:715] (7/8) Epoch 6, batch 18650, loss[loss=0.1467, simple_loss=0.2224, pruned_loss=0.0355, over 4750.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2202, pruned_loss=0.03902, over 971841.70 frames.], batch size: 16, lr: 3.31e-04 +2022-05-05 14:14:50,390 INFO 
[train.py:715] (7/8) Epoch 6, batch 18700, loss[loss=0.1504, simple_loss=0.2276, pruned_loss=0.03665, over 4935.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2209, pruned_loss=0.03916, over 972103.04 frames.], batch size: 21, lr: 3.31e-04 +2022-05-05 14:15:28,529 INFO [train.py:715] (7/8) Epoch 6, batch 18750, loss[loss=0.1497, simple_loss=0.1998, pruned_loss=0.04985, over 4960.00 frames.], tot_loss[loss=0.1496, simple_loss=0.221, pruned_loss=0.03915, over 972398.43 frames.], batch size: 35, lr: 3.31e-04 +2022-05-05 14:16:07,702 INFO [train.py:715] (7/8) Epoch 6, batch 18800, loss[loss=0.1454, simple_loss=0.2247, pruned_loss=0.03304, over 4981.00 frames.], tot_loss[loss=0.1503, simple_loss=0.2214, pruned_loss=0.03957, over 973026.70 frames.], batch size: 25, lr: 3.31e-04 +2022-05-05 14:16:47,206 INFO [train.py:715] (7/8) Epoch 6, batch 18850, loss[loss=0.1431, simple_loss=0.2139, pruned_loss=0.03611, over 4828.00 frames.], tot_loss[loss=0.149, simple_loss=0.2203, pruned_loss=0.03888, over 972965.67 frames.], batch size: 26, lr: 3.31e-04 +2022-05-05 14:17:25,254 INFO [train.py:715] (7/8) Epoch 6, batch 18900, loss[loss=0.1159, simple_loss=0.1973, pruned_loss=0.01725, over 4902.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03851, over 972618.13 frames.], batch size: 19, lr: 3.31e-04 +2022-05-05 14:18:04,841 INFO [train.py:715] (7/8) Epoch 6, batch 18950, loss[loss=0.1666, simple_loss=0.2258, pruned_loss=0.05373, over 4945.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03868, over 971708.51 frames.], batch size: 35, lr: 3.31e-04 +2022-05-05 14:18:43,971 INFO [train.py:715] (7/8) Epoch 6, batch 19000, loss[loss=0.1793, simple_loss=0.2383, pruned_loss=0.0602, over 4947.00 frames.], tot_loss[loss=0.148, simple_loss=0.2196, pruned_loss=0.03823, over 971949.28 frames.], batch size: 14, lr: 3.31e-04 +2022-05-05 14:19:23,156 INFO [train.py:715] (7/8) Epoch 6, batch 19050, loss[loss=0.1644, simple_loss=0.229, pruned_loss=0.04994, over 4846.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2198, pruned_loss=0.0386, over 972166.21 frames.], batch size: 15, lr: 3.31e-04 +2022-05-05 14:20:01,546 INFO [train.py:715] (7/8) Epoch 6, batch 19100, loss[loss=0.1367, simple_loss=0.2154, pruned_loss=0.02902, over 4878.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2196, pruned_loss=0.03858, over 972700.60 frames.], batch size: 16, lr: 3.31e-04 +2022-05-05 14:20:40,517 INFO [train.py:715] (7/8) Epoch 6, batch 19150, loss[loss=0.1623, simple_loss=0.239, pruned_loss=0.04286, over 4953.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2198, pruned_loss=0.03868, over 972212.97 frames.], batch size: 24, lr: 3.31e-04 +2022-05-05 14:21:20,170 INFO [train.py:715] (7/8) Epoch 6, batch 19200, loss[loss=0.1492, simple_loss=0.2198, pruned_loss=0.03927, over 4909.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.0382, over 971506.36 frames.], batch size: 17, lr: 3.31e-04 +2022-05-05 14:21:58,237 INFO [train.py:715] (7/8) Epoch 6, batch 19250, loss[loss=0.1345, simple_loss=0.2179, pruned_loss=0.02555, over 4851.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2184, pruned_loss=0.03799, over 972235.97 frames.], batch size: 30, lr: 3.31e-04 +2022-05-05 14:22:37,141 INFO [train.py:715] (7/8) Epoch 6, batch 19300, loss[loss=0.1162, simple_loss=0.1875, pruned_loss=0.02244, over 4765.00 frames.], tot_loss[loss=0.147, simple_loss=0.2184, pruned_loss=0.03781, over 972750.39 frames.], batch size: 12, lr: 3.31e-04 +2022-05-05 14:23:16,401 INFO [train.py:715] (7/8) Epoch 
6, batch 19350, loss[loss=0.1654, simple_loss=0.2288, pruned_loss=0.05102, over 4853.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2186, pruned_loss=0.03782, over 972025.32 frames.], batch size: 30, lr: 3.31e-04 +2022-05-05 14:23:54,993 INFO [train.py:715] (7/8) Epoch 6, batch 19400, loss[loss=0.1433, simple_loss=0.2044, pruned_loss=0.04114, over 4994.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03817, over 972094.30 frames.], batch size: 14, lr: 3.31e-04 +2022-05-05 14:24:33,669 INFO [train.py:715] (7/8) Epoch 6, batch 19450, loss[loss=0.127, simple_loss=0.1977, pruned_loss=0.02814, over 4802.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03824, over 972327.87 frames.], batch size: 18, lr: 3.31e-04 +2022-05-05 14:25:13,067 INFO [train.py:715] (7/8) Epoch 6, batch 19500, loss[loss=0.1218, simple_loss=0.1958, pruned_loss=0.02391, over 4817.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2188, pruned_loss=0.0383, over 971907.09 frames.], batch size: 13, lr: 3.31e-04 +2022-05-05 14:25:51,975 INFO [train.py:715] (7/8) Epoch 6, batch 19550, loss[loss=0.1474, simple_loss=0.2105, pruned_loss=0.04212, over 4815.00 frames.], tot_loss[loss=0.148, simple_loss=0.219, pruned_loss=0.03845, over 971620.34 frames.], batch size: 12, lr: 3.31e-04 +2022-05-05 14:26:30,334 INFO [train.py:715] (7/8) Epoch 6, batch 19600, loss[loss=0.1347, simple_loss=0.2025, pruned_loss=0.03344, over 4874.00 frames.], tot_loss[loss=0.148, simple_loss=0.2189, pruned_loss=0.03856, over 971524.88 frames.], batch size: 32, lr: 3.31e-04 +2022-05-05 14:27:09,231 INFO [train.py:715] (7/8) Epoch 6, batch 19650, loss[loss=0.1772, simple_loss=0.2435, pruned_loss=0.05544, over 4939.00 frames.], tot_loss[loss=0.148, simple_loss=0.2188, pruned_loss=0.03855, over 972054.84 frames.], batch size: 21, lr: 3.30e-04 +2022-05-05 14:27:48,352 INFO [train.py:715] (7/8) Epoch 6, batch 19700, loss[loss=0.1436, simple_loss=0.2059, pruned_loss=0.0407, over 4854.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2189, pruned_loss=0.03845, over 973234.51 frames.], batch size: 30, lr: 3.30e-04 +2022-05-05 14:28:27,138 INFO [train.py:715] (7/8) Epoch 6, batch 19750, loss[loss=0.1401, simple_loss=0.192, pruned_loss=0.0441, over 4705.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.0381, over 972966.75 frames.], batch size: 15, lr: 3.30e-04 +2022-05-05 14:29:05,245 INFO [train.py:715] (7/8) Epoch 6, batch 19800, loss[loss=0.1755, simple_loss=0.2403, pruned_loss=0.05533, over 4642.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03809, over 972905.61 frames.], batch size: 13, lr: 3.30e-04 +2022-05-05 14:29:44,597 INFO [train.py:715] (7/8) Epoch 6, batch 19850, loss[loss=0.1481, simple_loss=0.2164, pruned_loss=0.03994, over 4779.00 frames.], tot_loss[loss=0.147, simple_loss=0.2183, pruned_loss=0.03781, over 972665.01 frames.], batch size: 17, lr: 3.30e-04 +2022-05-05 14:30:24,342 INFO [train.py:715] (7/8) Epoch 6, batch 19900, loss[loss=0.1361, simple_loss=0.2107, pruned_loss=0.03078, over 4985.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2185, pruned_loss=0.03815, over 972990.35 frames.], batch size: 14, lr: 3.30e-04 +2022-05-05 14:31:02,426 INFO [train.py:715] (7/8) Epoch 6, batch 19950, loss[loss=0.158, simple_loss=0.2257, pruned_loss=0.04517, over 4749.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03859, over 973217.35 frames.], batch size: 12, lr: 3.30e-04 +2022-05-05 14:31:41,550 INFO [train.py:715] (7/8) Epoch 6, batch 20000, 
loss[loss=0.1909, simple_loss=0.2609, pruned_loss=0.06047, over 4695.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2186, pruned_loss=0.03814, over 972875.69 frames.], batch size: 15, lr: 3.30e-04 +2022-05-05 14:32:21,022 INFO [train.py:715] (7/8) Epoch 6, batch 20050, loss[loss=0.1256, simple_loss=0.2007, pruned_loss=0.02526, over 4808.00 frames.], tot_loss[loss=0.1471, simple_loss=0.218, pruned_loss=0.03812, over 972555.32 frames.], batch size: 12, lr: 3.30e-04 +2022-05-05 14:32:59,452 INFO [train.py:715] (7/8) Epoch 6, batch 20100, loss[loss=0.1605, simple_loss=0.2434, pruned_loss=0.03879, over 4759.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2183, pruned_loss=0.03803, over 972056.12 frames.], batch size: 19, lr: 3.30e-04 +2022-05-05 14:33:38,525 INFO [train.py:715] (7/8) Epoch 6, batch 20150, loss[loss=0.1801, simple_loss=0.2607, pruned_loss=0.04977, over 4947.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2189, pruned_loss=0.03828, over 972823.63 frames.], batch size: 21, lr: 3.30e-04 +2022-05-05 14:34:17,808 INFO [train.py:715] (7/8) Epoch 6, batch 20200, loss[loss=0.1498, simple_loss=0.2155, pruned_loss=0.04209, over 4794.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2187, pruned_loss=0.03805, over 972221.20 frames.], batch size: 17, lr: 3.30e-04 +2022-05-05 14:34:56,736 INFO [train.py:715] (7/8) Epoch 6, batch 20250, loss[loss=0.1362, simple_loss=0.2109, pruned_loss=0.03074, over 4763.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03826, over 972604.89 frames.], batch size: 19, lr: 3.30e-04 +2022-05-05 14:35:35,498 INFO [train.py:715] (7/8) Epoch 6, batch 20300, loss[loss=0.1209, simple_loss=0.206, pruned_loss=0.01796, over 4843.00 frames.], tot_loss[loss=0.148, simple_loss=0.2194, pruned_loss=0.03827, over 971531.43 frames.], batch size: 13, lr: 3.30e-04 +2022-05-05 14:36:14,862 INFO [train.py:715] (7/8) Epoch 6, batch 20350, loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03037, over 4859.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03786, over 972432.86 frames.], batch size: 16, lr: 3.30e-04 +2022-05-05 14:36:54,305 INFO [train.py:715] (7/8) Epoch 6, batch 20400, loss[loss=0.1614, simple_loss=0.2341, pruned_loss=0.04435, over 4789.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2186, pruned_loss=0.03778, over 972566.13 frames.], batch size: 12, lr: 3.30e-04 +2022-05-05 14:37:32,664 INFO [train.py:715] (7/8) Epoch 6, batch 20450, loss[loss=0.1434, simple_loss=0.2152, pruned_loss=0.03581, over 4844.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.0383, over 972398.82 frames.], batch size: 20, lr: 3.30e-04 +2022-05-05 14:38:11,470 INFO [train.py:715] (7/8) Epoch 6, batch 20500, loss[loss=0.152, simple_loss=0.2249, pruned_loss=0.03957, over 4953.00 frames.], tot_loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03919, over 972115.54 frames.], batch size: 29, lr: 3.30e-04 +2022-05-05 14:38:50,535 INFO [train.py:715] (7/8) Epoch 6, batch 20550, loss[loss=0.1596, simple_loss=0.2385, pruned_loss=0.0404, over 4739.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2211, pruned_loss=0.03952, over 972412.35 frames.], batch size: 16, lr: 3.30e-04 +2022-05-05 14:39:29,694 INFO [train.py:715] (7/8) Epoch 6, batch 20600, loss[loss=0.1958, simple_loss=0.2596, pruned_loss=0.06601, over 4874.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03952, over 973450.82 frames.], batch size: 16, lr: 3.30e-04 +2022-05-05 14:40:07,959 INFO [train.py:715] (7/8) Epoch 6, batch 20650, loss[loss=0.1628, 
simple_loss=0.2328, pruned_loss=0.04636, over 4876.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2196, pruned_loss=0.03863, over 973511.57 frames.], batch size: 16, lr: 3.30e-04 +2022-05-05 14:40:46,657 INFO [train.py:715] (7/8) Epoch 6, batch 20700, loss[loss=0.1305, simple_loss=0.2077, pruned_loss=0.02664, over 4922.00 frames.], tot_loss[loss=0.1479, simple_loss=0.219, pruned_loss=0.0384, over 972693.93 frames.], batch size: 29, lr: 3.30e-04 +2022-05-05 14:41:25,992 INFO [train.py:715] (7/8) Epoch 6, batch 20750, loss[loss=0.1182, simple_loss=0.1959, pruned_loss=0.02019, over 4821.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2192, pruned_loss=0.03867, over 972763.54 frames.], batch size: 26, lr: 3.30e-04 +2022-05-05 14:42:04,393 INFO [train.py:715] (7/8) Epoch 6, batch 20800, loss[loss=0.1553, simple_loss=0.2322, pruned_loss=0.03922, over 4908.00 frames.], tot_loss[loss=0.149, simple_loss=0.2198, pruned_loss=0.03906, over 972812.08 frames.], batch size: 39, lr: 3.30e-04 +2022-05-05 14:42:43,607 INFO [train.py:715] (7/8) Epoch 6, batch 20850, loss[loss=0.1242, simple_loss=0.192, pruned_loss=0.0282, over 4689.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2206, pruned_loss=0.03941, over 972321.66 frames.], batch size: 15, lr: 3.30e-04 +2022-05-05 14:43:22,889 INFO [train.py:715] (7/8) Epoch 6, batch 20900, loss[loss=0.1874, simple_loss=0.2557, pruned_loss=0.05959, over 4843.00 frames.], tot_loss[loss=0.149, simple_loss=0.22, pruned_loss=0.03902, over 972960.00 frames.], batch size: 15, lr: 3.30e-04 +2022-05-05 14:44:02,110 INFO [train.py:715] (7/8) Epoch 6, batch 20950, loss[loss=0.09323, simple_loss=0.1623, pruned_loss=0.01208, over 4646.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03803, over 972475.43 frames.], batch size: 13, lr: 3.30e-04 +2022-05-05 14:44:40,096 INFO [train.py:715] (7/8) Epoch 6, batch 21000, loss[loss=0.1489, simple_loss=0.2256, pruned_loss=0.03614, over 4779.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2186, pruned_loss=0.03829, over 972798.45 frames.], batch size: 18, lr: 3.29e-04 +2022-05-05 14:44:40,097 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 14:44:51,876 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1089, simple_loss=0.1939, pruned_loss=0.01192, over 914524.00 frames. 
+2022-05-05 14:45:30,117 INFO [train.py:715] (7/8) Epoch 6, batch 21050, loss[loss=0.1734, simple_loss=0.2633, pruned_loss=0.04176, over 4809.00 frames.], tot_loss[loss=0.1477, simple_loss=0.219, pruned_loss=0.03817, over 972654.04 frames.], batch size: 25, lr: 3.29e-04 +2022-05-05 14:46:09,487 INFO [train.py:715] (7/8) Epoch 6, batch 21100, loss[loss=0.1344, simple_loss=0.2101, pruned_loss=0.02935, over 4828.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.039, over 973168.36 frames.], batch size: 15, lr: 3.29e-04 +2022-05-05 14:46:48,886 INFO [train.py:715] (7/8) Epoch 6, batch 21150, loss[loss=0.1764, simple_loss=0.2378, pruned_loss=0.05749, over 4878.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.03844, over 973285.21 frames.], batch size: 16, lr: 3.29e-04 +2022-05-05 14:47:27,344 INFO [train.py:715] (7/8) Epoch 6, batch 21200, loss[loss=0.1624, simple_loss=0.2253, pruned_loss=0.04975, over 4904.00 frames.], tot_loss[loss=0.1492, simple_loss=0.22, pruned_loss=0.03916, over 973701.25 frames.], batch size: 19, lr: 3.29e-04 +2022-05-05 14:48:06,353 INFO [train.py:715] (7/8) Epoch 6, batch 21250, loss[loss=0.1418, simple_loss=0.2147, pruned_loss=0.03441, over 4800.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2194, pruned_loss=0.03859, over 973817.73 frames.], batch size: 26, lr: 3.29e-04 +2022-05-05 14:48:45,980 INFO [train.py:715] (7/8) Epoch 6, batch 21300, loss[loss=0.1779, simple_loss=0.2359, pruned_loss=0.05991, over 4741.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03848, over 973403.05 frames.], batch size: 16, lr: 3.29e-04 +2022-05-05 14:49:24,955 INFO [train.py:715] (7/8) Epoch 6, batch 21350, loss[loss=0.1525, simple_loss=0.232, pruned_loss=0.0365, over 4882.00 frames.], tot_loss[loss=0.1474, simple_loss=0.219, pruned_loss=0.03789, over 973058.02 frames.], batch size: 16, lr: 3.29e-04 +2022-05-05 14:50:03,787 INFO [train.py:715] (7/8) Epoch 6, batch 21400, loss[loss=0.1395, simple_loss=0.2146, pruned_loss=0.03223, over 4769.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2201, pruned_loss=0.03845, over 974078.32 frames.], batch size: 14, lr: 3.29e-04 +2022-05-05 14:50:42,551 INFO [train.py:715] (7/8) Epoch 6, batch 21450, loss[loss=0.1514, simple_loss=0.2211, pruned_loss=0.04082, over 4884.00 frames.], tot_loss[loss=0.148, simple_loss=0.2198, pruned_loss=0.03816, over 973337.83 frames.], batch size: 19, lr: 3.29e-04 +2022-05-05 14:51:21,821 INFO [train.py:715] (7/8) Epoch 6, batch 21500, loss[loss=0.144, simple_loss=0.2189, pruned_loss=0.03455, over 4834.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2197, pruned_loss=0.03837, over 972347.57 frames.], batch size: 15, lr: 3.29e-04 +2022-05-05 14:52:00,294 INFO [train.py:715] (7/8) Epoch 6, batch 21550, loss[loss=0.1237, simple_loss=0.1944, pruned_loss=0.02656, over 4874.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2192, pruned_loss=0.03811, over 971806.39 frames.], batch size: 13, lr: 3.29e-04 +2022-05-05 14:52:39,315 INFO [train.py:715] (7/8) Epoch 6, batch 21600, loss[loss=0.1401, simple_loss=0.206, pruned_loss=0.03714, over 4813.00 frames.], tot_loss[loss=0.1475, simple_loss=0.219, pruned_loss=0.03798, over 971331.79 frames.], batch size: 27, lr: 3.29e-04 +2022-05-05 14:53:18,465 INFO [train.py:715] (7/8) Epoch 6, batch 21650, loss[loss=0.1535, simple_loss=0.2214, pruned_loss=0.04275, over 4948.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.0381, over 971876.84 frames.], batch size: 23, lr: 3.29e-04 +2022-05-05 14:53:57,746 INFO 
[train.py:715] (7/8) Epoch 6, batch 21700, loss[loss=0.1262, simple_loss=0.2, pruned_loss=0.02622, over 4793.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2194, pruned_loss=0.03822, over 971474.79 frames.], batch size: 24, lr: 3.29e-04 +2022-05-05 14:54:36,455 INFO [train.py:715] (7/8) Epoch 6, batch 21750, loss[loss=0.1395, simple_loss=0.2183, pruned_loss=0.03037, over 4925.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2194, pruned_loss=0.03803, over 972892.47 frames.], batch size: 23, lr: 3.29e-04 +2022-05-05 14:55:15,313 INFO [train.py:715] (7/8) Epoch 6, batch 21800, loss[loss=0.1356, simple_loss=0.2068, pruned_loss=0.03221, over 4957.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03859, over 972789.44 frames.], batch size: 14, lr: 3.29e-04 +2022-05-05 14:55:54,107 INFO [train.py:715] (7/8) Epoch 6, batch 21850, loss[loss=0.1371, simple_loss=0.2036, pruned_loss=0.03529, over 4853.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03889, over 972562.13 frames.], batch size: 20, lr: 3.29e-04 +2022-05-05 14:56:32,646 INFO [train.py:715] (7/8) Epoch 6, batch 21900, loss[loss=0.1308, simple_loss=0.2007, pruned_loss=0.03043, over 4993.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2196, pruned_loss=0.03898, over 972073.54 frames.], batch size: 14, lr: 3.29e-04 +2022-05-05 14:57:11,519 INFO [train.py:715] (7/8) Epoch 6, batch 21950, loss[loss=0.1665, simple_loss=0.2229, pruned_loss=0.05502, over 4884.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2199, pruned_loss=0.03921, over 971746.66 frames.], batch size: 32, lr: 3.29e-04 +2022-05-05 14:57:50,235 INFO [train.py:715] (7/8) Epoch 6, batch 22000, loss[loss=0.1498, simple_loss=0.2315, pruned_loss=0.03402, over 4917.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2202, pruned_loss=0.0392, over 971908.60 frames.], batch size: 29, lr: 3.29e-04 +2022-05-05 14:58:29,945 INFO [train.py:715] (7/8) Epoch 6, batch 22050, loss[loss=0.1537, simple_loss=0.2324, pruned_loss=0.0375, over 4974.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2209, pruned_loss=0.03924, over 972805.64 frames.], batch size: 25, lr: 3.29e-04 +2022-05-05 14:59:08,264 INFO [train.py:715] (7/8) Epoch 6, batch 22100, loss[loss=0.1201, simple_loss=0.1888, pruned_loss=0.02567, over 4756.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2209, pruned_loss=0.0394, over 972112.61 frames.], batch size: 19, lr: 3.29e-04 +2022-05-05 14:59:47,064 INFO [train.py:715] (7/8) Epoch 6, batch 22150, loss[loss=0.1304, simple_loss=0.1919, pruned_loss=0.03444, over 4992.00 frames.], tot_loss[loss=0.1501, simple_loss=0.221, pruned_loss=0.03964, over 972920.90 frames.], batch size: 14, lr: 3.29e-04 +2022-05-05 15:00:26,257 INFO [train.py:715] (7/8) Epoch 6, batch 22200, loss[loss=0.1823, simple_loss=0.2329, pruned_loss=0.06581, over 4827.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2204, pruned_loss=0.03966, over 972382.33 frames.], batch size: 30, lr: 3.29e-04 +2022-05-05 15:01:04,916 INFO [train.py:715] (7/8) Epoch 6, batch 22250, loss[loss=0.1637, simple_loss=0.2349, pruned_loss=0.04621, over 4968.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2208, pruned_loss=0.03947, over 972272.92 frames.], batch size: 24, lr: 3.29e-04 +2022-05-05 15:01:43,598 INFO [train.py:715] (7/8) Epoch 6, batch 22300, loss[loss=0.1367, simple_loss=0.214, pruned_loss=0.02969, over 4984.00 frames.], tot_loss[loss=0.1488, simple_loss=0.22, pruned_loss=0.03876, over 972505.84 frames.], batch size: 28, lr: 3.29e-04 +2022-05-05 15:02:22,658 INFO [train.py:715] (7/8) Epoch 
6, batch 22350, loss[loss=0.1628, simple_loss=0.2187, pruned_loss=0.05342, over 4892.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2206, pruned_loss=0.0389, over 972166.88 frames.], batch size: 17, lr: 3.29e-04 +2022-05-05 15:03:02,004 INFO [train.py:715] (7/8) Epoch 6, batch 22400, loss[loss=0.1581, simple_loss=0.2277, pruned_loss=0.04422, over 4866.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03942, over 971840.19 frames.], batch size: 20, lr: 3.29e-04 +2022-05-05 15:03:40,490 INFO [train.py:715] (7/8) Epoch 6, batch 22450, loss[loss=0.1306, simple_loss=0.205, pruned_loss=0.02811, over 4808.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.039, over 971011.34 frames.], batch size: 25, lr: 3.28e-04 +2022-05-05 15:04:19,442 INFO [train.py:715] (7/8) Epoch 6, batch 22500, loss[loss=0.1568, simple_loss=0.247, pruned_loss=0.03331, over 4935.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2195, pruned_loss=0.0391, over 971539.67 frames.], batch size: 23, lr: 3.28e-04 +2022-05-05 15:04:58,758 INFO [train.py:715] (7/8) Epoch 6, batch 22550, loss[loss=0.1457, simple_loss=0.211, pruned_loss=0.04026, over 4921.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2196, pruned_loss=0.03939, over 971824.72 frames.], batch size: 29, lr: 3.28e-04 +2022-05-05 15:05:37,182 INFO [train.py:715] (7/8) Epoch 6, batch 22600, loss[loss=0.15, simple_loss=0.2143, pruned_loss=0.04282, over 4773.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2196, pruned_loss=0.03956, over 971571.05 frames.], batch size: 18, lr: 3.28e-04 +2022-05-05 15:06:16,006 INFO [train.py:715] (7/8) Epoch 6, batch 22650, loss[loss=0.1307, simple_loss=0.2026, pruned_loss=0.02941, over 4929.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2206, pruned_loss=0.03983, over 971600.63 frames.], batch size: 18, lr: 3.28e-04 +2022-05-05 15:06:54,606 INFO [train.py:715] (7/8) Epoch 6, batch 22700, loss[loss=0.1559, simple_loss=0.2239, pruned_loss=0.04391, over 4953.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03889, over 972332.57 frames.], batch size: 35, lr: 3.28e-04 +2022-05-05 15:07:33,407 INFO [train.py:715] (7/8) Epoch 6, batch 22750, loss[loss=0.197, simple_loss=0.2622, pruned_loss=0.06587, over 4773.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2203, pruned_loss=0.03863, over 973318.17 frames.], batch size: 18, lr: 3.28e-04 +2022-05-05 15:08:11,869 INFO [train.py:715] (7/8) Epoch 6, batch 22800, loss[loss=0.1358, simple_loss=0.195, pruned_loss=0.03836, over 4838.00 frames.], tot_loss[loss=0.149, simple_loss=0.2204, pruned_loss=0.03881, over 973476.26 frames.], batch size: 30, lr: 3.28e-04 +2022-05-05 15:08:50,375 INFO [train.py:715] (7/8) Epoch 6, batch 22850, loss[loss=0.1431, simple_loss=0.2066, pruned_loss=0.03978, over 4893.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.03855, over 972878.68 frames.], batch size: 17, lr: 3.28e-04 +2022-05-05 15:09:29,028 INFO [train.py:715] (7/8) Epoch 6, batch 22900, loss[loss=0.1367, simple_loss=0.214, pruned_loss=0.02973, over 4972.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2205, pruned_loss=0.039, over 972352.55 frames.], batch size: 24, lr: 3.28e-04 +2022-05-05 15:10:08,158 INFO [train.py:715] (7/8) Epoch 6, batch 22950, loss[loss=0.1399, simple_loss=0.2042, pruned_loss=0.03784, over 4938.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2208, pruned_loss=0.03934, over 972873.82 frames.], batch size: 18, lr: 3.28e-04 +2022-05-05 15:10:46,573 INFO [train.py:715] (7/8) Epoch 6, batch 23000, loss[loss=0.1495, 
simple_loss=0.2211, pruned_loss=0.03895, over 4972.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2203, pruned_loss=0.03874, over 972539.69 frames.], batch size: 28, lr: 3.28e-04 +2022-05-05 15:11:25,828 INFO [train.py:715] (7/8) Epoch 6, batch 23050, loss[loss=0.1366, simple_loss=0.22, pruned_loss=0.0266, over 4913.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2205, pruned_loss=0.03836, over 971886.44 frames.], batch size: 23, lr: 3.28e-04 +2022-05-05 15:12:05,301 INFO [train.py:715] (7/8) Epoch 6, batch 23100, loss[loss=0.1788, simple_loss=0.2473, pruned_loss=0.05508, over 4748.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2198, pruned_loss=0.03828, over 972420.53 frames.], batch size: 16, lr: 3.28e-04 +2022-05-05 15:12:46,120 INFO [train.py:715] (7/8) Epoch 6, batch 23150, loss[loss=0.1686, simple_loss=0.237, pruned_loss=0.05008, over 4786.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2197, pruned_loss=0.03894, over 972201.80 frames.], batch size: 18, lr: 3.28e-04 +2022-05-05 15:13:25,467 INFO [train.py:715] (7/8) Epoch 6, batch 23200, loss[loss=0.1484, simple_loss=0.2287, pruned_loss=0.03402, over 4870.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2193, pruned_loss=0.0388, over 972091.00 frames.], batch size: 16, lr: 3.28e-04 +2022-05-05 15:14:04,866 INFO [train.py:715] (7/8) Epoch 6, batch 23250, loss[loss=0.1531, simple_loss=0.2201, pruned_loss=0.04299, over 4949.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2189, pruned_loss=0.03877, over 972021.78 frames.], batch size: 23, lr: 3.28e-04 +2022-05-05 15:14:43,523 INFO [train.py:715] (7/8) Epoch 6, batch 23300, loss[loss=0.1731, simple_loss=0.2436, pruned_loss=0.05136, over 4915.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2197, pruned_loss=0.03939, over 971774.93 frames.], batch size: 19, lr: 3.28e-04 +2022-05-05 15:15:21,521 INFO [train.py:715] (7/8) Epoch 6, batch 23350, loss[loss=0.1559, simple_loss=0.2238, pruned_loss=0.04393, over 4914.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2194, pruned_loss=0.03882, over 972415.73 frames.], batch size: 19, lr: 3.28e-04 +2022-05-05 15:16:00,567 INFO [train.py:715] (7/8) Epoch 6, batch 23400, loss[loss=0.118, simple_loss=0.1916, pruned_loss=0.02218, over 4759.00 frames.], tot_loss[loss=0.1481, simple_loss=0.219, pruned_loss=0.03863, over 972234.67 frames.], batch size: 19, lr: 3.28e-04 +2022-05-05 15:16:40,146 INFO [train.py:715] (7/8) Epoch 6, batch 23450, loss[loss=0.1408, simple_loss=0.2162, pruned_loss=0.03274, over 4982.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03782, over 972143.07 frames.], batch size: 28, lr: 3.28e-04 +2022-05-05 15:17:19,121 INFO [train.py:715] (7/8) Epoch 6, batch 23500, loss[loss=0.1383, simple_loss=0.2126, pruned_loss=0.03195, over 4916.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2197, pruned_loss=0.03805, over 972586.54 frames.], batch size: 29, lr: 3.28e-04 +2022-05-05 15:17:58,302 INFO [train.py:715] (7/8) Epoch 6, batch 23550, loss[loss=0.1477, simple_loss=0.2234, pruned_loss=0.03598, over 4752.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03778, over 972565.26 frames.], batch size: 19, lr: 3.28e-04 +2022-05-05 15:18:37,513 INFO [train.py:715] (7/8) Epoch 6, batch 23600, loss[loss=0.1357, simple_loss=0.2106, pruned_loss=0.03036, over 4804.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03825, over 972508.27 frames.], batch size: 25, lr: 3.28e-04 +2022-05-05 15:19:16,254 INFO [train.py:715] (7/8) Epoch 6, batch 23650, loss[loss=0.1546, simple_loss=0.2181, 
pruned_loss=0.0455, over 4856.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2197, pruned_loss=0.03852, over 972823.66 frames.], batch size: 38, lr: 3.28e-04 +2022-05-05 15:19:54,393 INFO [train.py:715] (7/8) Epoch 6, batch 23700, loss[loss=0.155, simple_loss=0.2306, pruned_loss=0.03976, over 4945.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2192, pruned_loss=0.03812, over 972770.21 frames.], batch size: 21, lr: 3.28e-04 +2022-05-05 15:20:33,416 INFO [train.py:715] (7/8) Epoch 6, batch 23750, loss[loss=0.1463, simple_loss=0.2244, pruned_loss=0.03414, over 4791.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03794, over 972196.21 frames.], batch size: 21, lr: 3.28e-04 +2022-05-05 15:21:12,835 INFO [train.py:715] (7/8) Epoch 6, batch 23800, loss[loss=0.155, simple_loss=0.2149, pruned_loss=0.04751, over 4977.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.03788, over 973082.77 frames.], batch size: 15, lr: 3.28e-04 +2022-05-05 15:21:51,203 INFO [train.py:715] (7/8) Epoch 6, batch 23850, loss[loss=0.1475, simple_loss=0.2199, pruned_loss=0.0376, over 4847.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2191, pruned_loss=0.0381, over 972897.45 frames.], batch size: 20, lr: 3.27e-04 +2022-05-05 15:22:29,817 INFO [train.py:715] (7/8) Epoch 6, batch 23900, loss[loss=0.1634, simple_loss=0.2359, pruned_loss=0.04548, over 4957.00 frames.], tot_loss[loss=0.147, simple_loss=0.2189, pruned_loss=0.03754, over 972774.09 frames.], batch size: 24, lr: 3.27e-04 +2022-05-05 15:23:08,544 INFO [train.py:715] (7/8) Epoch 6, batch 23950, loss[loss=0.1512, simple_loss=0.2218, pruned_loss=0.04028, over 4871.00 frames.], tot_loss[loss=0.1474, simple_loss=0.219, pruned_loss=0.03788, over 972239.27 frames.], batch size: 16, lr: 3.27e-04 +2022-05-05 15:23:47,250 INFO [train.py:715] (7/8) Epoch 6, batch 24000, loss[loss=0.1371, simple_loss=0.2184, pruned_loss=0.02793, over 4847.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2192, pruned_loss=0.03822, over 972336.81 frames.], batch size: 13, lr: 3.27e-04 +2022-05-05 15:23:47,250 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 15:23:58,203 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1089, simple_loss=0.1939, pruned_loss=0.01195, over 914524.00 frames. 
+2022-05-05 15:24:36,966 INFO [train.py:715] (7/8) Epoch 6, batch 24050, loss[loss=0.154, simple_loss=0.2329, pruned_loss=0.03751, over 4985.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2193, pruned_loss=0.03802, over 972372.50 frames.], batch size: 25, lr: 3.27e-04 +2022-05-05 15:25:15,032 INFO [train.py:715] (7/8) Epoch 6, batch 24100, loss[loss=0.1489, simple_loss=0.2279, pruned_loss=0.03492, over 4987.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2189, pruned_loss=0.03771, over 972283.18 frames.], batch size: 27, lr: 3.27e-04 +2022-05-05 15:25:53,705 INFO [train.py:715] (7/8) Epoch 6, batch 24150, loss[loss=0.1502, simple_loss=0.2176, pruned_loss=0.04142, over 4944.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.0375, over 971912.70 frames.], batch size: 35, lr: 3.27e-04 +2022-05-05 15:26:32,799 INFO [train.py:715] (7/8) Epoch 6, batch 24200, loss[loss=0.1316, simple_loss=0.2073, pruned_loss=0.02794, over 4864.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2185, pruned_loss=0.03794, over 972979.42 frames.], batch size: 32, lr: 3.27e-04 +2022-05-05 15:27:10,720 INFO [train.py:715] (7/8) Epoch 6, batch 24250, loss[loss=0.1459, simple_loss=0.2226, pruned_loss=0.03461, over 4958.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03843, over 972743.26 frames.], batch size: 24, lr: 3.27e-04 +2022-05-05 15:27:49,115 INFO [train.py:715] (7/8) Epoch 6, batch 24300, loss[loss=0.1357, simple_loss=0.204, pruned_loss=0.03367, over 4912.00 frames.], tot_loss[loss=0.148, simple_loss=0.2192, pruned_loss=0.03843, over 972510.18 frames.], batch size: 18, lr: 3.27e-04 +2022-05-05 15:28:28,041 INFO [train.py:715] (7/8) Epoch 6, batch 24350, loss[loss=0.1485, simple_loss=0.226, pruned_loss=0.0355, over 4925.00 frames.], tot_loss[loss=0.1478, simple_loss=0.219, pruned_loss=0.03831, over 972862.45 frames.], batch size: 23, lr: 3.27e-04 +2022-05-05 15:29:07,161 INFO [train.py:715] (7/8) Epoch 6, batch 24400, loss[loss=0.1436, simple_loss=0.2192, pruned_loss=0.03401, over 4879.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2198, pruned_loss=0.03904, over 972868.95 frames.], batch size: 22, lr: 3.27e-04 +2022-05-05 15:29:45,509 INFO [train.py:715] (7/8) Epoch 6, batch 24450, loss[loss=0.1457, simple_loss=0.2092, pruned_loss=0.04115, over 4778.00 frames.], tot_loss[loss=0.149, simple_loss=0.2203, pruned_loss=0.03883, over 972706.12 frames.], batch size: 14, lr: 3.27e-04 +2022-05-05 15:30:24,120 INFO [train.py:715] (7/8) Epoch 6, batch 24500, loss[loss=0.153, simple_loss=0.2168, pruned_loss=0.0446, over 4901.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2205, pruned_loss=0.03914, over 972619.38 frames.], batch size: 17, lr: 3.27e-04 +2022-05-05 15:31:03,936 INFO [train.py:715] (7/8) Epoch 6, batch 24550, loss[loss=0.1508, simple_loss=0.2161, pruned_loss=0.04274, over 4839.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03842, over 972534.86 frames.], batch size: 15, lr: 3.27e-04 +2022-05-05 15:31:42,160 INFO [train.py:715] (7/8) Epoch 6, batch 24600, loss[loss=0.1418, simple_loss=0.2074, pruned_loss=0.03809, over 4813.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2198, pruned_loss=0.03835, over 972192.98 frames.], batch size: 13, lr: 3.27e-04 +2022-05-05 15:32:21,362 INFO [train.py:715] (7/8) Epoch 6, batch 24650, loss[loss=0.1575, simple_loss=0.2227, pruned_loss=0.04612, over 4844.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2198, pruned_loss=0.03884, over 971936.47 frames.], batch size: 30, lr: 3.27e-04 +2022-05-05 15:33:00,611 
INFO [train.py:715] (7/8) Epoch 6, batch 24700, loss[loss=0.1397, simple_loss=0.2259, pruned_loss=0.02672, over 4824.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2207, pruned_loss=0.03921, over 972284.64 frames.], batch size: 15, lr: 3.27e-04 +2022-05-05 15:33:39,471 INFO [train.py:715] (7/8) Epoch 6, batch 24750, loss[loss=0.1737, simple_loss=0.2454, pruned_loss=0.05105, over 4989.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2203, pruned_loss=0.03912, over 971597.57 frames.], batch size: 28, lr: 3.27e-04 +2022-05-05 15:34:17,836 INFO [train.py:715] (7/8) Epoch 6, batch 24800, loss[loss=0.1738, simple_loss=0.244, pruned_loss=0.0518, over 4977.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2212, pruned_loss=0.03962, over 971778.89 frames.], batch size: 39, lr: 3.27e-04 +2022-05-05 15:34:56,837 INFO [train.py:715] (7/8) Epoch 6, batch 24850, loss[loss=0.1819, simple_loss=0.2537, pruned_loss=0.05505, over 4859.00 frames.], tot_loss[loss=0.1502, simple_loss=0.2211, pruned_loss=0.03967, over 971557.47 frames.], batch size: 32, lr: 3.27e-04 +2022-05-05 15:35:36,648 INFO [train.py:715] (7/8) Epoch 6, batch 24900, loss[loss=0.1394, simple_loss=0.2074, pruned_loss=0.03571, over 4795.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2197, pruned_loss=0.03876, over 972653.15 frames.], batch size: 17, lr: 3.27e-04 +2022-05-05 15:36:14,918 INFO [train.py:715] (7/8) Epoch 6, batch 24950, loss[loss=0.179, simple_loss=0.2461, pruned_loss=0.05596, over 4876.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2199, pruned_loss=0.0391, over 972762.02 frames.], batch size: 32, lr: 3.27e-04 +2022-05-05 15:36:53,558 INFO [train.py:715] (7/8) Epoch 6, batch 25000, loss[loss=0.1209, simple_loss=0.1982, pruned_loss=0.02183, over 4930.00 frames.], tot_loss[loss=0.1493, simple_loss=0.2205, pruned_loss=0.03903, over 973253.54 frames.], batch size: 23, lr: 3.27e-04 +2022-05-05 15:37:32,635 INFO [train.py:715] (7/8) Epoch 6, batch 25050, loss[loss=0.1213, simple_loss=0.1955, pruned_loss=0.02354, over 4781.00 frames.], tot_loss[loss=0.1491, simple_loss=0.2202, pruned_loss=0.039, over 972549.23 frames.], batch size: 18, lr: 3.27e-04 +2022-05-05 15:38:11,571 INFO [train.py:715] (7/8) Epoch 6, batch 25100, loss[loss=0.1231, simple_loss=0.2104, pruned_loss=0.0179, over 4942.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.03866, over 972259.31 frames.], batch size: 23, lr: 3.27e-04 +2022-05-05 15:38:50,094 INFO [train.py:715] (7/8) Epoch 6, batch 25150, loss[loss=0.1537, simple_loss=0.2281, pruned_loss=0.03965, over 4912.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03871, over 971663.72 frames.], batch size: 17, lr: 3.27e-04 +2022-05-05 15:39:28,931 INFO [train.py:715] (7/8) Epoch 6, batch 25200, loss[loss=0.1584, simple_loss=0.2306, pruned_loss=0.04305, over 4791.00 frames.], tot_loss[loss=0.149, simple_loss=0.2197, pruned_loss=0.03915, over 971083.92 frames.], batch size: 14, lr: 3.27e-04 +2022-05-05 15:40:07,779 INFO [train.py:715] (7/8) Epoch 6, batch 25250, loss[loss=0.1268, simple_loss=0.2025, pruned_loss=0.02551, over 4789.00 frames.], tot_loss[loss=0.1494, simple_loss=0.2203, pruned_loss=0.03926, over 970476.94 frames.], batch size: 17, lr: 3.26e-04 +2022-05-05 15:40:46,084 INFO [train.py:715] (7/8) Epoch 6, batch 25300, loss[loss=0.1717, simple_loss=0.2317, pruned_loss=0.05588, over 4792.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2194, pruned_loss=0.03892, over 970760.08 frames.], batch size: 14, lr: 3.26e-04 +2022-05-05 15:41:24,368 INFO [train.py:715] (7/8) 
Epoch 6, batch 25350, loss[loss=0.1471, simple_loss=0.2292, pruned_loss=0.03251, over 4982.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2196, pruned_loss=0.03892, over 971861.27 frames.], batch size: 25, lr: 3.26e-04 +2022-05-05 15:42:03,175 INFO [train.py:715] (7/8) Epoch 6, batch 25400, loss[loss=0.1706, simple_loss=0.2446, pruned_loss=0.04831, over 4930.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2191, pruned_loss=0.03853, over 972029.02 frames.], batch size: 21, lr: 3.26e-04 +2022-05-05 15:42:41,995 INFO [train.py:715] (7/8) Epoch 6, batch 25450, loss[loss=0.1197, simple_loss=0.183, pruned_loss=0.02821, over 4743.00 frames.], tot_loss[loss=0.148, simple_loss=0.2192, pruned_loss=0.0384, over 972013.47 frames.], batch size: 12, lr: 3.26e-04 +2022-05-05 15:43:20,092 INFO [train.py:715] (7/8) Epoch 6, batch 25500, loss[loss=0.1458, simple_loss=0.2096, pruned_loss=0.04102, over 4770.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2187, pruned_loss=0.038, over 972292.23 frames.], batch size: 14, lr: 3.26e-04 +2022-05-05 15:43:58,582 INFO [train.py:715] (7/8) Epoch 6, batch 25550, loss[loss=0.1653, simple_loss=0.2392, pruned_loss=0.04568, over 4700.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2196, pruned_loss=0.03849, over 971962.78 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:44:37,704 INFO [train.py:715] (7/8) Epoch 6, batch 25600, loss[loss=0.1383, simple_loss=0.207, pruned_loss=0.03481, over 4978.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03819, over 972096.74 frames.], batch size: 14, lr: 3.26e-04 +2022-05-05 15:45:15,939 INFO [train.py:715] (7/8) Epoch 6, batch 25650, loss[loss=0.143, simple_loss=0.2123, pruned_loss=0.03687, over 4852.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2201, pruned_loss=0.03831, over 972903.96 frames.], batch size: 12, lr: 3.26e-04 +2022-05-05 15:45:54,739 INFO [train.py:715] (7/8) Epoch 6, batch 25700, loss[loss=0.1253, simple_loss=0.1931, pruned_loss=0.02875, over 4748.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2199, pruned_loss=0.03826, over 972779.18 frames.], batch size: 16, lr: 3.26e-04 +2022-05-05 15:46:34,046 INFO [train.py:715] (7/8) Epoch 6, batch 25750, loss[loss=0.1254, simple_loss=0.1922, pruned_loss=0.02934, over 4821.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2202, pruned_loss=0.03818, over 973405.69 frames.], batch size: 13, lr: 3.26e-04 +2022-05-05 15:47:12,319 INFO [train.py:715] (7/8) Epoch 6, batch 25800, loss[loss=0.1406, simple_loss=0.2213, pruned_loss=0.02993, over 4989.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.03794, over 973674.11 frames.], batch size: 28, lr: 3.26e-04 +2022-05-05 15:47:50,578 INFO [train.py:715] (7/8) Epoch 6, batch 25850, loss[loss=0.1521, simple_loss=0.2295, pruned_loss=0.03732, over 4974.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2193, pruned_loss=0.03781, over 973582.04 frames.], batch size: 39, lr: 3.26e-04 +2022-05-05 15:48:29,221 INFO [train.py:715] (7/8) Epoch 6, batch 25900, loss[loss=0.1487, simple_loss=0.2188, pruned_loss=0.03932, over 4757.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03796, over 973216.90 frames.], batch size: 19, lr: 3.26e-04 +2022-05-05 15:49:08,368 INFO [train.py:715] (7/8) Epoch 6, batch 25950, loss[loss=0.1456, simple_loss=0.212, pruned_loss=0.03954, over 4844.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2203, pruned_loss=0.03838, over 972206.97 frames.], batch size: 30, lr: 3.26e-04 +2022-05-05 15:49:46,043 INFO [train.py:715] (7/8) Epoch 6, batch 26000, 
loss[loss=0.1635, simple_loss=0.2324, pruned_loss=0.0473, over 4866.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2201, pruned_loss=0.03853, over 971622.25 frames.], batch size: 16, lr: 3.26e-04 +2022-05-05 15:50:24,228 INFO [train.py:715] (7/8) Epoch 6, batch 26050, loss[loss=0.1588, simple_loss=0.2304, pruned_loss=0.04357, over 4696.00 frames.], tot_loss[loss=0.149, simple_loss=0.2203, pruned_loss=0.03888, over 971868.11 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:51:03,218 INFO [train.py:715] (7/8) Epoch 6, batch 26100, loss[loss=0.1228, simple_loss=0.1996, pruned_loss=0.02293, over 4796.00 frames.], tot_loss[loss=0.1478, simple_loss=0.219, pruned_loss=0.03837, over 973568.42 frames.], batch size: 24, lr: 3.26e-04 +2022-05-05 15:51:41,622 INFO [train.py:715] (7/8) Epoch 6, batch 26150, loss[loss=0.1429, simple_loss=0.2091, pruned_loss=0.03838, over 4904.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2192, pruned_loss=0.03855, over 974030.15 frames.], batch size: 19, lr: 3.26e-04 +2022-05-05 15:52:20,122 INFO [train.py:715] (7/8) Epoch 6, batch 26200, loss[loss=0.1691, simple_loss=0.2451, pruned_loss=0.04656, over 4795.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.03811, over 973489.26 frames.], batch size: 21, lr: 3.26e-04 +2022-05-05 15:52:58,597 INFO [train.py:715] (7/8) Epoch 6, batch 26250, loss[loss=0.1548, simple_loss=0.2255, pruned_loss=0.04201, over 4994.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03839, over 974235.49 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:53:37,250 INFO [train.py:715] (7/8) Epoch 6, batch 26300, loss[loss=0.1927, simple_loss=0.2601, pruned_loss=0.06268, over 4956.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03807, over 973275.55 frames.], batch size: 39, lr: 3.26e-04 +2022-05-05 15:54:15,325 INFO [train.py:715] (7/8) Epoch 6, batch 26350, loss[loss=0.138, simple_loss=0.2154, pruned_loss=0.03025, over 4853.00 frames.], tot_loss[loss=0.148, simple_loss=0.2193, pruned_loss=0.03837, over 973234.88 frames.], batch size: 20, lr: 3.26e-04 +2022-05-05 15:54:53,796 INFO [train.py:715] (7/8) Epoch 6, batch 26400, loss[loss=0.1389, simple_loss=0.214, pruned_loss=0.03191, over 4967.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2203, pruned_loss=0.03909, over 973652.79 frames.], batch size: 15, lr: 3.26e-04 +2022-05-05 15:55:33,107 INFO [train.py:715] (7/8) Epoch 6, batch 26450, loss[loss=0.1582, simple_loss=0.2278, pruned_loss=0.04427, over 4822.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03813, over 973421.99 frames.], batch size: 25, lr: 3.26e-04 +2022-05-05 15:56:11,692 INFO [train.py:715] (7/8) Epoch 6, batch 26500, loss[loss=0.1088, simple_loss=0.1836, pruned_loss=0.01702, over 4804.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2185, pruned_loss=0.03756, over 972340.57 frames.], batch size: 21, lr: 3.26e-04 +2022-05-05 15:56:50,069 INFO [train.py:715] (7/8) Epoch 6, batch 26550, loss[loss=0.1494, simple_loss=0.2333, pruned_loss=0.0328, over 4938.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03696, over 972152.56 frames.], batch size: 21, lr: 3.26e-04 +2022-05-05 15:57:28,901 INFO [train.py:715] (7/8) Epoch 6, batch 26600, loss[loss=0.1388, simple_loss=0.206, pruned_loss=0.0358, over 4749.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2189, pruned_loss=0.03771, over 972287.19 frames.], batch size: 16, lr: 3.26e-04 +2022-05-05 15:58:07,543 INFO [train.py:715] (7/8) Epoch 6, batch 26650, loss[loss=0.1816, 
simple_loss=0.2443, pruned_loss=0.05944, over 4906.00 frames.], tot_loss[loss=0.147, simple_loss=0.2188, pruned_loss=0.03765, over 971928.05 frames.], batch size: 17, lr: 3.26e-04 +2022-05-05 15:58:46,267 INFO [train.py:715] (7/8) Epoch 6, batch 26700, loss[loss=0.1517, simple_loss=0.2292, pruned_loss=0.03708, over 4691.00 frames.], tot_loss[loss=0.147, simple_loss=0.219, pruned_loss=0.03747, over 972531.20 frames.], batch size: 15, lr: 3.25e-04 +2022-05-05 15:59:24,555 INFO [train.py:715] (7/8) Epoch 6, batch 26750, loss[loss=0.1564, simple_loss=0.2379, pruned_loss=0.0375, over 4865.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03772, over 971429.22 frames.], batch size: 20, lr: 3.25e-04 +2022-05-05 16:00:03,786 INFO [train.py:715] (7/8) Epoch 6, batch 26800, loss[loss=0.1645, simple_loss=0.2246, pruned_loss=0.05219, over 4765.00 frames.], tot_loss[loss=0.148, simple_loss=0.2196, pruned_loss=0.0382, over 971262.10 frames.], batch size: 19, lr: 3.25e-04 +2022-05-05 16:00:41,899 INFO [train.py:715] (7/8) Epoch 6, batch 26850, loss[loss=0.1572, simple_loss=0.2232, pruned_loss=0.04561, over 4822.00 frames.], tot_loss[loss=0.147, simple_loss=0.219, pruned_loss=0.03751, over 971164.10 frames.], batch size: 26, lr: 3.25e-04 +2022-05-05 16:01:20,554 INFO [train.py:715] (7/8) Epoch 6, batch 26900, loss[loss=0.1403, simple_loss=0.2204, pruned_loss=0.03014, over 4961.00 frames.], tot_loss[loss=0.147, simple_loss=0.2188, pruned_loss=0.03758, over 971326.47 frames.], batch size: 24, lr: 3.25e-04 +2022-05-05 16:01:59,794 INFO [train.py:715] (7/8) Epoch 6, batch 26950, loss[loss=0.1506, simple_loss=0.2282, pruned_loss=0.03652, over 4884.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03796, over 971778.58 frames.], batch size: 19, lr: 3.25e-04 +2022-05-05 16:02:39,040 INFO [train.py:715] (7/8) Epoch 6, batch 27000, loss[loss=0.1545, simple_loss=0.2206, pruned_loss=0.04424, over 4868.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03797, over 971398.64 frames.], batch size: 20, lr: 3.25e-04 +2022-05-05 16:02:39,041 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 16:02:48,795 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1088, simple_loss=0.1938, pruned_loss=0.01188, over 914524.00 frames. 
+2022-05-05 16:03:28,073 INFO [train.py:715] (7/8) Epoch 6, batch 27050, loss[loss=0.1098, simple_loss=0.1893, pruned_loss=0.01515, over 4798.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2179, pruned_loss=0.03794, over 972221.70 frames.], batch size: 21, lr: 3.25e-04 +2022-05-05 16:04:06,805 INFO [train.py:715] (7/8) Epoch 6, batch 27100, loss[loss=0.1294, simple_loss=0.1971, pruned_loss=0.03088, over 4851.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2179, pruned_loss=0.03791, over 971968.74 frames.], batch size: 15, lr: 3.25e-04 +2022-05-05 16:04:45,440 INFO [train.py:715] (7/8) Epoch 6, batch 27150, loss[loss=0.1638, simple_loss=0.2278, pruned_loss=0.04993, over 4747.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2186, pruned_loss=0.03849, over 971891.75 frames.], batch size: 16, lr: 3.25e-04 +2022-05-05 16:05:25,174 INFO [train.py:715] (7/8) Epoch 6, batch 27200, loss[loss=0.162, simple_loss=0.2201, pruned_loss=0.052, over 4956.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2181, pruned_loss=0.03813, over 972105.96 frames.], batch size: 35, lr: 3.25e-04 +2022-05-05 16:06:03,411 INFO [train.py:715] (7/8) Epoch 6, batch 27250, loss[loss=0.1588, simple_loss=0.2386, pruned_loss=0.03955, over 4759.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2185, pruned_loss=0.03813, over 972694.55 frames.], batch size: 16, lr: 3.25e-04 +2022-05-05 16:06:43,063 INFO [train.py:715] (7/8) Epoch 6, batch 27300, loss[loss=0.1283, simple_loss=0.2034, pruned_loss=0.02654, over 4917.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2177, pruned_loss=0.0378, over 972522.91 frames.], batch size: 18, lr: 3.25e-04 +2022-05-05 16:07:22,055 INFO [train.py:715] (7/8) Epoch 6, batch 27350, loss[loss=0.1269, simple_loss=0.1998, pruned_loss=0.02706, over 4799.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2173, pruned_loss=0.03754, over 972655.11 frames.], batch size: 24, lr: 3.25e-04 +2022-05-05 16:08:01,165 INFO [train.py:715] (7/8) Epoch 6, batch 27400, loss[loss=0.1344, simple_loss=0.1997, pruned_loss=0.03459, over 4799.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2178, pruned_loss=0.03759, over 972035.60 frames.], batch size: 12, lr: 3.25e-04 +2022-05-05 16:08:39,771 INFO [train.py:715] (7/8) Epoch 6, batch 27450, loss[loss=0.1658, simple_loss=0.2297, pruned_loss=0.05096, over 4954.00 frames.], tot_loss[loss=0.1477, simple_loss=0.219, pruned_loss=0.03819, over 972004.74 frames.], batch size: 21, lr: 3.25e-04 +2022-05-05 16:09:18,813 INFO [train.py:715] (7/8) Epoch 6, batch 27500, loss[loss=0.1836, simple_loss=0.2524, pruned_loss=0.05737, over 4981.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03864, over 972380.95 frames.], batch size: 35, lr: 3.25e-04 +2022-05-05 16:09:58,186 INFO [train.py:715] (7/8) Epoch 6, batch 27550, loss[loss=0.133, simple_loss=0.2035, pruned_loss=0.03118, over 4796.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2192, pruned_loss=0.03829, over 971746.46 frames.], batch size: 13, lr: 3.25e-04 +2022-05-05 16:10:36,912 INFO [train.py:715] (7/8) Epoch 6, batch 27600, loss[loss=0.1546, simple_loss=0.2403, pruned_loss=0.03444, over 4972.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2195, pruned_loss=0.03857, over 972214.24 frames.], batch size: 24, lr: 3.25e-04 +2022-05-05 16:11:15,430 INFO [train.py:715] (7/8) Epoch 6, batch 27650, loss[loss=0.1178, simple_loss=0.197, pruned_loss=0.01931, over 4812.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2192, pruned_loss=0.0382, over 971569.10 frames.], batch size: 25, lr: 3.25e-04 +2022-05-05 16:11:54,439 
INFO [train.py:715] (7/8) Epoch 6, batch 27700, loss[loss=0.1322, simple_loss=0.208, pruned_loss=0.02821, over 4775.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03839, over 971018.07 frames.], batch size: 18, lr: 3.25e-04 +2022-05-05 16:12:32,979 INFO [train.py:715] (7/8) Epoch 6, batch 27750, loss[loss=0.134, simple_loss=0.1992, pruned_loss=0.03439, over 4769.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03791, over 971527.16 frames.], batch size: 12, lr: 3.25e-04 +2022-05-05 16:13:12,188 INFO [train.py:715] (7/8) Epoch 6, batch 27800, loss[loss=0.1308, simple_loss=0.2088, pruned_loss=0.02644, over 4862.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2195, pruned_loss=0.03811, over 972262.53 frames.], batch size: 20, lr: 3.25e-04 +2022-05-05 16:13:51,238 INFO [train.py:715] (7/8) Epoch 6, batch 27850, loss[loss=0.165, simple_loss=0.2326, pruned_loss=0.04869, over 4959.00 frames.], tot_loss[loss=0.149, simple_loss=0.2203, pruned_loss=0.03891, over 972450.22 frames.], batch size: 35, lr: 3.25e-04 +2022-05-05 16:14:30,896 INFO [train.py:715] (7/8) Epoch 6, batch 27900, loss[loss=0.1415, simple_loss=0.2101, pruned_loss=0.03642, over 4924.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2199, pruned_loss=0.03863, over 973321.10 frames.], batch size: 29, lr: 3.25e-04 +2022-05-05 16:15:09,373 INFO [train.py:715] (7/8) Epoch 6, batch 27950, loss[loss=0.1412, simple_loss=0.2097, pruned_loss=0.03637, over 4873.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2193, pruned_loss=0.03822, over 972965.05 frames.], batch size: 32, lr: 3.25e-04 +2022-05-05 16:15:48,255 INFO [train.py:715] (7/8) Epoch 6, batch 28000, loss[loss=0.1234, simple_loss=0.2054, pruned_loss=0.02074, over 4927.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2191, pruned_loss=0.03793, over 971915.90 frames.], batch size: 29, lr: 3.25e-04 +2022-05-05 16:16:27,394 INFO [train.py:715] (7/8) Epoch 6, batch 28050, loss[loss=0.161, simple_loss=0.2219, pruned_loss=0.05008, over 4987.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.03796, over 972438.77 frames.], batch size: 16, lr: 3.25e-04 +2022-05-05 16:17:06,026 INFO [train.py:715] (7/8) Epoch 6, batch 28100, loss[loss=0.1838, simple_loss=0.2686, pruned_loss=0.04949, over 4964.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2194, pruned_loss=0.03754, over 972130.40 frames.], batch size: 24, lr: 3.25e-04 +2022-05-05 16:17:44,950 INFO [train.py:715] (7/8) Epoch 6, batch 28150, loss[loss=0.1275, simple_loss=0.2017, pruned_loss=0.02661, over 4910.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2196, pruned_loss=0.03785, over 972536.23 frames.], batch size: 23, lr: 3.24e-04 +2022-05-05 16:18:24,087 INFO [train.py:715] (7/8) Epoch 6, batch 28200, loss[loss=0.1624, simple_loss=0.2497, pruned_loss=0.03759, over 4757.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03812, over 973059.91 frames.], batch size: 16, lr: 3.24e-04 +2022-05-05 16:19:03,411 INFO [train.py:715] (7/8) Epoch 6, batch 28250, loss[loss=0.1688, simple_loss=0.2406, pruned_loss=0.04853, over 4951.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.03851, over 972830.52 frames.], batch size: 14, lr: 3.24e-04 +2022-05-05 16:19:41,790 INFO [train.py:715] (7/8) Epoch 6, batch 28300, loss[loss=0.1705, simple_loss=0.2365, pruned_loss=0.05224, over 4955.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03859, over 972770.61 frames.], batch size: 39, lr: 3.24e-04 +2022-05-05 16:20:20,028 INFO [train.py:715] 
(7/8) Epoch 6, batch 28350, loss[loss=0.1406, simple_loss=0.2159, pruned_loss=0.03266, over 4988.00 frames.], tot_loss[loss=0.1486, simple_loss=0.22, pruned_loss=0.03863, over 972795.58 frames.], batch size: 28, lr: 3.24e-04 +2022-05-05 16:20:59,873 INFO [train.py:715] (7/8) Epoch 6, batch 28400, loss[loss=0.1372, simple_loss=0.2069, pruned_loss=0.03378, over 4809.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2198, pruned_loss=0.03843, over 972569.44 frames.], batch size: 27, lr: 3.24e-04 +2022-05-05 16:21:38,666 INFO [train.py:715] (7/8) Epoch 6, batch 28450, loss[loss=0.1477, simple_loss=0.2195, pruned_loss=0.03795, over 4820.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03837, over 972468.57 frames.], batch size: 13, lr: 3.24e-04 +2022-05-05 16:22:17,507 INFO [train.py:715] (7/8) Epoch 6, batch 28500, loss[loss=0.1167, simple_loss=0.1906, pruned_loss=0.02138, over 4973.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.03846, over 972295.70 frames.], batch size: 14, lr: 3.24e-04 +2022-05-05 16:22:56,650 INFO [train.py:715] (7/8) Epoch 6, batch 28550, loss[loss=0.1361, simple_loss=0.2038, pruned_loss=0.03421, over 4950.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2193, pruned_loss=0.03817, over 972407.70 frames.], batch size: 21, lr: 3.24e-04 +2022-05-05 16:23:36,087 INFO [train.py:715] (7/8) Epoch 6, batch 28600, loss[loss=0.1371, simple_loss=0.2011, pruned_loss=0.03659, over 4892.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2191, pruned_loss=0.03806, over 971990.02 frames.], batch size: 16, lr: 3.24e-04 +2022-05-05 16:24:14,189 INFO [train.py:715] (7/8) Epoch 6, batch 28650, loss[loss=0.1692, simple_loss=0.2238, pruned_loss=0.05726, over 4850.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2193, pruned_loss=0.03776, over 972066.02 frames.], batch size: 13, lr: 3.24e-04 +2022-05-05 16:24:52,990 INFO [train.py:715] (7/8) Epoch 6, batch 28700, loss[loss=0.1144, simple_loss=0.1843, pruned_loss=0.02228, over 4794.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.03798, over 973421.28 frames.], batch size: 14, lr: 3.24e-04 +2022-05-05 16:25:32,174 INFO [train.py:715] (7/8) Epoch 6, batch 28750, loss[loss=0.1524, simple_loss=0.2116, pruned_loss=0.04657, over 4864.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03776, over 973482.24 frames.], batch size: 32, lr: 3.24e-04 +2022-05-05 16:26:10,898 INFO [train.py:715] (7/8) Epoch 6, batch 28800, loss[loss=0.2009, simple_loss=0.2818, pruned_loss=0.06005, over 4833.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2191, pruned_loss=0.03763, over 973416.75 frames.], batch size: 26, lr: 3.24e-04 +2022-05-05 16:26:49,769 INFO [train.py:715] (7/8) Epoch 6, batch 28850, loss[loss=0.1438, simple_loss=0.2108, pruned_loss=0.03834, over 4833.00 frames.], tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.03806, over 972772.18 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:27:28,069 INFO [train.py:715] (7/8) Epoch 6, batch 28900, loss[loss=0.1392, simple_loss=0.2014, pruned_loss=0.03846, over 4936.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2191, pruned_loss=0.03817, over 972570.69 frames.], batch size: 21, lr: 3.24e-04 +2022-05-05 16:28:07,514 INFO [train.py:715] (7/8) Epoch 6, batch 28950, loss[loss=0.1198, simple_loss=0.1978, pruned_loss=0.02091, over 4936.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.03774, over 972162.33 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:28:45,750 INFO [train.py:715] (7/8) Epoch 6, batch 
29000, loss[loss=0.1414, simple_loss=0.2137, pruned_loss=0.0346, over 4776.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03796, over 972161.13 frames.], batch size: 14, lr: 3.24e-04 +2022-05-05 16:29:23,902 INFO [train.py:715] (7/8) Epoch 6, batch 29050, loss[loss=0.1378, simple_loss=0.2266, pruned_loss=0.0245, over 4875.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03822, over 972207.78 frames.], batch size: 22, lr: 3.24e-04 +2022-05-05 16:30:02,951 INFO [train.py:715] (7/8) Epoch 6, batch 29100, loss[loss=0.1198, simple_loss=0.1947, pruned_loss=0.02245, over 4921.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03763, over 972120.67 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:30:41,837 INFO [train.py:715] (7/8) Epoch 6, batch 29150, loss[loss=0.1268, simple_loss=0.1988, pruned_loss=0.02737, over 4937.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2182, pruned_loss=0.03702, over 972577.93 frames.], batch size: 23, lr: 3.24e-04 +2022-05-05 16:31:20,670 INFO [train.py:715] (7/8) Epoch 6, batch 29200, loss[loss=0.1674, simple_loss=0.2398, pruned_loss=0.04744, over 4780.00 frames.], tot_loss[loss=0.1473, simple_loss=0.219, pruned_loss=0.03778, over 972887.50 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:31:59,911 INFO [train.py:715] (7/8) Epoch 6, batch 29250, loss[loss=0.1259, simple_loss=0.1995, pruned_loss=0.0261, over 4805.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2191, pruned_loss=0.03766, over 972175.16 frames.], batch size: 25, lr: 3.24e-04 +2022-05-05 16:32:39,921 INFO [train.py:715] (7/8) Epoch 6, batch 29300, loss[loss=0.1552, simple_loss=0.216, pruned_loss=0.04721, over 4828.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2193, pruned_loss=0.03793, over 972516.44 frames.], batch size: 15, lr: 3.24e-04 +2022-05-05 16:33:18,208 INFO [train.py:715] (7/8) Epoch 6, batch 29350, loss[loss=0.1609, simple_loss=0.222, pruned_loss=0.04983, over 4793.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.03732, over 971153.06 frames.], batch size: 24, lr: 3.24e-04 +2022-05-05 16:33:57,191 INFO [train.py:715] (7/8) Epoch 6, batch 29400, loss[loss=0.1768, simple_loss=0.2634, pruned_loss=0.04504, over 4866.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.03738, over 971164.78 frames.], batch size: 16, lr: 3.24e-04 +2022-05-05 16:34:36,594 INFO [train.py:715] (7/8) Epoch 6, batch 29450, loss[loss=0.1357, simple_loss=0.2002, pruned_loss=0.03562, over 4746.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03745, over 970775.21 frames.], batch size: 16, lr: 3.24e-04 +2022-05-05 16:35:15,803 INFO [train.py:715] (7/8) Epoch 6, batch 29500, loss[loss=0.1456, simple_loss=0.2219, pruned_loss=0.03469, over 4933.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2187, pruned_loss=0.03779, over 972261.26 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:35:53,792 INFO [train.py:715] (7/8) Epoch 6, batch 29550, loss[loss=0.1325, simple_loss=0.2033, pruned_loss=0.03086, over 4913.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03736, over 971717.37 frames.], batch size: 18, lr: 3.24e-04 +2022-05-05 16:36:33,142 INFO [train.py:715] (7/8) Epoch 6, batch 29600, loss[loss=0.148, simple_loss=0.2219, pruned_loss=0.03698, over 4986.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2183, pruned_loss=0.03772, over 971671.10 frames.], batch size: 24, lr: 3.24e-04 +2022-05-05 16:37:12,534 INFO [train.py:715] (7/8) Epoch 6, batch 29650, loss[loss=0.1284, 
simple_loss=0.2065, pruned_loss=0.0251, over 4960.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2177, pruned_loss=0.03761, over 971086.48 frames.], batch size: 24, lr: 3.23e-04 +2022-05-05 16:37:51,063 INFO [train.py:715] (7/8) Epoch 6, batch 29700, loss[loss=0.1184, simple_loss=0.1838, pruned_loss=0.02646, over 4779.00 frames.], tot_loss[loss=0.147, simple_loss=0.218, pruned_loss=0.03798, over 971873.80 frames.], batch size: 18, lr: 3.23e-04 +2022-05-05 16:38:29,767 INFO [train.py:715] (7/8) Epoch 6, batch 29750, loss[loss=0.1249, simple_loss=0.1925, pruned_loss=0.02868, over 4813.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2183, pruned_loss=0.03806, over 972013.52 frames.], batch size: 21, lr: 3.23e-04 +2022-05-05 16:39:08,778 INFO [train.py:715] (7/8) Epoch 6, batch 29800, loss[loss=0.1162, simple_loss=0.1862, pruned_loss=0.02313, over 4794.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2185, pruned_loss=0.03795, over 972664.56 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:39:48,203 INFO [train.py:715] (7/8) Epoch 6, batch 29850, loss[loss=0.1427, simple_loss=0.2088, pruned_loss=0.03828, over 4972.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03792, over 973397.52 frames.], batch size: 25, lr: 3.23e-04 +2022-05-05 16:40:26,715 INFO [train.py:715] (7/8) Epoch 6, batch 29900, loss[loss=0.1273, simple_loss=0.193, pruned_loss=0.03084, over 4980.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2189, pruned_loss=0.03838, over 973068.04 frames.], batch size: 15, lr: 3.23e-04 +2022-05-05 16:41:05,702 INFO [train.py:715] (7/8) Epoch 6, batch 29950, loss[loss=0.1756, simple_loss=0.2504, pruned_loss=0.05039, over 4797.00 frames.], tot_loss[loss=0.148, simple_loss=0.2191, pruned_loss=0.0385, over 972190.12 frames.], batch size: 25, lr: 3.23e-04 +2022-05-05 16:41:45,053 INFO [train.py:715] (7/8) Epoch 6, batch 30000, loss[loss=0.1381, simple_loss=0.2003, pruned_loss=0.03795, over 4802.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2192, pruned_loss=0.03857, over 971216.65 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:41:45,054 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 16:41:54,714 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1088, simple_loss=0.1938, pruned_loss=0.0119, over 914524.00 frames. 
+2022-05-05 16:42:34,423 INFO [train.py:715] (7/8) Epoch 6, batch 30050, loss[loss=0.1568, simple_loss=0.2411, pruned_loss=0.03624, over 4964.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2185, pruned_loss=0.03822, over 971051.93 frames.], batch size: 24, lr: 3.23e-04 +2022-05-05 16:43:12,814 INFO [train.py:715] (7/8) Epoch 6, batch 30100, loss[loss=0.1425, simple_loss=0.2261, pruned_loss=0.02945, over 4804.00 frames.], tot_loss[loss=0.1478, simple_loss=0.219, pruned_loss=0.03831, over 971446.83 frames.], batch size: 25, lr: 3.23e-04 +2022-05-05 16:43:51,566 INFO [train.py:715] (7/8) Epoch 6, batch 30150, loss[loss=0.1317, simple_loss=0.204, pruned_loss=0.02968, over 4976.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2191, pruned_loss=0.03811, over 971370.26 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:44:30,966 INFO [train.py:715] (7/8) Epoch 6, batch 30200, loss[loss=0.1141, simple_loss=0.2013, pruned_loss=0.01348, over 4767.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2193, pruned_loss=0.03827, over 972466.44 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:45:10,342 INFO [train.py:715] (7/8) Epoch 6, batch 30250, loss[loss=0.1521, simple_loss=0.2285, pruned_loss=0.03789, over 4915.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03848, over 972725.46 frames.], batch size: 18, lr: 3.23e-04 +2022-05-05 16:45:48,515 INFO [train.py:715] (7/8) Epoch 6, batch 30300, loss[loss=0.1464, simple_loss=0.2081, pruned_loss=0.04229, over 4992.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2194, pruned_loss=0.03839, over 974002.39 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:46:27,515 INFO [train.py:715] (7/8) Epoch 6, batch 30350, loss[loss=0.1542, simple_loss=0.2157, pruned_loss=0.04633, over 4966.00 frames.], tot_loss[loss=0.148, simple_loss=0.2193, pruned_loss=0.03834, over 974288.07 frames.], batch size: 35, lr: 3.23e-04 +2022-05-05 16:47:06,584 INFO [train.py:715] (7/8) Epoch 6, batch 30400, loss[loss=0.1626, simple_loss=0.2249, pruned_loss=0.05018, over 4967.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2193, pruned_loss=0.03783, over 974034.45 frames.], batch size: 15, lr: 3.23e-04 +2022-05-05 16:47:45,261 INFO [train.py:715] (7/8) Epoch 6, batch 30450, loss[loss=0.1533, simple_loss=0.2255, pruned_loss=0.04052, over 4959.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2188, pruned_loss=0.03751, over 974018.94 frames.], batch size: 15, lr: 3.23e-04 +2022-05-05 16:48:23,946 INFO [train.py:715] (7/8) Epoch 6, batch 30500, loss[loss=0.1428, simple_loss=0.2132, pruned_loss=0.03625, over 4776.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03756, over 973991.44 frames.], batch size: 14, lr: 3.23e-04 +2022-05-05 16:49:02,695 INFO [train.py:715] (7/8) Epoch 6, batch 30550, loss[loss=0.1297, simple_loss=0.2048, pruned_loss=0.02727, over 4812.00 frames.], tot_loss[loss=0.146, simple_loss=0.2177, pruned_loss=0.0371, over 973591.81 frames.], batch size: 25, lr: 3.23e-04 +2022-05-05 16:49:41,852 INFO [train.py:715] (7/8) Epoch 6, batch 30600, loss[loss=0.1382, simple_loss=0.2069, pruned_loss=0.03472, over 4824.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2179, pruned_loss=0.03762, over 973769.94 frames.], batch size: 12, lr: 3.23e-04 +2022-05-05 16:50:20,377 INFO [train.py:715] (7/8) Epoch 6, batch 30650, loss[loss=0.185, simple_loss=0.2611, pruned_loss=0.05449, over 4889.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03822, over 973759.23 frames.], batch size: 32, lr: 3.23e-04 +2022-05-05 16:50:59,232 
INFO [train.py:715] (7/8) Epoch 6, batch 30700, loss[loss=0.09813, simple_loss=0.1601, pruned_loss=0.01809, over 4753.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2184, pruned_loss=0.03821, over 972915.99 frames.], batch size: 12, lr: 3.23e-04 +2022-05-05 16:51:38,191 INFO [train.py:715] (7/8) Epoch 6, batch 30750, loss[loss=0.1613, simple_loss=0.2255, pruned_loss=0.04861, over 4969.00 frames.], tot_loss[loss=0.1479, simple_loss=0.219, pruned_loss=0.03839, over 972734.30 frames.], batch size: 35, lr: 3.23e-04 +2022-05-05 16:52:17,035 INFO [train.py:715] (7/8) Epoch 6, batch 30800, loss[loss=0.1574, simple_loss=0.2219, pruned_loss=0.0464, over 4940.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2195, pruned_loss=0.03868, over 973324.26 frames.], batch size: 29, lr: 3.23e-04 +2022-05-05 16:52:55,438 INFO [train.py:715] (7/8) Epoch 6, batch 30850, loss[loss=0.1268, simple_loss=0.2041, pruned_loss=0.02478, over 4947.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.03798, over 973937.76 frames.], batch size: 21, lr: 3.23e-04 +2022-05-05 16:53:34,167 INFO [train.py:715] (7/8) Epoch 6, batch 30900, loss[loss=0.1799, simple_loss=0.2565, pruned_loss=0.0517, over 4863.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2197, pruned_loss=0.03825, over 973542.58 frames.], batch size: 20, lr: 3.23e-04 +2022-05-05 16:54:13,774 INFO [train.py:715] (7/8) Epoch 6, batch 30950, loss[loss=0.1263, simple_loss=0.2011, pruned_loss=0.02568, over 4979.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2199, pruned_loss=0.03842, over 973535.62 frames.], batch size: 28, lr: 3.23e-04 +2022-05-05 16:54:51,906 INFO [train.py:715] (7/8) Epoch 6, batch 31000, loss[loss=0.1404, simple_loss=0.213, pruned_loss=0.03385, over 4741.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2191, pruned_loss=0.03758, over 973435.20 frames.], batch size: 16, lr: 3.23e-04 +2022-05-05 16:55:30,911 INFO [train.py:715] (7/8) Epoch 6, batch 31050, loss[loss=0.1319, simple_loss=0.2023, pruned_loss=0.0308, over 4953.00 frames.], tot_loss[loss=0.148, simple_loss=0.2201, pruned_loss=0.03798, over 973296.56 frames.], batch size: 21, lr: 3.23e-04 +2022-05-05 16:56:10,164 INFO [train.py:715] (7/8) Epoch 6, batch 31100, loss[loss=0.1429, simple_loss=0.2148, pruned_loss=0.03549, over 4867.00 frames.], tot_loss[loss=0.148, simple_loss=0.22, pruned_loss=0.03801, over 973356.51 frames.], batch size: 20, lr: 3.22e-04 +2022-05-05 16:56:51,377 INFO [train.py:715] (7/8) Epoch 6, batch 31150, loss[loss=0.1474, simple_loss=0.2177, pruned_loss=0.03849, over 4825.00 frames.], tot_loss[loss=0.149, simple_loss=0.2207, pruned_loss=0.03865, over 972850.15 frames.], batch size: 27, lr: 3.22e-04 +2022-05-05 16:57:30,158 INFO [train.py:715] (7/8) Epoch 6, batch 31200, loss[loss=0.1543, simple_loss=0.2196, pruned_loss=0.04455, over 4950.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03859, over 971908.70 frames.], batch size: 35, lr: 3.22e-04 +2022-05-05 16:58:09,411 INFO [train.py:715] (7/8) Epoch 6, batch 31250, loss[loss=0.1324, simple_loss=0.2141, pruned_loss=0.02539, over 4900.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2202, pruned_loss=0.03857, over 972185.32 frames.], batch size: 17, lr: 3.22e-04 +2022-05-05 16:58:48,244 INFO [train.py:715] (7/8) Epoch 6, batch 31300, loss[loss=0.2052, simple_loss=0.2687, pruned_loss=0.07089, over 4921.00 frames.], tot_loss[loss=0.1487, simple_loss=0.2199, pruned_loss=0.03872, over 971926.38 frames.], batch size: 39, lr: 3.22e-04 +2022-05-05 16:59:27,121 INFO [train.py:715] (7/8) 
Epoch 6, batch 31350, loss[loss=0.1325, simple_loss=0.2043, pruned_loss=0.03038, over 4917.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2194, pruned_loss=0.03849, over 973146.69 frames.], batch size: 17, lr: 3.22e-04 +2022-05-05 17:00:06,354 INFO [train.py:715] (7/8) Epoch 6, batch 31400, loss[loss=0.1113, simple_loss=0.1846, pruned_loss=0.01897, over 4976.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03812, over 972524.60 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:00:45,701 INFO [train.py:715] (7/8) Epoch 6, batch 31450, loss[loss=0.1967, simple_loss=0.2636, pruned_loss=0.06486, over 4977.00 frames.], tot_loss[loss=0.148, simple_loss=0.2191, pruned_loss=0.03845, over 973057.28 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:01:23,994 INFO [train.py:715] (7/8) Epoch 6, batch 31500, loss[loss=0.1281, simple_loss=0.2071, pruned_loss=0.02459, over 4843.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2192, pruned_loss=0.03832, over 972784.86 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:02:02,412 INFO [train.py:715] (7/8) Epoch 6, batch 31550, loss[loss=0.1166, simple_loss=0.1898, pruned_loss=0.02173, over 4702.00 frames.], tot_loss[loss=0.148, simple_loss=0.2194, pruned_loss=0.03827, over 972775.80 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:02:41,953 INFO [train.py:715] (7/8) Epoch 6, batch 31600, loss[loss=0.1363, simple_loss=0.2053, pruned_loss=0.03369, over 4779.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2189, pruned_loss=0.03847, over 972277.48 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:03:21,196 INFO [train.py:715] (7/8) Epoch 6, batch 31650, loss[loss=0.1192, simple_loss=0.1887, pruned_loss=0.02487, over 4818.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2193, pruned_loss=0.03866, over 971401.78 frames.], batch size: 12, lr: 3.22e-04 +2022-05-05 17:03:59,733 INFO [train.py:715] (7/8) Epoch 6, batch 31700, loss[loss=0.1041, simple_loss=0.1747, pruned_loss=0.01671, over 4752.00 frames.], tot_loss[loss=0.147, simple_loss=0.2182, pruned_loss=0.03791, over 971496.47 frames.], batch size: 12, lr: 3.22e-04 +2022-05-05 17:04:38,253 INFO [train.py:715] (7/8) Epoch 6, batch 31750, loss[loss=0.141, simple_loss=0.1974, pruned_loss=0.04231, over 4773.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2182, pruned_loss=0.03734, over 971223.42 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:05:17,757 INFO [train.py:715] (7/8) Epoch 6, batch 31800, loss[loss=0.1697, simple_loss=0.2494, pruned_loss=0.045, over 4793.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03739, over 971347.52 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:05:56,239 INFO [train.py:715] (7/8) Epoch 6, batch 31850, loss[loss=0.15, simple_loss=0.2243, pruned_loss=0.03786, over 4785.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2171, pruned_loss=0.03689, over 970981.29 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:06:34,777 INFO [train.py:715] (7/8) Epoch 6, batch 31900, loss[loss=0.19, simple_loss=0.2583, pruned_loss=0.06087, over 4689.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2176, pruned_loss=0.03708, over 971449.40 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:07:13,870 INFO [train.py:715] (7/8) Epoch 6, batch 31950, loss[loss=0.1507, simple_loss=0.2346, pruned_loss=0.03338, over 4768.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2184, pruned_loss=0.03759, over 971396.70 frames.], batch size: 14, lr: 3.22e-04 +2022-05-05 17:07:52,488 INFO [train.py:715] (7/8) Epoch 6, batch 32000, 
loss[loss=0.183, simple_loss=0.2586, pruned_loss=0.05366, over 4982.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03743, over 971572.37 frames.], batch size: 39, lr: 3.22e-04 +2022-05-05 17:08:31,940 INFO [train.py:715] (7/8) Epoch 6, batch 32050, loss[loss=0.1318, simple_loss=0.2027, pruned_loss=0.03048, over 4844.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03769, over 972289.74 frames.], batch size: 15, lr: 3.22e-04 +2022-05-05 17:09:11,462 INFO [train.py:715] (7/8) Epoch 6, batch 32100, loss[loss=0.1476, simple_loss=0.218, pruned_loss=0.0386, over 4774.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.038, over 971587.65 frames.], batch size: 19, lr: 3.22e-04 +2022-05-05 17:09:50,463 INFO [train.py:715] (7/8) Epoch 6, batch 32150, loss[loss=0.1361, simple_loss=0.2054, pruned_loss=0.03334, over 4792.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03747, over 971491.27 frames.], batch size: 24, lr: 3.22e-04 +2022-05-05 17:10:28,955 INFO [train.py:715] (7/8) Epoch 6, batch 32200, loss[loss=0.1599, simple_loss=0.2332, pruned_loss=0.04333, over 4809.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2184, pruned_loss=0.03759, over 972111.47 frames.], batch size: 26, lr: 3.22e-04 +2022-05-05 17:11:08,026 INFO [train.py:715] (7/8) Epoch 6, batch 32250, loss[loss=0.1408, simple_loss=0.2205, pruned_loss=0.03055, over 4861.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03823, over 971986.01 frames.], batch size: 38, lr: 3.22e-04 +2022-05-05 17:11:46,851 INFO [train.py:715] (7/8) Epoch 6, batch 32300, loss[loss=0.1516, simple_loss=0.2159, pruned_loss=0.04364, over 4804.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03799, over 971394.05 frames.], batch size: 21, lr: 3.22e-04 +2022-05-05 17:12:26,140 INFO [train.py:715] (7/8) Epoch 6, batch 32350, loss[loss=0.1336, simple_loss=0.223, pruned_loss=0.02209, over 4791.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.0374, over 971654.69 frames.], batch size: 17, lr: 3.22e-04 +2022-05-05 17:13:04,502 INFO [train.py:715] (7/8) Epoch 6, batch 32400, loss[loss=0.1232, simple_loss=0.1968, pruned_loss=0.0248, over 4966.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03739, over 971508.41 frames.], batch size: 28, lr: 3.22e-04 +2022-05-05 17:13:43,921 INFO [train.py:715] (7/8) Epoch 6, batch 32450, loss[loss=0.1457, simple_loss=0.2225, pruned_loss=0.03441, over 4800.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.03807, over 972170.70 frames.], batch size: 21, lr: 3.22e-04 +2022-05-05 17:14:23,268 INFO [train.py:715] (7/8) Epoch 6, batch 32500, loss[loss=0.1439, simple_loss=0.2122, pruned_loss=0.03779, over 4930.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2195, pruned_loss=0.03786, over 971577.79 frames.], batch size: 21, lr: 3.22e-04 +2022-05-05 17:15:01,982 INFO [train.py:715] (7/8) Epoch 6, batch 32550, loss[loss=0.1434, simple_loss=0.2059, pruned_loss=0.04045, over 4839.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03826, over 971711.86 frames.], batch size: 30, lr: 3.22e-04 +2022-05-05 17:15:40,778 INFO [train.py:715] (7/8) Epoch 6, batch 32600, loss[loss=0.1559, simple_loss=0.2297, pruned_loss=0.04106, over 4981.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2188, pruned_loss=0.03841, over 972012.64 frames.], batch size: 28, lr: 3.21e-04 +2022-05-05 17:16:19,207 INFO [train.py:715] (7/8) Epoch 6, batch 32650, loss[loss=0.1479, 
simple_loss=0.2263, pruned_loss=0.03474, over 4781.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.0384, over 970712.71 frames.], batch size: 17, lr: 3.21e-04 +2022-05-05 17:16:57,839 INFO [train.py:715] (7/8) Epoch 6, batch 32700, loss[loss=0.1105, simple_loss=0.1795, pruned_loss=0.02077, over 4803.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2189, pruned_loss=0.03845, over 971217.47 frames.], batch size: 13, lr: 3.21e-04 +2022-05-05 17:17:35,887 INFO [train.py:715] (7/8) Epoch 6, batch 32750, loss[loss=0.1268, simple_loss=0.2043, pruned_loss=0.02461, over 4772.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2187, pruned_loss=0.03821, over 971581.41 frames.], batch size: 17, lr: 3.21e-04 +2022-05-05 17:18:14,603 INFO [train.py:715] (7/8) Epoch 6, batch 32800, loss[loss=0.1575, simple_loss=0.2253, pruned_loss=0.04483, over 4909.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2187, pruned_loss=0.03807, over 971487.41 frames.], batch size: 17, lr: 3.21e-04 +2022-05-05 17:18:53,198 INFO [train.py:715] (7/8) Epoch 6, batch 32850, loss[loss=0.158, simple_loss=0.2148, pruned_loss=0.05057, over 4824.00 frames.], tot_loss[loss=0.1468, simple_loss=0.218, pruned_loss=0.0378, over 971783.96 frames.], batch size: 13, lr: 3.21e-04 +2022-05-05 17:19:31,605 INFO [train.py:715] (7/8) Epoch 6, batch 32900, loss[loss=0.1269, simple_loss=0.2002, pruned_loss=0.02684, over 4968.00 frames.], tot_loss[loss=0.1469, simple_loss=0.218, pruned_loss=0.03793, over 972910.94 frames.], batch size: 14, lr: 3.21e-04 +2022-05-05 17:20:09,697 INFO [train.py:715] (7/8) Epoch 6, batch 32950, loss[loss=0.1535, simple_loss=0.2235, pruned_loss=0.04177, over 4859.00 frames.], tot_loss[loss=0.147, simple_loss=0.2181, pruned_loss=0.03796, over 972697.96 frames.], batch size: 20, lr: 3.21e-04 +2022-05-05 17:20:48,506 INFO [train.py:715] (7/8) Epoch 6, batch 33000, loss[loss=0.151, simple_loss=0.2303, pruned_loss=0.0358, over 4813.00 frames.], tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.03807, over 973045.53 frames.], batch size: 24, lr: 3.21e-04 +2022-05-05 17:20:48,506 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 17:20:58,110 INFO [train.py:742] (7/8) Epoch 6, validation: loss=0.1087, simple_loss=0.1938, pruned_loss=0.01183, over 914524.00 frames. 
+2022-05-05 17:21:36,674 INFO [train.py:715] (7/8) Epoch 6, batch 33050, loss[loss=0.1788, simple_loss=0.2458, pruned_loss=0.05591, over 4960.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03798, over 972803.44 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:22:15,263 INFO [train.py:715] (7/8) Epoch 6, batch 33100, loss[loss=0.1675, simple_loss=0.2454, pruned_loss=0.04484, over 4899.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03765, over 973538.08 frames.], batch size: 19, lr: 3.21e-04 +2022-05-05 17:22:53,008 INFO [train.py:715] (7/8) Epoch 6, batch 33150, loss[loss=0.1332, simple_loss=0.2107, pruned_loss=0.02787, over 4805.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2186, pruned_loss=0.03748, over 972357.21 frames.], batch size: 21, lr: 3.21e-04 +2022-05-05 17:23:31,901 INFO [train.py:715] (7/8) Epoch 6, batch 33200, loss[loss=0.1747, simple_loss=0.2509, pruned_loss=0.04925, over 4925.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2189, pruned_loss=0.03748, over 972720.46 frames.], batch size: 39, lr: 3.21e-04 +2022-05-05 17:24:10,785 INFO [train.py:715] (7/8) Epoch 6, batch 33250, loss[loss=0.1541, simple_loss=0.2219, pruned_loss=0.04318, over 4781.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03748, over 972525.02 frames.], batch size: 14, lr: 3.21e-04 +2022-05-05 17:24:49,863 INFO [train.py:715] (7/8) Epoch 6, batch 33300, loss[loss=0.1505, simple_loss=0.2176, pruned_loss=0.04165, over 4956.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2172, pruned_loss=0.03731, over 973396.13 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:25:28,469 INFO [train.py:715] (7/8) Epoch 6, batch 33350, loss[loss=0.1993, simple_loss=0.2557, pruned_loss=0.07147, over 4844.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03789, over 973190.15 frames.], batch size: 32, lr: 3.21e-04 +2022-05-05 17:26:07,934 INFO [train.py:715] (7/8) Epoch 6, batch 33400, loss[loss=0.1874, simple_loss=0.2446, pruned_loss=0.06512, over 4978.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2184, pruned_loss=0.03801, over 973417.65 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:26:47,026 INFO [train.py:715] (7/8) Epoch 6, batch 33450, loss[loss=0.1397, simple_loss=0.2193, pruned_loss=0.03011, over 4784.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2185, pruned_loss=0.03802, over 972955.41 frames.], batch size: 14, lr: 3.21e-04 +2022-05-05 17:27:25,292 INFO [train.py:715] (7/8) Epoch 6, batch 33500, loss[loss=0.1733, simple_loss=0.2314, pruned_loss=0.05766, over 4871.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03818, over 973683.62 frames.], batch size: 30, lr: 3.21e-04 +2022-05-05 17:28:04,314 INFO [train.py:715] (7/8) Epoch 6, batch 33550, loss[loss=0.1362, simple_loss=0.2072, pruned_loss=0.03262, over 4779.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.03752, over 973563.65 frames.], batch size: 18, lr: 3.21e-04 +2022-05-05 17:28:43,722 INFO [train.py:715] (7/8) Epoch 6, batch 33600, loss[loss=0.1587, simple_loss=0.228, pruned_loss=0.04471, over 4965.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03798, over 972426.86 frames.], batch size: 35, lr: 3.21e-04 +2022-05-05 17:29:22,676 INFO [train.py:715] (7/8) Epoch 6, batch 33650, loss[loss=0.1187, simple_loss=0.2016, pruned_loss=0.01786, over 4864.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03756, over 972777.05 frames.], batch size: 20, lr: 3.21e-04 +2022-05-05 
17:30:01,279 INFO [train.py:715] (7/8) Epoch 6, batch 33700, loss[loss=0.1698, simple_loss=0.2339, pruned_loss=0.05286, over 4780.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2189, pruned_loss=0.03776, over 972515.44 frames.], batch size: 17, lr: 3.21e-04 +2022-05-05 17:30:39,883 INFO [train.py:715] (7/8) Epoch 6, batch 33750, loss[loss=0.1439, simple_loss=0.2155, pruned_loss=0.03615, over 4682.00 frames.], tot_loss[loss=0.147, simple_loss=0.2186, pruned_loss=0.03772, over 972825.30 frames.], batch size: 15, lr: 3.21e-04 +2022-05-05 17:31:19,209 INFO [train.py:715] (7/8) Epoch 6, batch 33800, loss[loss=0.124, simple_loss=0.197, pruned_loss=0.0255, over 4907.00 frames.], tot_loss[loss=0.1474, simple_loss=0.219, pruned_loss=0.03794, over 973183.35 frames.], batch size: 17, lr: 3.21e-04 +2022-05-05 17:31:58,018 INFO [train.py:715] (7/8) Epoch 6, batch 33850, loss[loss=0.1471, simple_loss=0.2229, pruned_loss=0.03566, over 4965.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03771, over 973252.27 frames.], batch size: 24, lr: 3.21e-04 +2022-05-05 17:32:36,704 INFO [train.py:715] (7/8) Epoch 6, batch 33900, loss[loss=0.1606, simple_loss=0.2218, pruned_loss=0.04969, over 4864.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2198, pruned_loss=0.038, over 972674.65 frames.], batch size: 30, lr: 3.21e-04 +2022-05-05 17:33:16,047 INFO [train.py:715] (7/8) Epoch 6, batch 33950, loss[loss=0.1455, simple_loss=0.2099, pruned_loss=0.04056, over 4913.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2204, pruned_loss=0.03821, over 973143.02 frames.], batch size: 39, lr: 3.21e-04 +2022-05-05 17:33:55,027 INFO [train.py:715] (7/8) Epoch 6, batch 34000, loss[loss=0.1247, simple_loss=0.207, pruned_loss=0.02121, over 4754.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03812, over 972792.02 frames.], batch size: 18, lr: 3.21e-04 +2022-05-05 17:34:33,700 INFO [train.py:715] (7/8) Epoch 6, batch 34050, loss[loss=0.1468, simple_loss=0.2258, pruned_loss=0.03389, over 4769.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03806, over 972225.06 frames.], batch size: 14, lr: 3.21e-04 +2022-05-05 17:35:12,981 INFO [train.py:715] (7/8) Epoch 6, batch 34100, loss[loss=0.1474, simple_loss=0.2098, pruned_loss=0.0425, over 4645.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.03808, over 972049.52 frames.], batch size: 13, lr: 3.20e-04 +2022-05-05 17:35:51,935 INFO [train.py:715] (7/8) Epoch 6, batch 34150, loss[loss=0.1136, simple_loss=0.1883, pruned_loss=0.01941, over 4777.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.0383, over 971871.95 frames.], batch size: 18, lr: 3.20e-04 +2022-05-05 17:36:30,535 INFO [train.py:715] (7/8) Epoch 6, batch 34200, loss[loss=0.1383, simple_loss=0.2007, pruned_loss=0.03797, over 4852.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03768, over 971623.24 frames.], batch size: 32, lr: 3.20e-04 +2022-05-05 17:37:09,176 INFO [train.py:715] (7/8) Epoch 6, batch 34250, loss[loss=0.1647, simple_loss=0.2317, pruned_loss=0.04888, over 4854.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.0381, over 971055.03 frames.], batch size: 20, lr: 3.20e-04 +2022-05-05 17:37:48,387 INFO [train.py:715] (7/8) Epoch 6, batch 34300, loss[loss=0.1279, simple_loss=0.2, pruned_loss=0.02789, over 4977.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2185, pruned_loss=0.03788, over 971802.02 frames.], batch size: 15, lr: 3.20e-04 +2022-05-05 17:38:26,980 INFO [train.py:715] 
(7/8) Epoch 6, batch 34350, loss[loss=0.1722, simple_loss=0.2526, pruned_loss=0.04585, over 4808.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.03823, over 971594.81 frames.], batch size: 21, lr: 3.20e-04 +2022-05-05 17:39:05,619 INFO [train.py:715] (7/8) Epoch 6, batch 34400, loss[loss=0.1671, simple_loss=0.247, pruned_loss=0.04367, over 4859.00 frames.], tot_loss[loss=0.1484, simple_loss=0.22, pruned_loss=0.03837, over 971766.03 frames.], batch size: 20, lr: 3.20e-04 +2022-05-05 17:39:45,298 INFO [train.py:715] (7/8) Epoch 6, batch 34450, loss[loss=0.1349, simple_loss=0.2082, pruned_loss=0.0308, over 4694.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2191, pruned_loss=0.03816, over 971387.89 frames.], batch size: 15, lr: 3.20e-04 +2022-05-05 17:40:24,041 INFO [train.py:715] (7/8) Epoch 6, batch 34500, loss[loss=0.1631, simple_loss=0.2351, pruned_loss=0.04549, over 4883.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2197, pruned_loss=0.03833, over 971238.64 frames.], batch size: 22, lr: 3.20e-04 +2022-05-05 17:41:02,891 INFO [train.py:715] (7/8) Epoch 6, batch 34550, loss[loss=0.1142, simple_loss=0.1911, pruned_loss=0.01863, over 4897.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2199, pruned_loss=0.03819, over 971637.85 frames.], batch size: 19, lr: 3.20e-04 +2022-05-05 17:41:41,805 INFO [train.py:715] (7/8) Epoch 6, batch 34600, loss[loss=0.1498, simple_loss=0.2203, pruned_loss=0.03966, over 4926.00 frames.], tot_loss[loss=0.1485, simple_loss=0.2201, pruned_loss=0.03843, over 971188.69 frames.], batch size: 17, lr: 3.20e-04 +2022-05-05 17:42:20,615 INFO [train.py:715] (7/8) Epoch 6, batch 34650, loss[loss=0.1277, simple_loss=0.1985, pruned_loss=0.02842, over 4831.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.03841, over 971417.58 frames.], batch size: 13, lr: 3.20e-04 +2022-05-05 17:42:59,316 INFO [train.py:715] (7/8) Epoch 6, batch 34700, loss[loss=0.1377, simple_loss=0.2153, pruned_loss=0.03002, over 4811.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2192, pruned_loss=0.03797, over 971291.04 frames.], batch size: 25, lr: 3.20e-04 +2022-05-05 17:43:37,147 INFO [train.py:715] (7/8) Epoch 6, batch 34750, loss[loss=0.1455, simple_loss=0.2229, pruned_loss=0.03406, over 4855.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2181, pruned_loss=0.03781, over 971189.76 frames.], batch size: 20, lr: 3.20e-04 +2022-05-05 17:44:13,984 INFO [train.py:715] (7/8) Epoch 6, batch 34800, loss[loss=0.1205, simple_loss=0.1932, pruned_loss=0.02393, over 4841.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2184, pruned_loss=0.038, over 970693.62 frames.], batch size: 12, lr: 3.20e-04 +2022-05-05 17:45:04,004 INFO [train.py:715] (7/8) Epoch 7, batch 0, loss[loss=0.1722, simple_loss=0.2407, pruned_loss=0.05185, over 4774.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2407, pruned_loss=0.05185, over 4774.00 frames.], batch size: 18, lr: 3.03e-04 +2022-05-05 17:45:42,573 INFO [train.py:715] (7/8) Epoch 7, batch 50, loss[loss=0.1422, simple_loss=0.2154, pruned_loss=0.03446, over 4918.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.03798, over 219309.55 frames.], batch size: 29, lr: 3.03e-04 +2022-05-05 17:46:21,355 INFO [train.py:715] (7/8) Epoch 7, batch 100, loss[loss=0.1503, simple_loss=0.2247, pruned_loss=0.03789, over 4848.00 frames.], tot_loss[loss=0.1489, simple_loss=0.2196, pruned_loss=0.03915, over 386430.26 frames.], batch size: 32, lr: 3.03e-04 +2022-05-05 17:47:00,257 INFO [train.py:715] (7/8) Epoch 7, batch 150, 
loss[loss=0.1181, simple_loss=0.1974, pruned_loss=0.01936, over 4988.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2187, pruned_loss=0.03838, over 517170.32 frames.], batch size: 28, lr: 3.03e-04 +2022-05-05 17:47:39,938 INFO [train.py:715] (7/8) Epoch 7, batch 200, loss[loss=0.1148, simple_loss=0.1901, pruned_loss=0.01975, over 4918.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2177, pruned_loss=0.03785, over 619295.87 frames.], batch size: 23, lr: 3.03e-04 +2022-05-05 17:48:18,722 INFO [train.py:715] (7/8) Epoch 7, batch 250, loss[loss=0.133, simple_loss=0.2092, pruned_loss=0.02838, over 4934.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2169, pruned_loss=0.03745, over 697287.08 frames.], batch size: 29, lr: 3.03e-04 +2022-05-05 17:48:58,165 INFO [train.py:715] (7/8) Epoch 7, batch 300, loss[loss=0.123, simple_loss=0.1961, pruned_loss=0.02499, over 4907.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2182, pruned_loss=0.03781, over 759033.86 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 17:49:36,846 INFO [train.py:715] (7/8) Epoch 7, batch 350, loss[loss=0.1459, simple_loss=0.2232, pruned_loss=0.03427, over 4905.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2175, pruned_loss=0.03743, over 806341.58 frames.], batch size: 18, lr: 3.02e-04 +2022-05-05 17:50:16,224 INFO [train.py:715] (7/8) Epoch 7, batch 400, loss[loss=0.1274, simple_loss=0.2064, pruned_loss=0.02419, over 4928.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2183, pruned_loss=0.03801, over 843435.45 frames.], batch size: 18, lr: 3.02e-04 +2022-05-05 17:50:54,886 INFO [train.py:715] (7/8) Epoch 7, batch 450, loss[loss=0.1468, simple_loss=0.2143, pruned_loss=0.03969, over 4895.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03768, over 872120.03 frames.], batch size: 19, lr: 3.02e-04 +2022-05-05 17:51:33,768 INFO [train.py:715] (7/8) Epoch 7, batch 500, loss[loss=0.1452, simple_loss=0.2191, pruned_loss=0.03568, over 4904.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2176, pruned_loss=0.03742, over 894472.79 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 17:52:12,472 INFO [train.py:715] (7/8) Epoch 7, batch 550, loss[loss=0.1576, simple_loss=0.2136, pruned_loss=0.05082, over 4959.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03675, over 911349.30 frames.], batch size: 24, lr: 3.02e-04 +2022-05-05 17:52:51,638 INFO [train.py:715] (7/8) Epoch 7, batch 600, loss[loss=0.156, simple_loss=0.2221, pruned_loss=0.04495, over 4792.00 frames.], tot_loss[loss=0.1459, simple_loss=0.217, pruned_loss=0.0374, over 924943.28 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 17:53:29,945 INFO [train.py:715] (7/8) Epoch 7, batch 650, loss[loss=0.132, simple_loss=0.2033, pruned_loss=0.03037, over 4821.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2185, pruned_loss=0.03788, over 935717.13 frames.], batch size: 26, lr: 3.02e-04 +2022-05-05 17:54:08,326 INFO [train.py:715] (7/8) Epoch 7, batch 700, loss[loss=0.1384, simple_loss=0.2124, pruned_loss=0.03222, over 4868.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2186, pruned_loss=0.03813, over 943577.19 frames.], batch size: 16, lr: 3.02e-04 +2022-05-05 17:54:47,594 INFO [train.py:715] (7/8) Epoch 7, batch 750, loss[loss=0.1621, simple_loss=0.2198, pruned_loss=0.05215, over 4872.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03765, over 950544.78 frames.], batch size: 32, lr: 3.02e-04 +2022-05-05 17:55:26,297 INFO [train.py:715] (7/8) Epoch 7, batch 800, loss[loss=0.1414, simple_loss=0.2094, 
pruned_loss=0.03667, over 4747.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2181, pruned_loss=0.03759, over 955979.89 frames.], batch size: 19, lr: 3.02e-04 +2022-05-05 17:56:04,983 INFO [train.py:715] (7/8) Epoch 7, batch 850, loss[loss=0.1308, simple_loss=0.2056, pruned_loss=0.028, over 4977.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03776, over 959902.11 frames.], batch size: 24, lr: 3.02e-04 +2022-05-05 17:56:44,239 INFO [train.py:715] (7/8) Epoch 7, batch 900, loss[loss=0.1308, simple_loss=0.2044, pruned_loss=0.02863, over 4878.00 frames.], tot_loss[loss=0.1482, simple_loss=0.22, pruned_loss=0.03817, over 962189.36 frames.], batch size: 22, lr: 3.02e-04 +2022-05-05 17:57:23,220 INFO [train.py:715] (7/8) Epoch 7, batch 950, loss[loss=0.144, simple_loss=0.2217, pruned_loss=0.03314, over 4919.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2194, pruned_loss=0.03807, over 964634.81 frames.], batch size: 23, lr: 3.02e-04 +2022-05-05 17:58:01,722 INFO [train.py:715] (7/8) Epoch 7, batch 1000, loss[loss=0.1444, simple_loss=0.2251, pruned_loss=0.03185, over 4706.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2204, pruned_loss=0.03822, over 965861.19 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 17:58:40,406 INFO [train.py:715] (7/8) Epoch 7, batch 1050, loss[loss=0.1487, simple_loss=0.2251, pruned_loss=0.03614, over 4834.00 frames.], tot_loss[loss=0.1483, simple_loss=0.22, pruned_loss=0.03827, over 968597.20 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 17:59:19,624 INFO [train.py:715] (7/8) Epoch 7, batch 1100, loss[loss=0.1612, simple_loss=0.2423, pruned_loss=0.04009, over 4821.00 frames.], tot_loss[loss=0.148, simple_loss=0.2195, pruned_loss=0.0382, over 969272.75 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 17:59:57,781 INFO [train.py:715] (7/8) Epoch 7, batch 1150, loss[loss=0.1179, simple_loss=0.1923, pruned_loss=0.02177, over 4834.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2197, pruned_loss=0.03791, over 969511.15 frames.], batch size: 13, lr: 3.02e-04 +2022-05-05 18:00:36,967 INFO [train.py:715] (7/8) Epoch 7, batch 1200, loss[loss=0.1445, simple_loss=0.2094, pruned_loss=0.03983, over 4983.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03788, over 969306.35 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 18:01:16,050 INFO [train.py:715] (7/8) Epoch 7, batch 1250, loss[loss=0.1194, simple_loss=0.1902, pruned_loss=0.02433, over 4824.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.0378, over 970139.18 frames.], batch size: 13, lr: 3.02e-04 +2022-05-05 18:01:55,179 INFO [train.py:715] (7/8) Epoch 7, batch 1300, loss[loss=0.1654, simple_loss=0.2442, pruned_loss=0.04335, over 4800.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03794, over 970717.36 frames.], batch size: 25, lr: 3.02e-04 +2022-05-05 18:02:33,763 INFO [train.py:715] (7/8) Epoch 7, batch 1350, loss[loss=0.1715, simple_loss=0.2369, pruned_loss=0.05307, over 4784.00 frames.], tot_loss[loss=0.148, simple_loss=0.2191, pruned_loss=0.03842, over 970257.38 frames.], batch size: 17, lr: 3.02e-04 +2022-05-05 18:03:12,553 INFO [train.py:715] (7/8) Epoch 7, batch 1400, loss[loss=0.1503, simple_loss=0.2223, pruned_loss=0.03917, over 4782.00 frames.], tot_loss[loss=0.1486, simple_loss=0.2196, pruned_loss=0.03879, over 970861.42 frames.], batch size: 18, lr: 3.02e-04 +2022-05-05 18:03:51,645 INFO [train.py:715] (7/8) Epoch 7, batch 1450, loss[loss=0.1704, simple_loss=0.2454, pruned_loss=0.04769, over 4885.00 frames.], 
tot_loss[loss=0.1476, simple_loss=0.219, pruned_loss=0.03807, over 971710.64 frames.], batch size: 16, lr: 3.02e-04 +2022-05-05 18:04:29,771 INFO [train.py:715] (7/8) Epoch 7, batch 1500, loss[loss=0.1855, simple_loss=0.2429, pruned_loss=0.06401, over 4881.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.03751, over 971495.97 frames.], batch size: 32, lr: 3.02e-04 +2022-05-05 18:05:08,981 INFO [train.py:715] (7/8) Epoch 7, batch 1550, loss[loss=0.1452, simple_loss=0.2175, pruned_loss=0.03647, over 4922.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.03688, over 971188.54 frames.], batch size: 23, lr: 3.02e-04 +2022-05-05 18:05:47,788 INFO [train.py:715] (7/8) Epoch 7, batch 1600, loss[loss=0.1398, simple_loss=0.2034, pruned_loss=0.03816, over 4958.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2169, pruned_loss=0.03692, over 971086.09 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 18:06:26,680 INFO [train.py:715] (7/8) Epoch 7, batch 1650, loss[loss=0.1468, simple_loss=0.2146, pruned_loss=0.03952, over 4937.00 frames.], tot_loss[loss=0.1462, simple_loss=0.218, pruned_loss=0.03721, over 972078.51 frames.], batch size: 29, lr: 3.02e-04 +2022-05-05 18:07:05,256 INFO [train.py:715] (7/8) Epoch 7, batch 1700, loss[loss=0.1439, simple_loss=0.2145, pruned_loss=0.03665, over 4866.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2187, pruned_loss=0.03774, over 972529.30 frames.], batch size: 16, lr: 3.02e-04 +2022-05-05 18:07:44,161 INFO [train.py:715] (7/8) Epoch 7, batch 1750, loss[loss=0.1281, simple_loss=0.2037, pruned_loss=0.02629, over 4832.00 frames.], tot_loss[loss=0.1478, simple_loss=0.219, pruned_loss=0.03826, over 972042.54 frames.], batch size: 26, lr: 3.02e-04 +2022-05-05 18:08:24,138 INFO [train.py:715] (7/8) Epoch 7, batch 1800, loss[loss=0.1568, simple_loss=0.2275, pruned_loss=0.04304, over 4977.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2193, pruned_loss=0.03844, over 972431.13 frames.], batch size: 15, lr: 3.02e-04 +2022-05-05 18:09:03,075 INFO [train.py:715] (7/8) Epoch 7, batch 1850, loss[loss=0.1509, simple_loss=0.2176, pruned_loss=0.04213, over 4989.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03752, over 972150.53 frames.], batch size: 28, lr: 3.02e-04 +2022-05-05 18:09:41,925 INFO [train.py:715] (7/8) Epoch 7, batch 1900, loss[loss=0.1215, simple_loss=0.1988, pruned_loss=0.02211, over 4972.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03779, over 972544.72 frames.], batch size: 25, lr: 3.01e-04 +2022-05-05 18:10:20,112 INFO [train.py:715] (7/8) Epoch 7, batch 1950, loss[loss=0.1633, simple_loss=0.2352, pruned_loss=0.04567, over 4984.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03731, over 972322.23 frames.], batch size: 15, lr: 3.01e-04 +2022-05-05 18:10:59,288 INFO [train.py:715] (7/8) Epoch 7, batch 2000, loss[loss=0.1777, simple_loss=0.2459, pruned_loss=0.05473, over 4891.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03771, over 972088.95 frames.], batch size: 39, lr: 3.01e-04 +2022-05-05 18:11:37,486 INFO [train.py:715] (7/8) Epoch 7, batch 2050, loss[loss=0.1747, simple_loss=0.2561, pruned_loss=0.04666, over 4839.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2183, pruned_loss=0.03739, over 972537.83 frames.], batch size: 26, lr: 3.01e-04 +2022-05-05 18:12:16,136 INFO [train.py:715] (7/8) Epoch 7, batch 2100, loss[loss=0.1512, simple_loss=0.226, pruned_loss=0.03814, over 4961.00 frames.], tot_loss[loss=0.1457, 
simple_loss=0.2174, pruned_loss=0.03701, over 972210.60 frames.], batch size: 35, lr: 3.01e-04 +2022-05-05 18:12:54,593 INFO [train.py:715] (7/8) Epoch 7, batch 2150, loss[loss=0.1609, simple_loss=0.2307, pruned_loss=0.04553, over 4786.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03691, over 971696.87 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:13:32,798 INFO [train.py:715] (7/8) Epoch 7, batch 2200, loss[loss=0.1394, simple_loss=0.2137, pruned_loss=0.03252, over 4782.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03733, over 971080.05 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:14:11,047 INFO [train.py:715] (7/8) Epoch 7, batch 2250, loss[loss=0.1581, simple_loss=0.227, pruned_loss=0.04458, over 4786.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03743, over 970705.31 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:14:50,047 INFO [train.py:715] (7/8) Epoch 7, batch 2300, loss[loss=0.1689, simple_loss=0.2312, pruned_loss=0.05328, over 4867.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.03685, over 970800.02 frames.], batch size: 16, lr: 3.01e-04 +2022-05-05 18:15:29,528 INFO [train.py:715] (7/8) Epoch 7, batch 2350, loss[loss=0.139, simple_loss=0.2068, pruned_loss=0.03555, over 4840.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03697, over 970935.92 frames.], batch size: 15, lr: 3.01e-04 +2022-05-05 18:16:08,322 INFO [train.py:715] (7/8) Epoch 7, batch 2400, loss[loss=0.1433, simple_loss=0.2197, pruned_loss=0.03341, over 4849.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2185, pruned_loss=0.03728, over 971514.57 frames.], batch size: 20, lr: 3.01e-04 +2022-05-05 18:16:46,787 INFO [train.py:715] (7/8) Epoch 7, batch 2450, loss[loss=0.127, simple_loss=0.1998, pruned_loss=0.02706, over 4919.00 frames.], tot_loss[loss=0.147, simple_loss=0.2189, pruned_loss=0.03756, over 971665.11 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:17:25,558 INFO [train.py:715] (7/8) Epoch 7, batch 2500, loss[loss=0.1236, simple_loss=0.2016, pruned_loss=0.02282, over 4884.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03727, over 971485.74 frames.], batch size: 22, lr: 3.01e-04 +2022-05-05 18:18:03,861 INFO [train.py:715] (7/8) Epoch 7, batch 2550, loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.0343, over 4778.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03718, over 971585.70 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:18:42,383 INFO [train.py:715] (7/8) Epoch 7, batch 2600, loss[loss=0.1607, simple_loss=0.2284, pruned_loss=0.04646, over 4912.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03738, over 972290.00 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:19:21,118 INFO [train.py:715] (7/8) Epoch 7, batch 2650, loss[loss=0.1496, simple_loss=0.2192, pruned_loss=0.04001, over 4843.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2185, pruned_loss=0.03748, over 972271.05 frames.], batch size: 32, lr: 3.01e-04 +2022-05-05 18:19:59,709 INFO [train.py:715] (7/8) Epoch 7, batch 2700, loss[loss=0.1525, simple_loss=0.2253, pruned_loss=0.03987, over 4841.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03715, over 972313.60 frames.], batch size: 32, lr: 3.01e-04 +2022-05-05 18:20:37,585 INFO [train.py:715] (7/8) Epoch 7, batch 2750, loss[loss=0.1262, simple_loss=0.1998, pruned_loss=0.02624, over 4866.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2183, 
pruned_loss=0.03717, over 972515.90 frames.], batch size: 20, lr: 3.01e-04 +2022-05-05 18:21:16,372 INFO [train.py:715] (7/8) Epoch 7, batch 2800, loss[loss=0.1598, simple_loss=0.2334, pruned_loss=0.04311, over 4811.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03794, over 972350.33 frames.], batch size: 24, lr: 3.01e-04 +2022-05-05 18:21:55,733 INFO [train.py:715] (7/8) Epoch 7, batch 2850, loss[loss=0.1115, simple_loss=0.1809, pruned_loss=0.02104, over 4821.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2174, pruned_loss=0.03704, over 972606.53 frames.], batch size: 25, lr: 3.01e-04 +2022-05-05 18:22:35,310 INFO [train.py:715] (7/8) Epoch 7, batch 2900, loss[loss=0.1128, simple_loss=0.1794, pruned_loss=0.02313, over 4815.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2177, pruned_loss=0.03756, over 972515.21 frames.], batch size: 13, lr: 3.01e-04 +2022-05-05 18:23:14,209 INFO [train.py:715] (7/8) Epoch 7, batch 2950, loss[loss=0.1429, simple_loss=0.2213, pruned_loss=0.03225, over 4751.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2172, pruned_loss=0.0373, over 972256.64 frames.], batch size: 19, lr: 3.01e-04 +2022-05-05 18:23:53,377 INFO [train.py:715] (7/8) Epoch 7, batch 3000, loss[loss=0.1343, simple_loss=0.2006, pruned_loss=0.03396, over 4782.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2179, pruned_loss=0.03776, over 972655.64 frames.], batch size: 14, lr: 3.01e-04 +2022-05-05 18:23:53,378 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 18:24:04,767 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1084, simple_loss=0.1933, pruned_loss=0.01171, over 914524.00 frames. +2022-05-05 18:24:44,253 INFO [train.py:715] (7/8) Epoch 7, batch 3050, loss[loss=0.1628, simple_loss=0.2372, pruned_loss=0.04416, over 4919.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03791, over 973067.30 frames.], batch size: 18, lr: 3.01e-04 +2022-05-05 18:25:23,059 INFO [train.py:715] (7/8) Epoch 7, batch 3100, loss[loss=0.1321, simple_loss=0.2058, pruned_loss=0.02923, over 4919.00 frames.], tot_loss[loss=0.146, simple_loss=0.2177, pruned_loss=0.03717, over 972867.04 frames.], batch size: 23, lr: 3.01e-04 +2022-05-05 18:26:01,759 INFO [train.py:715] (7/8) Epoch 7, batch 3150, loss[loss=0.1427, simple_loss=0.2118, pruned_loss=0.03675, over 4814.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03791, over 971790.05 frames.], batch size: 27, lr: 3.01e-04 +2022-05-05 18:26:39,661 INFO [train.py:715] (7/8) Epoch 7, batch 3200, loss[loss=0.1306, simple_loss=0.2044, pruned_loss=0.02838, over 4964.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03832, over 972034.12 frames.], batch size: 35, lr: 3.01e-04 +2022-05-05 18:27:17,886 INFO [train.py:715] (7/8) Epoch 7, batch 3250, loss[loss=0.1675, simple_loss=0.244, pruned_loss=0.04544, over 4734.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03738, over 972565.57 frames.], batch size: 16, lr: 3.01e-04 +2022-05-05 18:27:56,434 INFO [train.py:715] (7/8) Epoch 7, batch 3300, loss[loss=0.213, simple_loss=0.283, pruned_loss=0.07148, over 4968.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2192, pruned_loss=0.03808, over 971971.03 frames.], batch size: 24, lr: 3.01e-04 +2022-05-05 18:28:35,033 INFO [train.py:715] (7/8) Epoch 7, batch 3350, loss[loss=0.1476, simple_loss=0.2183, pruned_loss=0.03845, over 4830.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2192, pruned_loss=0.03786, over 972006.82 frames.], batch size: 30, lr: 3.01e-04 
+2022-05-05 18:29:13,822 INFO [train.py:715] (7/8) Epoch 7, batch 3400, loss[loss=0.1264, simple_loss=0.2048, pruned_loss=0.02399, over 4805.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2196, pruned_loss=0.03811, over 972312.61 frames.], batch size: 21, lr: 3.01e-04 +2022-05-05 18:29:52,250 INFO [train.py:715] (7/8) Epoch 7, batch 3450, loss[loss=0.1419, simple_loss=0.2063, pruned_loss=0.03875, over 4912.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2188, pruned_loss=0.0377, over 971882.57 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:30:31,304 INFO [train.py:715] (7/8) Epoch 7, batch 3500, loss[loss=0.1215, simple_loss=0.1961, pruned_loss=0.02348, over 4783.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2188, pruned_loss=0.03772, over 972203.81 frames.], batch size: 17, lr: 3.01e-04 +2022-05-05 18:31:09,923 INFO [train.py:715] (7/8) Epoch 7, batch 3550, loss[loss=0.1512, simple_loss=0.2113, pruned_loss=0.04555, over 4956.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2188, pruned_loss=0.03788, over 971768.25 frames.], batch size: 35, lr: 3.00e-04 +2022-05-05 18:31:48,695 INFO [train.py:715] (7/8) Epoch 7, batch 3600, loss[loss=0.1376, simple_loss=0.2081, pruned_loss=0.03353, over 4881.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03793, over 971778.89 frames.], batch size: 22, lr: 3.00e-04 +2022-05-05 18:32:27,423 INFO [train.py:715] (7/8) Epoch 7, batch 3650, loss[loss=0.1769, simple_loss=0.241, pruned_loss=0.05637, over 4889.00 frames.], tot_loss[loss=0.147, simple_loss=0.2186, pruned_loss=0.0377, over 972089.86 frames.], batch size: 19, lr: 3.00e-04 +2022-05-05 18:33:06,463 INFO [train.py:715] (7/8) Epoch 7, batch 3700, loss[loss=0.1241, simple_loss=0.2045, pruned_loss=0.02182, over 4926.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2175, pruned_loss=0.0375, over 972411.30 frames.], batch size: 21, lr: 3.00e-04 +2022-05-05 18:33:45,232 INFO [train.py:715] (7/8) Epoch 7, batch 3750, loss[loss=0.1165, simple_loss=0.1933, pruned_loss=0.0199, over 4892.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2168, pruned_loss=0.03709, over 972220.60 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:34:23,490 INFO [train.py:715] (7/8) Epoch 7, batch 3800, loss[loss=0.1482, simple_loss=0.2343, pruned_loss=0.03109, over 4814.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.03754, over 972041.11 frames.], batch size: 13, lr: 3.00e-04 +2022-05-05 18:35:01,655 INFO [train.py:715] (7/8) Epoch 7, batch 3850, loss[loss=0.134, simple_loss=0.1939, pruned_loss=0.03704, over 4821.00 frames.], tot_loss[loss=0.146, simple_loss=0.2175, pruned_loss=0.03722, over 971944.01 frames.], batch size: 12, lr: 3.00e-04 +2022-05-05 18:35:39,926 INFO [train.py:715] (7/8) Epoch 7, batch 3900, loss[loss=0.1455, simple_loss=0.2178, pruned_loss=0.03662, over 4789.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03724, over 972037.94 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:36:18,413 INFO [train.py:715] (7/8) Epoch 7, batch 3950, loss[loss=0.1364, simple_loss=0.2059, pruned_loss=0.03341, over 4745.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.037, over 971263.92 frames.], batch size: 19, lr: 3.00e-04 +2022-05-05 18:36:57,039 INFO [train.py:715] (7/8) Epoch 7, batch 4000, loss[loss=0.1718, simple_loss=0.2343, pruned_loss=0.05461, over 4885.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2174, pruned_loss=0.03739, over 971702.60 frames.], batch size: 16, lr: 3.00e-04 +2022-05-05 18:37:35,135 INFO 
[train.py:715] (7/8) Epoch 7, batch 4050, loss[loss=0.1269, simple_loss=0.1969, pruned_loss=0.02848, over 4921.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03716, over 972642.51 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:38:14,045 INFO [train.py:715] (7/8) Epoch 7, batch 4100, loss[loss=0.1504, simple_loss=0.2218, pruned_loss=0.03951, over 4841.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.03698, over 972196.64 frames.], batch size: 30, lr: 3.00e-04 +2022-05-05 18:38:52,567 INFO [train.py:715] (7/8) Epoch 7, batch 4150, loss[loss=0.1267, simple_loss=0.2018, pruned_loss=0.02582, over 4924.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.03751, over 972468.04 frames.], batch size: 19, lr: 3.00e-04 +2022-05-05 18:39:31,259 INFO [train.py:715] (7/8) Epoch 7, batch 4200, loss[loss=0.1059, simple_loss=0.1864, pruned_loss=0.01276, over 4932.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2163, pruned_loss=0.03655, over 972960.79 frames.], batch size: 29, lr: 3.00e-04 +2022-05-05 18:40:09,112 INFO [train.py:715] (7/8) Epoch 7, batch 4250, loss[loss=0.178, simple_loss=0.2475, pruned_loss=0.05422, over 4931.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2176, pruned_loss=0.03727, over 972827.01 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:40:47,953 INFO [train.py:715] (7/8) Epoch 7, batch 4300, loss[loss=0.156, simple_loss=0.2265, pruned_loss=0.04274, over 4946.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2176, pruned_loss=0.03737, over 972808.21 frames.], batch size: 24, lr: 3.00e-04 +2022-05-05 18:41:28,765 INFO [train.py:715] (7/8) Epoch 7, batch 4350, loss[loss=0.1498, simple_loss=0.2255, pruned_loss=0.03711, over 4941.00 frames.], tot_loss[loss=0.1462, simple_loss=0.218, pruned_loss=0.03721, over 973005.53 frames.], batch size: 29, lr: 3.00e-04 +2022-05-05 18:42:07,270 INFO [train.py:715] (7/8) Epoch 7, batch 4400, loss[loss=0.1741, simple_loss=0.2377, pruned_loss=0.05524, over 4909.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2194, pruned_loss=0.03753, over 973285.59 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:42:46,326 INFO [train.py:715] (7/8) Epoch 7, batch 4450, loss[loss=0.1437, simple_loss=0.2246, pruned_loss=0.0314, over 4702.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2198, pruned_loss=0.03815, over 973211.03 frames.], batch size: 15, lr: 3.00e-04 +2022-05-05 18:43:25,202 INFO [train.py:715] (7/8) Epoch 7, batch 4500, loss[loss=0.171, simple_loss=0.2377, pruned_loss=0.05216, over 4780.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03763, over 973174.47 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:44:03,954 INFO [train.py:715] (7/8) Epoch 7, batch 4550, loss[loss=0.1575, simple_loss=0.2238, pruned_loss=0.04562, over 4778.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03772, over 973512.72 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:44:42,555 INFO [train.py:715] (7/8) Epoch 7, batch 4600, loss[loss=0.1237, simple_loss=0.2071, pruned_loss=0.0201, over 4864.00 frames.], tot_loss[loss=0.147, simple_loss=0.219, pruned_loss=0.03754, over 973016.74 frames.], batch size: 30, lr: 3.00e-04 +2022-05-05 18:45:21,325 INFO [train.py:715] (7/8) Epoch 7, batch 4650, loss[loss=0.1251, simple_loss=0.1953, pruned_loss=0.02747, over 4972.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2196, pruned_loss=0.03799, over 972269.66 frames.], batch size: 15, lr: 3.00e-04 +2022-05-05 18:45:59,786 INFO [train.py:715] (7/8) Epoch 7, batch 
4700, loss[loss=0.1455, simple_loss=0.2187, pruned_loss=0.03617, over 4802.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2192, pruned_loss=0.03772, over 972073.78 frames.], batch size: 14, lr: 3.00e-04 +2022-05-05 18:46:37,972 INFO [train.py:715] (7/8) Epoch 7, batch 4750, loss[loss=0.1381, simple_loss=0.1968, pruned_loss=0.03972, over 4780.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2194, pruned_loss=0.03808, over 971986.38 frames.], batch size: 12, lr: 3.00e-04 +2022-05-05 18:47:17,154 INFO [train.py:715] (7/8) Epoch 7, batch 4800, loss[loss=0.1475, simple_loss=0.218, pruned_loss=0.0385, over 4863.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.03754, over 971777.43 frames.], batch size: 20, lr: 3.00e-04 +2022-05-05 18:47:55,561 INFO [train.py:715] (7/8) Epoch 7, batch 4850, loss[loss=0.1228, simple_loss=0.2011, pruned_loss=0.02225, over 4950.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03684, over 971886.57 frames.], batch size: 29, lr: 3.00e-04 +2022-05-05 18:48:34,302 INFO [train.py:715] (7/8) Epoch 7, batch 4900, loss[loss=0.1628, simple_loss=0.2206, pruned_loss=0.0525, over 4783.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2179, pruned_loss=0.03724, over 971552.64 frames.], batch size: 17, lr: 3.00e-04 +2022-05-05 18:49:12,734 INFO [train.py:715] (7/8) Epoch 7, batch 4950, loss[loss=0.1329, simple_loss=0.2159, pruned_loss=0.02491, over 4875.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.0369, over 972513.29 frames.], batch size: 22, lr: 3.00e-04 +2022-05-05 18:49:51,779 INFO [train.py:715] (7/8) Epoch 7, batch 5000, loss[loss=0.1495, simple_loss=0.2159, pruned_loss=0.04152, over 4841.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2185, pruned_loss=0.03744, over 972931.48 frames.], batch size: 15, lr: 3.00e-04 +2022-05-05 18:50:30,775 INFO [train.py:715] (7/8) Epoch 7, batch 5050, loss[loss=0.1253, simple_loss=0.192, pruned_loss=0.02925, over 4822.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2172, pruned_loss=0.03728, over 972773.74 frames.], batch size: 27, lr: 3.00e-04 +2022-05-05 18:51:09,370 INFO [train.py:715] (7/8) Epoch 7, batch 5100, loss[loss=0.151, simple_loss=0.2195, pruned_loss=0.04122, over 4965.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.0375, over 972070.99 frames.], batch size: 24, lr: 3.00e-04 +2022-05-05 18:51:48,429 INFO [train.py:715] (7/8) Epoch 7, batch 5150, loss[loss=0.1522, simple_loss=0.222, pruned_loss=0.04123, over 4911.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2184, pruned_loss=0.03788, over 971798.46 frames.], batch size: 18, lr: 3.00e-04 +2022-05-05 18:52:27,134 INFO [train.py:715] (7/8) Epoch 7, batch 5200, loss[loss=0.1412, simple_loss=0.2277, pruned_loss=0.02731, over 4744.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2182, pruned_loss=0.0378, over 970993.18 frames.], batch size: 16, lr: 2.99e-04 +2022-05-05 18:53:06,163 INFO [train.py:715] (7/8) Epoch 7, batch 5250, loss[loss=0.1392, simple_loss=0.2115, pruned_loss=0.03344, over 4855.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2176, pruned_loss=0.03734, over 971210.83 frames.], batch size: 20, lr: 2.99e-04 +2022-05-05 18:53:44,791 INFO [train.py:715] (7/8) Epoch 7, batch 5300, loss[loss=0.1399, simple_loss=0.2253, pruned_loss=0.02723, over 4784.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2184, pruned_loss=0.03724, over 971744.66 frames.], batch size: 18, lr: 2.99e-04 +2022-05-05 18:54:24,157 INFO [train.py:715] (7/8) Epoch 7, batch 5350, loss[loss=0.1489, 
simple_loss=0.2277, pruned_loss=0.03509, over 4891.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2186, pruned_loss=0.03758, over 971091.50 frames.], batch size: 22, lr: 2.99e-04 +2022-05-05 18:55:02,366 INFO [train.py:715] (7/8) Epoch 7, batch 5400, loss[loss=0.2071, simple_loss=0.2841, pruned_loss=0.06504, over 4812.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2182, pruned_loss=0.03722, over 972033.41 frames.], batch size: 13, lr: 2.99e-04 +2022-05-05 18:55:41,210 INFO [train.py:715] (7/8) Epoch 7, batch 5450, loss[loss=0.1504, simple_loss=0.2276, pruned_loss=0.0366, over 4828.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2196, pruned_loss=0.03766, over 973445.53 frames.], batch size: 26, lr: 2.99e-04 +2022-05-05 18:56:20,340 INFO [train.py:715] (7/8) Epoch 7, batch 5500, loss[loss=0.1488, simple_loss=0.2137, pruned_loss=0.04199, over 4684.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2186, pruned_loss=0.03749, over 973363.14 frames.], batch size: 15, lr: 2.99e-04 +2022-05-05 18:56:59,122 INFO [train.py:715] (7/8) Epoch 7, batch 5550, loss[loss=0.1701, simple_loss=0.2372, pruned_loss=0.05145, over 4869.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2183, pruned_loss=0.03746, over 973241.52 frames.], batch size: 16, lr: 2.99e-04 +2022-05-05 18:57:38,238 INFO [train.py:715] (7/8) Epoch 7, batch 5600, loss[loss=0.1547, simple_loss=0.2258, pruned_loss=0.04176, over 4990.00 frames.], tot_loss[loss=0.147, simple_loss=0.2185, pruned_loss=0.03775, over 972774.35 frames.], batch size: 14, lr: 2.99e-04 +2022-05-05 18:58:17,279 INFO [train.py:715] (7/8) Epoch 7, batch 5650, loss[loss=0.1279, simple_loss=0.2045, pruned_loss=0.0257, over 4886.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2182, pruned_loss=0.03739, over 972980.70 frames.], batch size: 16, lr: 2.99e-04 +2022-05-05 18:58:56,368 INFO [train.py:715] (7/8) Epoch 7, batch 5700, loss[loss=0.2194, simple_loss=0.2974, pruned_loss=0.07075, over 4884.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2188, pruned_loss=0.03796, over 972023.96 frames.], batch size: 19, lr: 2.99e-04 +2022-05-05 18:59:34,747 INFO [train.py:715] (7/8) Epoch 7, batch 5750, loss[loss=0.1682, simple_loss=0.2268, pruned_loss=0.05483, over 4911.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2189, pruned_loss=0.03821, over 972343.96 frames.], batch size: 18, lr: 2.99e-04 +2022-05-05 19:00:12,900 INFO [train.py:715] (7/8) Epoch 7, batch 5800, loss[loss=0.1534, simple_loss=0.2271, pruned_loss=0.03982, over 4879.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2176, pruned_loss=0.03743, over 972230.72 frames.], batch size: 16, lr: 2.99e-04 +2022-05-05 19:00:52,628 INFO [train.py:715] (7/8) Epoch 7, batch 5850, loss[loss=0.1357, simple_loss=0.2039, pruned_loss=0.0338, over 4856.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.03794, over 972006.09 frames.], batch size: 12, lr: 2.99e-04 +2022-05-05 19:01:30,924 INFO [train.py:715] (7/8) Epoch 7, batch 5900, loss[loss=0.173, simple_loss=0.2439, pruned_loss=0.05107, over 4925.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2182, pruned_loss=0.03766, over 971813.80 frames.], batch size: 39, lr: 2.99e-04 +2022-05-05 19:02:09,959 INFO [train.py:715] (7/8) Epoch 7, batch 5950, loss[loss=0.1533, simple_loss=0.225, pruned_loss=0.04079, over 4845.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2178, pruned_loss=0.03779, over 971072.48 frames.], batch size: 30, lr: 2.99e-04 +2022-05-05 19:02:48,384 INFO [train.py:715] (7/8) Epoch 7, batch 6000, loss[loss=0.1486, simple_loss=0.2166, 
pruned_loss=0.04029, over 4769.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2172, pruned_loss=0.03717, over 970464.10 frames.], batch size: 14, lr: 2.99e-04 +2022-05-05 19:02:48,385 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 19:02:58,047 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1085, simple_loss=0.1933, pruned_loss=0.0119, over 914524.00 frames. +2022-05-05 19:03:36,916 INFO [train.py:715] (7/8) Epoch 7, batch 6050, loss[loss=0.1459, simple_loss=0.2203, pruned_loss=0.03581, over 4784.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03718, over 970977.63 frames.], batch size: 14, lr: 2.99e-04 +2022-05-05 19:04:16,080 INFO [train.py:715] (7/8) Epoch 7, batch 6100, loss[loss=0.1409, simple_loss=0.2098, pruned_loss=0.03596, over 4936.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03736, over 971219.63 frames.], batch size: 21, lr: 2.99e-04 +2022-05-05 19:04:55,377 INFO [train.py:715] (7/8) Epoch 7, batch 6150, loss[loss=0.1505, simple_loss=0.2186, pruned_loss=0.0412, over 4772.00 frames.], tot_loss[loss=0.147, simple_loss=0.2188, pruned_loss=0.03763, over 971330.69 frames.], batch size: 19, lr: 2.99e-04 +2022-05-05 19:05:33,828 INFO [train.py:715] (7/8) Epoch 7, batch 6200, loss[loss=0.1687, simple_loss=0.2241, pruned_loss=0.05666, over 4795.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03702, over 971478.50 frames.], batch size: 17, lr: 2.99e-04 +2022-05-05 19:06:13,679 INFO [train.py:715] (7/8) Epoch 7, batch 6250, loss[loss=0.1443, simple_loss=0.2217, pruned_loss=0.03347, over 4909.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2168, pruned_loss=0.03709, over 971949.82 frames.], batch size: 18, lr: 2.99e-04 +2022-05-05 19:06:52,574 INFO [train.py:715] (7/8) Epoch 7, batch 6300, loss[loss=0.1448, simple_loss=0.2073, pruned_loss=0.04121, over 4923.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2168, pruned_loss=0.03695, over 972586.85 frames.], batch size: 18, lr: 2.99e-04 +2022-05-05 19:07:30,974 INFO [train.py:715] (7/8) Epoch 7, batch 6350, loss[loss=0.1506, simple_loss=0.2161, pruned_loss=0.04257, over 4846.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2171, pruned_loss=0.03709, over 972228.21 frames.], batch size: 30, lr: 2.99e-04 +2022-05-05 19:08:10,031 INFO [train.py:715] (7/8) Epoch 7, batch 6400, loss[loss=0.1417, simple_loss=0.2201, pruned_loss=0.03161, over 4905.00 frames.], tot_loss[loss=0.1456, simple_loss=0.217, pruned_loss=0.03713, over 972279.30 frames.], batch size: 19, lr: 2.99e-04 +2022-05-05 19:08:49,045 INFO [train.py:715] (7/8) Epoch 7, batch 6450, loss[loss=0.1576, simple_loss=0.2269, pruned_loss=0.04413, over 4784.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03711, over 971893.88 frames.], batch size: 14, lr: 2.99e-04 +2022-05-05 19:09:27,584 INFO [train.py:715] (7/8) Epoch 7, batch 6500, loss[loss=0.168, simple_loss=0.2333, pruned_loss=0.05136, over 4718.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.03741, over 971170.85 frames.], batch size: 15, lr: 2.99e-04 +2022-05-05 19:10:06,573 INFO [train.py:715] (7/8) Epoch 7, batch 6550, loss[loss=0.1523, simple_loss=0.2315, pruned_loss=0.03661, over 4801.00 frames.], tot_loss[loss=0.1462, simple_loss=0.218, pruned_loss=0.03714, over 970432.64 frames.], batch size: 24, lr: 2.99e-04 +2022-05-05 19:10:46,392 INFO [train.py:715] (7/8) Epoch 7, batch 6600, loss[loss=0.1411, simple_loss=0.2117, pruned_loss=0.03527, over 4738.00 frames.], tot_loss[loss=0.1479, 
simple_loss=0.2195, pruned_loss=0.03809, over 970963.17 frames.], batch size: 16, lr: 2.99e-04 +2022-05-05 19:11:25,243 INFO [train.py:715] (7/8) Epoch 7, batch 6650, loss[loss=0.1394, simple_loss=0.2203, pruned_loss=0.02923, over 4962.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2196, pruned_loss=0.03783, over 970193.23 frames.], batch size: 21, lr: 2.99e-04 +2022-05-05 19:12:04,476 INFO [train.py:715] (7/8) Epoch 7, batch 6700, loss[loss=0.1275, simple_loss=0.1943, pruned_loss=0.03033, over 4806.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2196, pruned_loss=0.03773, over 970282.67 frames.], batch size: 12, lr: 2.99e-04 +2022-05-05 19:12:43,222 INFO [train.py:715] (7/8) Epoch 7, batch 6750, loss[loss=0.121, simple_loss=0.1852, pruned_loss=0.02841, over 4719.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2187, pruned_loss=0.03747, over 969685.34 frames.], batch size: 12, lr: 2.99e-04 +2022-05-05 19:13:22,216 INFO [train.py:715] (7/8) Epoch 7, batch 6800, loss[loss=0.1577, simple_loss=0.2277, pruned_loss=0.04384, over 4830.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03727, over 971354.68 frames.], batch size: 30, lr: 2.99e-04 +2022-05-05 19:14:00,585 INFO [train.py:715] (7/8) Epoch 7, batch 6850, loss[loss=0.1486, simple_loss=0.2225, pruned_loss=0.03733, over 4980.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2176, pruned_loss=0.03673, over 971798.67 frames.], batch size: 25, lr: 2.99e-04 +2022-05-05 19:14:39,177 INFO [train.py:715] (7/8) Epoch 7, batch 6900, loss[loss=0.175, simple_loss=0.2521, pruned_loss=0.04892, over 4808.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.03662, over 972397.91 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:15:18,698 INFO [train.py:715] (7/8) Epoch 7, batch 6950, loss[loss=0.1367, simple_loss=0.2048, pruned_loss=0.03431, over 4857.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03647, over 972595.79 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:15:56,858 INFO [train.py:715] (7/8) Epoch 7, batch 7000, loss[loss=0.131, simple_loss=0.2042, pruned_loss=0.02886, over 4697.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03656, over 972732.55 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:16:35,557 INFO [train.py:715] (7/8) Epoch 7, batch 7050, loss[loss=0.1395, simple_loss=0.2162, pruned_loss=0.03138, over 4958.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.03686, over 971870.29 frames.], batch size: 29, lr: 2.98e-04 +2022-05-05 19:17:14,120 INFO [train.py:715] (7/8) Epoch 7, batch 7100, loss[loss=0.1621, simple_loss=0.2326, pruned_loss=0.04583, over 4778.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2169, pruned_loss=0.03692, over 972374.22 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:17:52,400 INFO [train.py:715] (7/8) Epoch 7, batch 7150, loss[loss=0.1336, simple_loss=0.2098, pruned_loss=0.02872, over 4811.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03738, over 972125.08 frames.], batch size: 25, lr: 2.98e-04 +2022-05-05 19:18:31,021 INFO [train.py:715] (7/8) Epoch 7, batch 7200, loss[loss=0.1411, simple_loss=0.2052, pruned_loss=0.03848, over 4850.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2177, pruned_loss=0.03743, over 971454.57 frames.], batch size: 13, lr: 2.98e-04 +2022-05-05 19:19:10,024 INFO [train.py:715] (7/8) Epoch 7, batch 7250, loss[loss=0.1466, simple_loss=0.2173, pruned_loss=0.03794, over 4792.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, 
pruned_loss=0.03754, over 971618.28 frames.], batch size: 24, lr: 2.98e-04 +2022-05-05 19:19:49,673 INFO [train.py:715] (7/8) Epoch 7, batch 7300, loss[loss=0.1398, simple_loss=0.2057, pruned_loss=0.03696, over 4922.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03733, over 972651.26 frames.], batch size: 23, lr: 2.98e-04 +2022-05-05 19:20:28,209 INFO [train.py:715] (7/8) Epoch 7, batch 7350, loss[loss=0.1273, simple_loss=0.1983, pruned_loss=0.02814, over 4809.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2179, pruned_loss=0.03755, over 972076.91 frames.], batch size: 12, lr: 2.98e-04 +2022-05-05 19:21:06,662 INFO [train.py:715] (7/8) Epoch 7, batch 7400, loss[loss=0.1446, simple_loss=0.2137, pruned_loss=0.03774, over 4774.00 frames.], tot_loss[loss=0.1465, simple_loss=0.218, pruned_loss=0.03754, over 971997.13 frames.], batch size: 17, lr: 2.98e-04 +2022-05-05 19:21:45,792 INFO [train.py:715] (7/8) Epoch 7, batch 7450, loss[loss=0.1437, simple_loss=0.2183, pruned_loss=0.03459, over 4888.00 frames.], tot_loss[loss=0.1466, simple_loss=0.218, pruned_loss=0.03759, over 971701.54 frames.], batch size: 16, lr: 2.98e-04 +2022-05-05 19:22:23,999 INFO [train.py:715] (7/8) Epoch 7, batch 7500, loss[loss=0.1297, simple_loss=0.2039, pruned_loss=0.02775, over 4853.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03718, over 972477.18 frames.], batch size: 20, lr: 2.98e-04 +2022-05-05 19:23:02,796 INFO [train.py:715] (7/8) Epoch 7, batch 7550, loss[loss=0.1827, simple_loss=0.279, pruned_loss=0.04316, over 4813.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03737, over 972411.65 frames.], batch size: 27, lr: 2.98e-04 +2022-05-05 19:23:41,663 INFO [train.py:715] (7/8) Epoch 7, batch 7600, loss[loss=0.1527, simple_loss=0.2292, pruned_loss=0.03809, over 4946.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2171, pruned_loss=0.03702, over 972534.99 frames.], batch size: 39, lr: 2.98e-04 +2022-05-05 19:24:20,770 INFO [train.py:715] (7/8) Epoch 7, batch 7650, loss[loss=0.1636, simple_loss=0.2347, pruned_loss=0.04629, over 4965.00 frames.], tot_loss[loss=0.147, simple_loss=0.2183, pruned_loss=0.03784, over 973631.34 frames.], batch size: 24, lr: 2.98e-04 +2022-05-05 19:24:59,080 INFO [train.py:715] (7/8) Epoch 7, batch 7700, loss[loss=0.1294, simple_loss=0.2004, pruned_loss=0.02922, over 4769.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2176, pruned_loss=0.03734, over 973212.82 frames.], batch size: 19, lr: 2.98e-04 +2022-05-05 19:25:38,045 INFO [train.py:715] (7/8) Epoch 7, batch 7750, loss[loss=0.1512, simple_loss=0.2119, pruned_loss=0.04526, over 4954.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2185, pruned_loss=0.03763, over 973557.11 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:26:17,071 INFO [train.py:715] (7/8) Epoch 7, batch 7800, loss[loss=0.1323, simple_loss=0.1976, pruned_loss=0.03348, over 4922.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03705, over 973762.25 frames.], batch size: 18, lr: 2.98e-04 +2022-05-05 19:26:55,229 INFO [train.py:715] (7/8) Epoch 7, batch 7850, loss[loss=0.143, simple_loss=0.2229, pruned_loss=0.0315, over 4885.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2176, pruned_loss=0.03702, over 972856.29 frames.], batch size: 22, lr: 2.98e-04 +2022-05-05 19:27:34,426 INFO [train.py:715] (7/8) Epoch 7, batch 7900, loss[loss=0.1469, simple_loss=0.2223, pruned_loss=0.03579, over 4975.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03741, over 972640.54 
frames.], batch size: 39, lr: 2.98e-04 +2022-05-05 19:28:13,173 INFO [train.py:715] (7/8) Epoch 7, batch 7950, loss[loss=0.1672, simple_loss=0.2413, pruned_loss=0.04656, over 4749.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2186, pruned_loss=0.03749, over 972241.67 frames.], batch size: 19, lr: 2.98e-04 +2022-05-05 19:28:52,648 INFO [train.py:715] (7/8) Epoch 7, batch 8000, loss[loss=0.1255, simple_loss=0.206, pruned_loss=0.02253, over 4859.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03709, over 972508.22 frames.], batch size: 30, lr: 2.98e-04 +2022-05-05 19:29:30,737 INFO [train.py:715] (7/8) Epoch 7, batch 8050, loss[loss=0.1269, simple_loss=0.1973, pruned_loss=0.02829, over 4981.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.0366, over 972249.12 frames.], batch size: 14, lr: 2.98e-04 +2022-05-05 19:30:09,296 INFO [train.py:715] (7/8) Epoch 7, batch 8100, loss[loss=0.1212, simple_loss=0.1958, pruned_loss=0.02326, over 4856.00 frames.], tot_loss[loss=0.146, simple_loss=0.2181, pruned_loss=0.03699, over 971738.68 frames.], batch size: 20, lr: 2.98e-04 +2022-05-05 19:30:48,380 INFO [train.py:715] (7/8) Epoch 7, batch 8150, loss[loss=0.1333, simple_loss=0.201, pruned_loss=0.03281, over 4805.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2177, pruned_loss=0.03686, over 971422.70 frames.], batch size: 24, lr: 2.98e-04 +2022-05-05 19:31:26,682 INFO [train.py:715] (7/8) Epoch 7, batch 8200, loss[loss=0.1578, simple_loss=0.2341, pruned_loss=0.04075, over 4902.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03724, over 972568.05 frames.], batch size: 19, lr: 2.98e-04 +2022-05-05 19:32:05,128 INFO [train.py:715] (7/8) Epoch 7, batch 8250, loss[loss=0.1467, simple_loss=0.2145, pruned_loss=0.03945, over 4785.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.03709, over 971956.80 frames.], batch size: 17, lr: 2.98e-04 +2022-05-05 19:32:43,781 INFO [train.py:715] (7/8) Epoch 7, batch 8300, loss[loss=0.1418, simple_loss=0.2153, pruned_loss=0.03416, over 4833.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2176, pruned_loss=0.03734, over 972666.96 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:33:22,690 INFO [train.py:715] (7/8) Epoch 7, batch 8350, loss[loss=0.1398, simple_loss=0.2112, pruned_loss=0.0342, over 4695.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2172, pruned_loss=0.03709, over 972233.47 frames.], batch size: 15, lr: 2.98e-04 +2022-05-05 19:34:00,643 INFO [train.py:715] (7/8) Epoch 7, batch 8400, loss[loss=0.1604, simple_loss=0.2353, pruned_loss=0.04272, over 4944.00 frames.], tot_loss[loss=0.1467, simple_loss=0.218, pruned_loss=0.03775, over 972406.27 frames.], batch size: 21, lr: 2.98e-04 +2022-05-05 19:34:39,717 INFO [train.py:715] (7/8) Epoch 7, batch 8450, loss[loss=0.1572, simple_loss=0.2199, pruned_loss=0.04729, over 4821.00 frames.], tot_loss[loss=0.1468, simple_loss=0.218, pruned_loss=0.0378, over 972471.26 frames.], batch size: 13, lr: 2.98e-04 +2022-05-05 19:35:18,875 INFO [train.py:715] (7/8) Epoch 7, batch 8500, loss[loss=0.1709, simple_loss=0.2308, pruned_loss=0.05551, over 4933.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2182, pruned_loss=0.03826, over 972734.50 frames.], batch size: 35, lr: 2.98e-04 +2022-05-05 19:35:58,054 INFO [train.py:715] (7/8) Epoch 7, batch 8550, loss[loss=0.1647, simple_loss=0.2322, pruned_loss=0.04857, over 4885.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2183, pruned_loss=0.03829, over 973212.42 frames.], batch size: 22, lr: 2.97e-04 
+2022-05-05 19:36:36,295 INFO [train.py:715] (7/8) Epoch 7, batch 8600, loss[loss=0.1557, simple_loss=0.2326, pruned_loss=0.03944, over 4912.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2192, pruned_loss=0.03822, over 972836.84 frames.], batch size: 17, lr: 2.97e-04 +2022-05-05 19:37:14,959 INFO [train.py:715] (7/8) Epoch 7, batch 8650, loss[loss=0.157, simple_loss=0.2329, pruned_loss=0.04054, over 4926.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.0376, over 972092.29 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:37:54,307 INFO [train.py:715] (7/8) Epoch 7, batch 8700, loss[loss=0.1423, simple_loss=0.2173, pruned_loss=0.03363, over 4850.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03688, over 972044.66 frames.], batch size: 20, lr: 2.97e-04 +2022-05-05 19:38:32,518 INFO [train.py:715] (7/8) Epoch 7, batch 8750, loss[loss=0.1128, simple_loss=0.1909, pruned_loss=0.01738, over 4826.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.03694, over 972557.00 frames.], batch size: 12, lr: 2.97e-04 +2022-05-05 19:39:11,385 INFO [train.py:715] (7/8) Epoch 7, batch 8800, loss[loss=0.1311, simple_loss=0.199, pruned_loss=0.0316, over 4931.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03709, over 973211.12 frames.], batch size: 23, lr: 2.97e-04 +2022-05-05 19:39:50,317 INFO [train.py:715] (7/8) Epoch 7, batch 8850, loss[loss=0.1612, simple_loss=0.2312, pruned_loss=0.04553, over 4917.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03738, over 971992.82 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:40:30,010 INFO [train.py:715] (7/8) Epoch 7, batch 8900, loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03254, over 4971.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2184, pruned_loss=0.03757, over 972498.48 frames.], batch size: 15, lr: 2.97e-04 +2022-05-05 19:41:08,237 INFO [train.py:715] (7/8) Epoch 7, batch 8950, loss[loss=0.161, simple_loss=0.236, pruned_loss=0.04298, over 4897.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03725, over 971346.72 frames.], batch size: 22, lr: 2.97e-04 +2022-05-05 19:41:46,835 INFO [train.py:715] (7/8) Epoch 7, batch 9000, loss[loss=0.1493, simple_loss=0.2362, pruned_loss=0.03118, over 4964.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03715, over 971736.05 frames.], batch size: 24, lr: 2.97e-04 +2022-05-05 19:41:46,836 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 19:41:56,559 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1085, simple_loss=0.1932, pruned_loss=0.01192, over 914524.00 frames. 
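The per-batch and validation entries in this log report three numbers whose relationship can be checked directly from the values above: the headline loss is consistent with 0.5 * simple_loss + pruned_loss. For the validation entry just above, 0.5 * 0.1932 + 0.01192 = 0.10852, which rounds to the logged 0.1085; for the running average at batch 8550, 0.5 * 0.2183 + 0.03829 = 0.14744, matching the logged 0.1474. The 0.5 weight is inferred only from the numbers printed here, not stated in these lines, so the sketch below is a consistency check under that assumption, not a statement about the training code itself.

    # Sketch: check that loss is close to 0.5 * simple_loss + pruned_loss for log entries.
    # The 0.5 weight is an inference from the logged values above, not taken from train.py.
    def check_loss_combination(loss, simple_loss, pruned_loss, weight=0.5, tol=1e-3):
        """Return True if the headline loss matches weight * simple_loss + pruned_loss."""
        return abs(loss - (weight * simple_loss + pruned_loss)) < tol

    # Validation entry above: loss=0.1085, simple_loss=0.1932, pruned_loss=0.01192
    assert check_loss_combination(0.1085, 0.1932, 0.01192)
    # Running average at batch 8550: loss=0.1474, simple_loss=0.2183, pruned_loss=0.03829
    assert check_loss_combination(0.1474, 0.2183, 0.03829)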
+2022-05-05 19:42:35,336 INFO [train.py:715] (7/8) Epoch 7, batch 9050, loss[loss=0.1835, simple_loss=0.2386, pruned_loss=0.06421, over 4946.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03737, over 972439.19 frames.], batch size: 15, lr: 2.97e-04 +2022-05-05 19:43:15,394 INFO [train.py:715] (7/8) Epoch 7, batch 9100, loss[loss=0.1641, simple_loss=0.231, pruned_loss=0.04865, over 4846.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2177, pruned_loss=0.03743, over 972598.54 frames.], batch size: 13, lr: 2.97e-04 +2022-05-05 19:43:54,071 INFO [train.py:715] (7/8) Epoch 7, batch 9150, loss[loss=0.1554, simple_loss=0.2307, pruned_loss=0.04003, over 4770.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2178, pruned_loss=0.03759, over 972346.03 frames.], batch size: 14, lr: 2.97e-04 +2022-05-05 19:44:32,870 INFO [train.py:715] (7/8) Epoch 7, batch 9200, loss[loss=0.1526, simple_loss=0.2078, pruned_loss=0.04865, over 4922.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03716, over 972119.37 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:45:12,206 INFO [train.py:715] (7/8) Epoch 7, batch 9250, loss[loss=0.148, simple_loss=0.2332, pruned_loss=0.03139, over 4823.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03721, over 972485.00 frames.], batch size: 13, lr: 2.97e-04 +2022-05-05 19:45:51,291 INFO [train.py:715] (7/8) Epoch 7, batch 9300, loss[loss=0.1619, simple_loss=0.2337, pruned_loss=0.0451, over 4833.00 frames.], tot_loss[loss=0.1472, simple_loss=0.219, pruned_loss=0.03774, over 973187.29 frames.], batch size: 15, lr: 2.97e-04 +2022-05-05 19:46:30,348 INFO [train.py:715] (7/8) Epoch 7, batch 9350, loss[loss=0.1241, simple_loss=0.2003, pruned_loss=0.02398, over 4848.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2186, pruned_loss=0.03757, over 972836.46 frames.], batch size: 20, lr: 2.97e-04 +2022-05-05 19:47:08,480 INFO [train.py:715] (7/8) Epoch 7, batch 9400, loss[loss=0.155, simple_loss=0.2201, pruned_loss=0.04492, over 4977.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2188, pruned_loss=0.03781, over 972428.01 frames.], batch size: 35, lr: 2.97e-04 +2022-05-05 19:47:48,271 INFO [train.py:715] (7/8) Epoch 7, batch 9450, loss[loss=0.1237, simple_loss=0.1961, pruned_loss=0.02559, over 4916.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2189, pruned_loss=0.03747, over 972724.07 frames.], batch size: 21, lr: 2.97e-04 +2022-05-05 19:48:27,280 INFO [train.py:715] (7/8) Epoch 7, batch 9500, loss[loss=0.1391, simple_loss=0.2169, pruned_loss=0.03059, over 4869.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2186, pruned_loss=0.0369, over 972815.09 frames.], batch size: 16, lr: 2.97e-04 +2022-05-05 19:49:05,878 INFO [train.py:715] (7/8) Epoch 7, batch 9550, loss[loss=0.1434, simple_loss=0.2086, pruned_loss=0.03912, over 4926.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2186, pruned_loss=0.03703, over 973404.97 frames.], batch size: 23, lr: 2.97e-04 +2022-05-05 19:49:44,846 INFO [train.py:715] (7/8) Epoch 7, batch 9600, loss[loss=0.1475, simple_loss=0.2219, pruned_loss=0.03649, over 4954.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2185, pruned_loss=0.03695, over 972559.00 frames.], batch size: 24, lr: 2.97e-04 +2022-05-05 19:50:23,439 INFO [train.py:715] (7/8) Epoch 7, batch 9650, loss[loss=0.1363, simple_loss=0.2086, pruned_loss=0.03196, over 4969.00 frames.], tot_loss[loss=0.1449, simple_loss=0.217, pruned_loss=0.03645, over 972506.50 frames.], batch size: 35, lr: 2.97e-04 +2022-05-05 19:51:02,958 INFO 
[train.py:715] (7/8) Epoch 7, batch 9700, loss[loss=0.1442, simple_loss=0.2163, pruned_loss=0.03608, over 4799.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03654, over 972130.03 frames.], batch size: 14, lr: 2.97e-04 +2022-05-05 19:51:41,571 INFO [train.py:715] (7/8) Epoch 7, batch 9750, loss[loss=0.1438, simple_loss=0.221, pruned_loss=0.03333, over 4901.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2167, pruned_loss=0.03707, over 971249.35 frames.], batch size: 19, lr: 2.97e-04 +2022-05-05 19:52:20,960 INFO [train.py:715] (7/8) Epoch 7, batch 9800, loss[loss=0.1503, simple_loss=0.2216, pruned_loss=0.03946, over 4918.00 frames.], tot_loss[loss=0.145, simple_loss=0.2163, pruned_loss=0.03683, over 971128.01 frames.], batch size: 18, lr: 2.97e-04 +2022-05-05 19:52:59,042 INFO [train.py:715] (7/8) Epoch 7, batch 9850, loss[loss=0.1492, simple_loss=0.2268, pruned_loss=0.03583, over 4877.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2162, pruned_loss=0.03663, over 971314.89 frames.], batch size: 16, lr: 2.97e-04 +2022-05-05 19:53:37,273 INFO [train.py:715] (7/8) Epoch 7, batch 9900, loss[loss=0.1303, simple_loss=0.2054, pruned_loss=0.02758, over 4945.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2168, pruned_loss=0.03679, over 972477.46 frames.], batch size: 39, lr: 2.97e-04 +2022-05-05 19:54:16,173 INFO [train.py:715] (7/8) Epoch 7, batch 9950, loss[loss=0.1447, simple_loss=0.2129, pruned_loss=0.03829, over 4987.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03666, over 973212.05 frames.], batch size: 35, lr: 2.97e-04 +2022-05-05 19:54:55,286 INFO [train.py:715] (7/8) Epoch 7, batch 10000, loss[loss=0.1433, simple_loss=0.2142, pruned_loss=0.03619, over 4830.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03643, over 973124.07 frames.], batch size: 13, lr: 2.97e-04 +2022-05-05 19:55:33,943 INFO [train.py:715] (7/8) Epoch 7, batch 10050, loss[loss=0.1567, simple_loss=0.235, pruned_loss=0.03921, over 4769.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2176, pruned_loss=0.03647, over 973005.14 frames.], batch size: 19, lr: 2.97e-04 +2022-05-05 19:56:12,504 INFO [train.py:715] (7/8) Epoch 7, batch 10100, loss[loss=0.112, simple_loss=0.2007, pruned_loss=0.01167, over 4770.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03583, over 972252.87 frames.], batch size: 12, lr: 2.97e-04 +2022-05-05 19:56:51,797 INFO [train.py:715] (7/8) Epoch 7, batch 10150, loss[loss=0.1536, simple_loss=0.2202, pruned_loss=0.04348, over 4853.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03677, over 971921.31 frames.], batch size: 34, lr: 2.97e-04 +2022-05-05 19:57:30,413 INFO [train.py:715] (7/8) Epoch 7, batch 10200, loss[loss=0.1528, simple_loss=0.2251, pruned_loss=0.04023, over 4807.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2168, pruned_loss=0.037, over 971579.87 frames.], batch size: 13, lr: 2.97e-04 +2022-05-05 19:58:09,059 INFO [train.py:715] (7/8) Epoch 7, batch 10250, loss[loss=0.1395, simple_loss=0.2086, pruned_loss=0.03521, over 4857.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.03691, over 970923.86 frames.], batch size: 20, lr: 2.96e-04 +2022-05-05 19:58:48,253 INFO [train.py:715] (7/8) Epoch 7, batch 10300, loss[loss=0.1254, simple_loss=0.203, pruned_loss=0.02389, over 4961.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03687, over 971240.91 frames.], batch size: 24, lr: 2.96e-04 +2022-05-05 19:59:26,899 INFO [train.py:715] (7/8) Epoch 7, 
batch 10350, loss[loss=0.1259, simple_loss=0.207, pruned_loss=0.02236, over 4756.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2178, pruned_loss=0.03746, over 970519.60 frames.], batch size: 19, lr: 2.96e-04 +2022-05-05 20:00:05,912 INFO [train.py:715] (7/8) Epoch 7, batch 10400, loss[loss=0.1068, simple_loss=0.1815, pruned_loss=0.01601, over 4844.00 frames.], tot_loss[loss=0.147, simple_loss=0.2182, pruned_loss=0.0379, over 970368.32 frames.], batch size: 12, lr: 2.96e-04 +2022-05-05 20:00:44,693 INFO [train.py:715] (7/8) Epoch 7, batch 10450, loss[loss=0.1261, simple_loss=0.2053, pruned_loss=0.02346, over 4925.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2179, pruned_loss=0.03774, over 970852.75 frames.], batch size: 18, lr: 2.96e-04 +2022-05-05 20:01:24,297 INFO [train.py:715] (7/8) Epoch 7, batch 10500, loss[loss=0.1335, simple_loss=0.2037, pruned_loss=0.03162, over 4898.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2178, pruned_loss=0.03759, over 971450.51 frames.], batch size: 19, lr: 2.96e-04 +2022-05-05 20:02:03,051 INFO [train.py:715] (7/8) Epoch 7, batch 10550, loss[loss=0.1701, simple_loss=0.2281, pruned_loss=0.05601, over 4964.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2175, pruned_loss=0.03713, over 972080.04 frames.], batch size: 14, lr: 2.96e-04 +2022-05-05 20:02:41,165 INFO [train.py:715] (7/8) Epoch 7, batch 10600, loss[loss=0.1492, simple_loss=0.2255, pruned_loss=0.03643, over 4804.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03741, over 972727.40 frames.], batch size: 13, lr: 2.96e-04 +2022-05-05 20:03:20,355 INFO [train.py:715] (7/8) Epoch 7, batch 10650, loss[loss=0.144, simple_loss=0.2105, pruned_loss=0.03876, over 4855.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.0376, over 972224.74 frames.], batch size: 30, lr: 2.96e-04 +2022-05-05 20:03:59,395 INFO [train.py:715] (7/8) Epoch 7, batch 10700, loss[loss=0.1525, simple_loss=0.2205, pruned_loss=0.04226, over 4857.00 frames.], tot_loss[loss=0.1472, simple_loss=0.219, pruned_loss=0.03775, over 971786.99 frames.], batch size: 20, lr: 2.96e-04 +2022-05-05 20:04:38,880 INFO [train.py:715] (7/8) Epoch 7, batch 10750, loss[loss=0.1373, simple_loss=0.2202, pruned_loss=0.02724, over 4953.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2189, pruned_loss=0.03801, over 971839.10 frames.], batch size: 24, lr: 2.96e-04 +2022-05-05 20:05:17,664 INFO [train.py:715] (7/8) Epoch 7, batch 10800, loss[loss=0.152, simple_loss=0.2269, pruned_loss=0.03856, over 4937.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2178, pruned_loss=0.03776, over 972760.37 frames.], batch size: 23, lr: 2.96e-04 +2022-05-05 20:05:57,421 INFO [train.py:715] (7/8) Epoch 7, batch 10850, loss[loss=0.1352, simple_loss=0.2031, pruned_loss=0.03366, over 4713.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2168, pruned_loss=0.03714, over 972279.11 frames.], batch size: 15, lr: 2.96e-04 +2022-05-05 20:06:35,665 INFO [train.py:715] (7/8) Epoch 7, batch 10900, loss[loss=0.1188, simple_loss=0.1888, pruned_loss=0.02442, over 4843.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2164, pruned_loss=0.03638, over 971750.32 frames.], batch size: 12, lr: 2.96e-04 +2022-05-05 20:07:14,755 INFO [train.py:715] (7/8) Epoch 7, batch 10950, loss[loss=0.1724, simple_loss=0.2463, pruned_loss=0.04921, over 4835.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03674, over 971824.10 frames.], batch size: 15, lr: 2.96e-04 +2022-05-05 20:07:53,905 INFO [train.py:715] (7/8) Epoch 7, batch 11000, 
loss[loss=0.1626, simple_loss=0.2344, pruned_loss=0.0454, over 4976.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2181, pruned_loss=0.03681, over 972187.86 frames.], batch size: 35, lr: 2.96e-04 +2022-05-05 20:08:32,747 INFO [train.py:715] (7/8) Epoch 7, batch 11050, loss[loss=0.1336, simple_loss=0.2104, pruned_loss=0.0284, over 4806.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2188, pruned_loss=0.0371, over 972213.24 frames.], batch size: 13, lr: 2.96e-04 +2022-05-05 20:09:11,470 INFO [train.py:715] (7/8) Epoch 7, batch 11100, loss[loss=0.1567, simple_loss=0.2287, pruned_loss=0.04231, over 4806.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03707, over 971871.06 frames.], batch size: 24, lr: 2.96e-04 +2022-05-05 20:09:50,082 INFO [train.py:715] (7/8) Epoch 7, batch 11150, loss[loss=0.1504, simple_loss=0.2222, pruned_loss=0.03927, over 4820.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2183, pruned_loss=0.03713, over 972249.79 frames.], batch size: 27, lr: 2.96e-04 +2022-05-05 20:10:29,709 INFO [train.py:715] (7/8) Epoch 7, batch 11200, loss[loss=0.126, simple_loss=0.194, pruned_loss=0.02898, over 4796.00 frames.], tot_loss[loss=0.146, simple_loss=0.2181, pruned_loss=0.03698, over 972310.66 frames.], batch size: 17, lr: 2.96e-04 +2022-05-05 20:11:08,077 INFO [train.py:715] (7/8) Epoch 7, batch 11250, loss[loss=0.1155, simple_loss=0.1853, pruned_loss=0.02281, over 4771.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2177, pruned_loss=0.03689, over 972574.93 frames.], batch size: 17, lr: 2.96e-04 +2022-05-05 20:11:46,239 INFO [train.py:715] (7/8) Epoch 7, batch 11300, loss[loss=0.1165, simple_loss=0.1934, pruned_loss=0.01986, over 4753.00 frames.], tot_loss[loss=0.1455, simple_loss=0.217, pruned_loss=0.03696, over 971682.79 frames.], batch size: 19, lr: 2.96e-04 +2022-05-05 20:12:25,980 INFO [train.py:715] (7/8) Epoch 7, batch 11350, loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.0374, over 4988.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2161, pruned_loss=0.03627, over 971433.70 frames.], batch size: 26, lr: 2.96e-04 +2022-05-05 20:13:04,523 INFO [train.py:715] (7/8) Epoch 7, batch 11400, loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.029, over 4892.00 frames.], tot_loss[loss=0.144, simple_loss=0.2158, pruned_loss=0.03611, over 971196.88 frames.], batch size: 19, lr: 2.96e-04 +2022-05-05 20:13:43,553 INFO [train.py:715] (7/8) Epoch 7, batch 11450, loss[loss=0.125, simple_loss=0.197, pruned_loss=0.02656, over 4952.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.0366, over 971372.93 frames.], batch size: 21, lr: 2.96e-04 +2022-05-05 20:14:22,146 INFO [train.py:715] (7/8) Epoch 7, batch 11500, loss[loss=0.1399, simple_loss=0.2102, pruned_loss=0.03483, over 4766.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2182, pruned_loss=0.03768, over 971283.99 frames.], batch size: 14, lr: 2.96e-04 +2022-05-05 20:15:01,730 INFO [train.py:715] (7/8) Epoch 7, batch 11550, loss[loss=0.1465, simple_loss=0.2131, pruned_loss=0.03995, over 4948.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2179, pruned_loss=0.03757, over 971137.35 frames.], batch size: 24, lr: 2.96e-04 +2022-05-05 20:15:39,999 INFO [train.py:715] (7/8) Epoch 7, batch 11600, loss[loss=0.14, simple_loss=0.2103, pruned_loss=0.0348, over 4872.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.03719, over 972907.15 frames.], batch size: 32, lr: 2.96e-04 +2022-05-05 20:16:18,808 INFO [train.py:715] (7/8) Epoch 7, batch 11650, loss[loss=0.1237, 
simple_loss=0.1964, pruned_loss=0.02547, over 4979.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03666, over 972947.25 frames.], batch size: 28, lr: 2.96e-04 +2022-05-05 20:16:58,203 INFO [train.py:715] (7/8) Epoch 7, batch 11700, loss[loss=0.1532, simple_loss=0.232, pruned_loss=0.03717, over 4987.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.03657, over 972483.75 frames.], batch size: 25, lr: 2.96e-04 +2022-05-05 20:17:36,280 INFO [train.py:715] (7/8) Epoch 7, batch 11750, loss[loss=0.1381, simple_loss=0.1994, pruned_loss=0.03842, over 4797.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2158, pruned_loss=0.03631, over 971894.00 frames.], batch size: 13, lr: 2.96e-04 +2022-05-05 20:18:15,076 INFO [train.py:715] (7/8) Epoch 7, batch 11800, loss[loss=0.1546, simple_loss=0.2276, pruned_loss=0.04077, over 4901.00 frames.], tot_loss[loss=0.144, simple_loss=0.2153, pruned_loss=0.03632, over 971938.09 frames.], batch size: 17, lr: 2.96e-04 +2022-05-05 20:18:54,266 INFO [train.py:715] (7/8) Epoch 7, batch 11850, loss[loss=0.1525, simple_loss=0.2222, pruned_loss=0.04144, over 4985.00 frames.], tot_loss[loss=0.1437, simple_loss=0.215, pruned_loss=0.03616, over 973048.54 frames.], batch size: 25, lr: 2.96e-04 +2022-05-05 20:19:32,624 INFO [train.py:715] (7/8) Epoch 7, batch 11900, loss[loss=0.1442, simple_loss=0.2083, pruned_loss=0.04005, over 4881.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2158, pruned_loss=0.03675, over 972272.17 frames.], batch size: 32, lr: 2.96e-04 +2022-05-05 20:20:11,922 INFO [train.py:715] (7/8) Epoch 7, batch 11950, loss[loss=0.1648, simple_loss=0.2308, pruned_loss=0.04938, over 4973.00 frames.], tot_loss[loss=0.145, simple_loss=0.2164, pruned_loss=0.03673, over 973001.60 frames.], batch size: 39, lr: 2.96e-04 +2022-05-05 20:20:50,620 INFO [train.py:715] (7/8) Epoch 7, batch 12000, loss[loss=0.1577, simple_loss=0.2293, pruned_loss=0.04306, over 4878.00 frames.], tot_loss[loss=0.145, simple_loss=0.2164, pruned_loss=0.03682, over 972542.36 frames.], batch size: 32, lr: 2.95e-04 +2022-05-05 20:20:50,620 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 20:21:00,228 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.108, simple_loss=0.193, pruned_loss=0.01154, over 914524.00 frames. 
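The per-batch entries above follow a fixed layout (timestamp, train.py:715, epoch, batch, per-batch loss[...], running tot_loss[...], batch size, lr), so loss curves can be pulled out of the raw log with a single regular expression. A minimal parsing sketch, assuming the line format shown in this section; the function and field names are illustrative helpers, not part of the training code:

    import re

    # Matches the per-batch lines emitted from train.py:715 in this log.
    BATCH_RE = re.compile(
        r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
        r"loss\[(?P<batch_metrics>[^\]]*)\], "
        r"tot_loss\[(?P<tot_metrics>[^\]]*)\], "
        r"batch size: (?P<batch_size>\d+), lr: (?P<lr>[0-9.e+-]+)"
    )

    def parse_metrics(text):
        """Turn 'loss=0.145, simple_loss=0.2166, ...' into a dict of floats,
        dropping the trailing 'over N frames.' part, which has no '='."""
        out = {}
        for part in text.split(", "):
            if "=" in part:
                key, value = part.split("=")
                out[key] = float(value)
        return out

    def parse_batch_line(line):
        """Return epoch, batch index, lr and the running-average metrics, or None."""
        m = BATCH_RE.search(line)
        if m is None:
            return None
        return {
            "epoch": int(m["epoch"]),
            "batch": int(m["batch"]),
            "batch_size": int(m["batch_size"]),
            "lr": float(m["lr"]),
            "tot": parse_metrics(m["tot_metrics"]),
        }

Applied to the batch 11650 entry above, this yields epoch 7, batch 11650, batch size 28, lr 2.96e-04 and a running-average loss of 0.145.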
+2022-05-05 20:21:38,891 INFO [train.py:715] (7/8) Epoch 7, batch 12050, loss[loss=0.1531, simple_loss=0.2176, pruned_loss=0.04431, over 4705.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2161, pruned_loss=0.03675, over 973074.82 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:22:18,261 INFO [train.py:715] (7/8) Epoch 7, batch 12100, loss[loss=0.1638, simple_loss=0.2255, pruned_loss=0.05107, over 4866.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2163, pruned_loss=0.0367, over 972336.71 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:22:56,854 INFO [train.py:715] (7/8) Epoch 7, batch 12150, loss[loss=0.1347, simple_loss=0.2027, pruned_loss=0.03336, over 4903.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2173, pruned_loss=0.03749, over 972730.37 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:23:35,617 INFO [train.py:715] (7/8) Epoch 7, batch 12200, loss[loss=0.1412, simple_loss=0.2272, pruned_loss=0.02762, over 4814.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2166, pruned_loss=0.03689, over 972776.66 frames.], batch size: 25, lr: 2.95e-04 +2022-05-05 20:24:14,744 INFO [train.py:715] (7/8) Epoch 7, batch 12250, loss[loss=0.1395, simple_loss=0.2073, pruned_loss=0.03589, over 4865.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2167, pruned_loss=0.03686, over 972598.85 frames.], batch size: 32, lr: 2.95e-04 +2022-05-05 20:24:53,359 INFO [train.py:715] (7/8) Epoch 7, batch 12300, loss[loss=0.1625, simple_loss=0.2293, pruned_loss=0.04781, over 4990.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2176, pruned_loss=0.03709, over 972832.83 frames.], batch size: 31, lr: 2.95e-04 +2022-05-05 20:25:35,088 INFO [train.py:715] (7/8) Epoch 7, batch 12350, loss[loss=0.1587, simple_loss=0.223, pruned_loss=0.04715, over 4828.00 frames.], tot_loss[loss=0.1455, simple_loss=0.217, pruned_loss=0.03697, over 972108.86 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:26:13,787 INFO [train.py:715] (7/8) Epoch 7, batch 12400, loss[loss=0.1369, simple_loss=0.2222, pruned_loss=0.02575, over 4885.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2169, pruned_loss=0.03704, over 973000.06 frames.], batch size: 22, lr: 2.95e-04 +2022-05-05 20:26:53,003 INFO [train.py:715] (7/8) Epoch 7, batch 12450, loss[loss=0.1663, simple_loss=0.2279, pruned_loss=0.05241, over 4763.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2167, pruned_loss=0.03719, over 972821.25 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:27:31,401 INFO [train.py:715] (7/8) Epoch 7, batch 12500, loss[loss=0.1501, simple_loss=0.2243, pruned_loss=0.03792, over 4927.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2164, pruned_loss=0.03732, over 972585.90 frames.], batch size: 18, lr: 2.95e-04 +2022-05-05 20:28:10,095 INFO [train.py:715] (7/8) Epoch 7, batch 12550, loss[loss=0.1587, simple_loss=0.224, pruned_loss=0.04672, over 4967.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2174, pruned_loss=0.03802, over 972457.55 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:28:49,195 INFO [train.py:715] (7/8) Epoch 7, batch 12600, loss[loss=0.1375, simple_loss=0.1986, pruned_loss=0.03822, over 4784.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2186, pruned_loss=0.03848, over 972517.46 frames.], batch size: 18, lr: 2.95e-04 +2022-05-05 20:29:27,375 INFO [train.py:715] (7/8) Epoch 7, batch 12650, loss[loss=0.11, simple_loss=0.18, pruned_loss=0.02007, over 4806.00 frames.], tot_loss[loss=0.1469, simple_loss=0.218, pruned_loss=0.03794, over 971798.10 frames.], batch size: 12, lr: 2.95e-04 +2022-05-05 20:30:06,577 
INFO [train.py:715] (7/8) Epoch 7, batch 12700, loss[loss=0.1274, simple_loss=0.2001, pruned_loss=0.02734, over 4813.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2187, pruned_loss=0.03828, over 971853.89 frames.], batch size: 26, lr: 2.95e-04 +2022-05-05 20:30:44,741 INFO [train.py:715] (7/8) Epoch 7, batch 12750, loss[loss=0.1087, simple_loss=0.1825, pruned_loss=0.01742, over 4955.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2188, pruned_loss=0.0381, over 972209.73 frames.], batch size: 24, lr: 2.95e-04 +2022-05-05 20:31:23,968 INFO [train.py:715] (7/8) Epoch 7, batch 12800, loss[loss=0.1682, simple_loss=0.2411, pruned_loss=0.04767, over 4982.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2186, pruned_loss=0.03795, over 972569.42 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:32:02,915 INFO [train.py:715] (7/8) Epoch 7, batch 12850, loss[loss=0.1549, simple_loss=0.223, pruned_loss=0.04337, over 4899.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03747, over 973038.85 frames.], batch size: 17, lr: 2.95e-04 +2022-05-05 20:32:41,511 INFO [train.py:715] (7/8) Epoch 7, batch 12900, loss[loss=0.126, simple_loss=0.1949, pruned_loss=0.02855, over 4935.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2181, pruned_loss=0.03755, over 972468.06 frames.], batch size: 29, lr: 2.95e-04 +2022-05-05 20:33:20,983 INFO [train.py:715] (7/8) Epoch 7, batch 12950, loss[loss=0.1382, simple_loss=0.2148, pruned_loss=0.03078, over 4896.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2185, pruned_loss=0.03743, over 972746.30 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:33:59,929 INFO [train.py:715] (7/8) Epoch 7, batch 13000, loss[loss=0.1776, simple_loss=0.2263, pruned_loss=0.06442, over 4927.00 frames.], tot_loss[loss=0.146, simple_loss=0.2181, pruned_loss=0.03691, over 973075.53 frames.], batch size: 18, lr: 2.95e-04 +2022-05-05 20:34:38,877 INFO [train.py:715] (7/8) Epoch 7, batch 13050, loss[loss=0.1706, simple_loss=0.2394, pruned_loss=0.05085, over 4741.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2181, pruned_loss=0.03687, over 972151.40 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:35:17,657 INFO [train.py:715] (7/8) Epoch 7, batch 13100, loss[loss=0.1332, simple_loss=0.209, pruned_loss=0.02863, over 4809.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2176, pruned_loss=0.03695, over 972301.02 frames.], batch size: 25, lr: 2.95e-04 +2022-05-05 20:35:57,325 INFO [train.py:715] (7/8) Epoch 7, batch 13150, loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03313, over 4800.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03729, over 971628.83 frames.], batch size: 21, lr: 2.95e-04 +2022-05-05 20:36:35,852 INFO [train.py:715] (7/8) Epoch 7, batch 13200, loss[loss=0.1557, simple_loss=0.2304, pruned_loss=0.04047, over 4919.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2194, pruned_loss=0.03796, over 971871.17 frames.], batch size: 17, lr: 2.95e-04 +2022-05-05 20:37:15,491 INFO [train.py:715] (7/8) Epoch 7, batch 13250, loss[loss=0.1571, simple_loss=0.2336, pruned_loss=0.04028, over 4882.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2191, pruned_loss=0.03806, over 971684.42 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:37:54,875 INFO [train.py:715] (7/8) Epoch 7, batch 13300, loss[loss=0.1459, simple_loss=0.2205, pruned_loss=0.03564, over 4763.00 frames.], tot_loss[loss=0.1474, simple_loss=0.2189, pruned_loss=0.03797, over 971870.36 frames.], batch size: 19, lr: 2.95e-04 +2022-05-05 20:38:33,805 INFO [train.py:715] 
(7/8) Epoch 7, batch 13350, loss[loss=0.1488, simple_loss=0.2237, pruned_loss=0.03701, over 4809.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03768, over 971321.60 frames.], batch size: 21, lr: 2.95e-04 +2022-05-05 20:39:12,812 INFO [train.py:715] (7/8) Epoch 7, batch 13400, loss[loss=0.1872, simple_loss=0.2448, pruned_loss=0.06485, over 4861.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2179, pruned_loss=0.03777, over 970977.89 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:39:51,469 INFO [train.py:715] (7/8) Epoch 7, batch 13450, loss[loss=0.1386, simple_loss=0.2169, pruned_loss=0.03017, over 4970.00 frames.], tot_loss[loss=0.1467, simple_loss=0.218, pruned_loss=0.03769, over 971652.90 frames.], batch size: 15, lr: 2.95e-04 +2022-05-05 20:40:30,902 INFO [train.py:715] (7/8) Epoch 7, batch 13500, loss[loss=0.1261, simple_loss=0.214, pruned_loss=0.01906, over 4967.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.03719, over 972099.53 frames.], batch size: 24, lr: 2.95e-04 +2022-05-05 20:41:09,546 INFO [train.py:715] (7/8) Epoch 7, batch 13550, loss[loss=0.1683, simple_loss=0.2409, pruned_loss=0.04787, over 4778.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03736, over 971686.82 frames.], batch size: 18, lr: 2.95e-04 +2022-05-05 20:41:48,024 INFO [train.py:715] (7/8) Epoch 7, batch 13600, loss[loss=0.1249, simple_loss=0.1955, pruned_loss=0.02711, over 4935.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2185, pruned_loss=0.0374, over 972679.10 frames.], batch size: 23, lr: 2.95e-04 +2022-05-05 20:42:26,946 INFO [train.py:715] (7/8) Epoch 7, batch 13650, loss[loss=0.1557, simple_loss=0.2183, pruned_loss=0.04652, over 4772.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03699, over 972466.67 frames.], batch size: 14, lr: 2.95e-04 +2022-05-05 20:43:05,965 INFO [train.py:715] (7/8) Epoch 7, batch 13700, loss[loss=0.2085, simple_loss=0.26, pruned_loss=0.07847, over 4743.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03739, over 971701.99 frames.], batch size: 16, lr: 2.95e-04 +2022-05-05 20:43:44,943 INFO [train.py:715] (7/8) Epoch 7, batch 13750, loss[loss=0.1751, simple_loss=0.2432, pruned_loss=0.05355, over 4925.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03742, over 971574.72 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:44:23,922 INFO [train.py:715] (7/8) Epoch 7, batch 13800, loss[loss=0.1297, simple_loss=0.1973, pruned_loss=0.03106, over 4827.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2169, pruned_loss=0.03685, over 971540.46 frames.], batch size: 13, lr: 2.94e-04 +2022-05-05 20:45:03,233 INFO [train.py:715] (7/8) Epoch 7, batch 13850, loss[loss=0.1323, simple_loss=0.2036, pruned_loss=0.03048, over 4922.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.0367, over 971554.15 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:45:41,500 INFO [train.py:715] (7/8) Epoch 7, batch 13900, loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02896, over 4907.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2158, pruned_loss=0.03663, over 971071.36 frames.], batch size: 19, lr: 2.94e-04 +2022-05-05 20:46:20,517 INFO [train.py:715] (7/8) Epoch 7, batch 13950, loss[loss=0.1176, simple_loss=0.1906, pruned_loss=0.02229, over 4742.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2156, pruned_loss=0.03667, over 971447.75 frames.], batch size: 12, lr: 2.94e-04 +2022-05-05 20:46:59,561 INFO [train.py:715] (7/8) Epoch 7, batch 14000, 
loss[loss=0.1276, simple_loss=0.2005, pruned_loss=0.02737, over 4947.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2159, pruned_loss=0.03714, over 971448.49 frames.], batch size: 29, lr: 2.94e-04 +2022-05-05 20:47:38,924 INFO [train.py:715] (7/8) Epoch 7, batch 14050, loss[loss=0.1448, simple_loss=0.2195, pruned_loss=0.03501, over 4938.00 frames.], tot_loss[loss=0.145, simple_loss=0.2162, pruned_loss=0.03694, over 971742.08 frames.], batch size: 21, lr: 2.94e-04 +2022-05-05 20:48:18,050 INFO [train.py:715] (7/8) Epoch 7, batch 14100, loss[loss=0.1195, simple_loss=0.1952, pruned_loss=0.02195, over 4799.00 frames.], tot_loss[loss=0.1447, simple_loss=0.216, pruned_loss=0.03671, over 972444.07 frames.], batch size: 14, lr: 2.94e-04 +2022-05-05 20:48:56,864 INFO [train.py:715] (7/8) Epoch 7, batch 14150, loss[loss=0.1356, simple_loss=0.1985, pruned_loss=0.03634, over 4922.00 frames.], tot_loss[loss=0.1456, simple_loss=0.217, pruned_loss=0.03713, over 972812.04 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:49:36,151 INFO [train.py:715] (7/8) Epoch 7, batch 14200, loss[loss=0.1219, simple_loss=0.1826, pruned_loss=0.03058, over 4834.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2171, pruned_loss=0.03715, over 972917.61 frames.], batch size: 13, lr: 2.94e-04 +2022-05-05 20:50:14,408 INFO [train.py:715] (7/8) Epoch 7, batch 14250, loss[loss=0.1217, simple_loss=0.1957, pruned_loss=0.02385, over 4754.00 frames.], tot_loss[loss=0.146, simple_loss=0.2174, pruned_loss=0.03728, over 972274.33 frames.], batch size: 16, lr: 2.94e-04 +2022-05-05 20:50:53,728 INFO [train.py:715] (7/8) Epoch 7, batch 14300, loss[loss=0.1715, simple_loss=0.2272, pruned_loss=0.05789, over 4966.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2167, pruned_loss=0.03695, over 972455.94 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:51:33,006 INFO [train.py:715] (7/8) Epoch 7, batch 14350, loss[loss=0.1542, simple_loss=0.2226, pruned_loss=0.04289, over 4756.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.03712, over 971249.10 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:52:12,027 INFO [train.py:715] (7/8) Epoch 7, batch 14400, loss[loss=0.1549, simple_loss=0.2271, pruned_loss=0.04131, over 4694.00 frames.], tot_loss[loss=0.146, simple_loss=0.2178, pruned_loss=0.03714, over 971259.76 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:52:50,730 INFO [train.py:715] (7/8) Epoch 7, batch 14450, loss[loss=0.1416, simple_loss=0.2238, pruned_loss=0.02972, over 4974.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03698, over 972857.04 frames.], batch size: 39, lr: 2.94e-04 +2022-05-05 20:53:29,523 INFO [train.py:715] (7/8) Epoch 7, batch 14500, loss[loss=0.1469, simple_loss=0.2237, pruned_loss=0.035, over 4921.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.0375, over 972419.10 frames.], batch size: 18, lr: 2.94e-04 +2022-05-05 20:54:09,108 INFO [train.py:715] (7/8) Epoch 7, batch 14550, loss[loss=0.1273, simple_loss=0.1993, pruned_loss=0.02763, over 4845.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2183, pruned_loss=0.03739, over 973180.37 frames.], batch size: 30, lr: 2.94e-04 +2022-05-05 20:54:47,910 INFO [train.py:715] (7/8) Epoch 7, batch 14600, loss[loss=0.1284, simple_loss=0.2062, pruned_loss=0.0253, over 4899.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.03687, over 973349.30 frames.], batch size: 19, lr: 2.94e-04 +2022-05-05 20:55:26,850 INFO [train.py:715] (7/8) Epoch 7, batch 14650, loss[loss=0.141, 
simple_loss=0.2198, pruned_loss=0.03114, over 4832.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03679, over 971748.90 frames.], batch size: 25, lr: 2.94e-04 +2022-05-05 20:56:05,809 INFO [train.py:715] (7/8) Epoch 7, batch 14700, loss[loss=0.1564, simple_loss=0.2227, pruned_loss=0.04502, over 4887.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03693, over 972427.75 frames.], batch size: 22, lr: 2.94e-04 +2022-05-05 20:56:44,945 INFO [train.py:715] (7/8) Epoch 7, batch 14750, loss[loss=0.1713, simple_loss=0.2317, pruned_loss=0.05541, over 4966.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03749, over 972520.31 frames.], batch size: 21, lr: 2.94e-04 +2022-05-05 20:57:23,493 INFO [train.py:715] (7/8) Epoch 7, batch 14800, loss[loss=0.1654, simple_loss=0.2401, pruned_loss=0.04532, over 4977.00 frames.], tot_loss[loss=0.1464, simple_loss=0.218, pruned_loss=0.03742, over 971914.97 frames.], batch size: 39, lr: 2.94e-04 +2022-05-05 20:58:03,006 INFO [train.py:715] (7/8) Epoch 7, batch 14850, loss[loss=0.1506, simple_loss=0.2323, pruned_loss=0.0344, over 4823.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03704, over 972131.66 frames.], batch size: 25, lr: 2.94e-04 +2022-05-05 20:58:41,952 INFO [train.py:715] (7/8) Epoch 7, batch 14900, loss[loss=0.1498, simple_loss=0.2179, pruned_loss=0.04084, over 4921.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03695, over 972406.94 frames.], batch size: 17, lr: 2.94e-04 +2022-05-05 20:59:20,316 INFO [train.py:715] (7/8) Epoch 7, batch 14950, loss[loss=0.1634, simple_loss=0.2347, pruned_loss=0.04599, over 4831.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03669, over 972660.79 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 20:59:59,926 INFO [train.py:715] (7/8) Epoch 7, batch 15000, loss[loss=0.1766, simple_loss=0.2311, pruned_loss=0.06106, over 4761.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2172, pruned_loss=0.03687, over 972118.43 frames.], batch size: 17, lr: 2.94e-04 +2022-05-05 20:59:59,927 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 21:00:14,355 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1083, simple_loss=0.1931, pruned_loss=0.01175, over 914524.00 frames. 
+2022-05-05 21:00:53,498 INFO [train.py:715] (7/8) Epoch 7, batch 15050, loss[loss=0.1487, simple_loss=0.2246, pruned_loss=0.03644, over 4837.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2183, pruned_loss=0.03762, over 971624.46 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 21:01:32,731 INFO [train.py:715] (7/8) Epoch 7, batch 15100, loss[loss=0.1483, simple_loss=0.231, pruned_loss=0.03278, over 4930.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2185, pruned_loss=0.03758, over 972443.60 frames.], batch size: 29, lr: 2.94e-04 +2022-05-05 21:02:11,975 INFO [train.py:715] (7/8) Epoch 7, batch 15150, loss[loss=0.1588, simple_loss=0.2249, pruned_loss=0.04637, over 4966.00 frames.], tot_loss[loss=0.1469, simple_loss=0.219, pruned_loss=0.03737, over 972514.65 frames.], batch size: 15, lr: 2.94e-04 +2022-05-05 21:02:50,722 INFO [train.py:715] (7/8) Epoch 7, batch 15200, loss[loss=0.1588, simple_loss=0.2279, pruned_loss=0.0448, over 4967.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2191, pruned_loss=0.03754, over 973228.01 frames.], batch size: 35, lr: 2.94e-04 +2022-05-05 21:03:30,196 INFO [train.py:715] (7/8) Epoch 7, batch 15250, loss[loss=0.1411, simple_loss=0.2075, pruned_loss=0.03737, over 4989.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2185, pruned_loss=0.03768, over 972922.85 frames.], batch size: 31, lr: 2.94e-04 +2022-05-05 21:04:09,390 INFO [train.py:715] (7/8) Epoch 7, batch 15300, loss[loss=0.1188, simple_loss=0.1918, pruned_loss=0.02289, over 4991.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03735, over 972462.94 frames.], batch size: 26, lr: 2.94e-04 +2022-05-05 21:04:48,396 INFO [train.py:715] (7/8) Epoch 7, batch 15350, loss[loss=0.191, simple_loss=0.266, pruned_loss=0.05795, over 4811.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03728, over 973460.92 frames.], batch size: 26, lr: 2.94e-04 +2022-05-05 21:05:27,509 INFO [train.py:715] (7/8) Epoch 7, batch 15400, loss[loss=0.1622, simple_loss=0.236, pruned_loss=0.04422, over 4840.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03754, over 973588.00 frames.], batch size: 30, lr: 2.94e-04 +2022-05-05 21:06:05,999 INFO [train.py:715] (7/8) Epoch 7, batch 15450, loss[loss=0.1457, simple_loss=0.2153, pruned_loss=0.03802, over 4820.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2187, pruned_loss=0.0374, over 973331.58 frames.], batch size: 13, lr: 2.94e-04 +2022-05-05 21:06:45,044 INFO [train.py:715] (7/8) Epoch 7, batch 15500, loss[loss=0.1279, simple_loss=0.2068, pruned_loss=0.02449, over 4974.00 frames.], tot_loss[loss=0.1472, simple_loss=0.219, pruned_loss=0.03771, over 973573.20 frames.], batch size: 24, lr: 2.93e-04 +2022-05-05 21:07:23,164 INFO [train.py:715] (7/8) Epoch 7, batch 15550, loss[loss=0.1826, simple_loss=0.2478, pruned_loss=0.0587, over 4865.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2193, pruned_loss=0.03783, over 973308.37 frames.], batch size: 32, lr: 2.93e-04 +2022-05-05 21:08:02,569 INFO [train.py:715] (7/8) Epoch 7, batch 15600, loss[loss=0.1379, simple_loss=0.2094, pruned_loss=0.03318, over 4777.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2196, pruned_loss=0.03809, over 973244.65 frames.], batch size: 17, lr: 2.93e-04 +2022-05-05 21:08:42,086 INFO [train.py:715] (7/8) Epoch 7, batch 15650, loss[loss=0.1515, simple_loss=0.2242, pruned_loss=0.03944, over 4965.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2198, pruned_loss=0.03825, over 973346.42 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:09:20,365 
INFO [train.py:715] (7/8) Epoch 7, batch 15700, loss[loss=0.1469, simple_loss=0.2196, pruned_loss=0.03709, over 4803.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2195, pruned_loss=0.03815, over 973565.37 frames.], batch size: 13, lr: 2.93e-04 +2022-05-05 21:09:59,358 INFO [train.py:715] (7/8) Epoch 7, batch 15750, loss[loss=0.1478, simple_loss=0.2264, pruned_loss=0.03462, over 4744.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2191, pruned_loss=0.03826, over 972979.99 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:10:39,021 INFO [train.py:715] (7/8) Epoch 7, batch 15800, loss[loss=0.1514, simple_loss=0.2137, pruned_loss=0.0446, over 4970.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2189, pruned_loss=0.03824, over 972857.90 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:11:18,130 INFO [train.py:715] (7/8) Epoch 7, batch 15850, loss[loss=0.158, simple_loss=0.2421, pruned_loss=0.03697, over 4908.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2179, pruned_loss=0.03764, over 972551.69 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:11:57,175 INFO [train.py:715] (7/8) Epoch 7, batch 15900, loss[loss=0.1097, simple_loss=0.1869, pruned_loss=0.01624, over 4817.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03743, over 972534.56 frames.], batch size: 25, lr: 2.93e-04 +2022-05-05 21:12:36,476 INFO [train.py:715] (7/8) Epoch 7, batch 15950, loss[loss=0.1635, simple_loss=0.2257, pruned_loss=0.05059, over 4883.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03744, over 972663.27 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:13:15,929 INFO [train.py:715] (7/8) Epoch 7, batch 16000, loss[loss=0.1159, simple_loss=0.1915, pruned_loss=0.02016, over 4816.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03755, over 972011.50 frames.], batch size: 26, lr: 2.93e-04 +2022-05-05 21:13:54,027 INFO [train.py:715] (7/8) Epoch 7, batch 16050, loss[loss=0.1647, simple_loss=0.2297, pruned_loss=0.04988, over 4768.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2179, pruned_loss=0.03758, over 971200.75 frames.], batch size: 12, lr: 2.93e-04 +2022-05-05 21:14:33,354 INFO [train.py:715] (7/8) Epoch 7, batch 16100, loss[loss=0.154, simple_loss=0.2224, pruned_loss=0.04281, over 4813.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2185, pruned_loss=0.0378, over 970943.79 frames.], batch size: 27, lr: 2.93e-04 +2022-05-05 21:15:12,278 INFO [train.py:715] (7/8) Epoch 7, batch 16150, loss[loss=0.1726, simple_loss=0.2384, pruned_loss=0.05338, over 4867.00 frames.], tot_loss[loss=0.148, simple_loss=0.2193, pruned_loss=0.03835, over 970804.70 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:15:50,930 INFO [train.py:715] (7/8) Epoch 7, batch 16200, loss[loss=0.1509, simple_loss=0.2228, pruned_loss=0.03953, over 4773.00 frames.], tot_loss[loss=0.148, simple_loss=0.2193, pruned_loss=0.0384, over 971330.70 frames.], batch size: 18, lr: 2.93e-04 +2022-05-05 21:16:30,079 INFO [train.py:715] (7/8) Epoch 7, batch 16250, loss[loss=0.1381, simple_loss=0.2144, pruned_loss=0.03088, over 4705.00 frames.], tot_loss[loss=0.1475, simple_loss=0.219, pruned_loss=0.03801, over 971466.71 frames.], batch size: 15, lr: 2.93e-04 +2022-05-05 21:17:08,725 INFO [train.py:715] (7/8) Epoch 7, batch 16300, loss[loss=0.1459, simple_loss=0.217, pruned_loss=0.03737, over 4807.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03744, over 972116.53 frames.], batch size: 25, lr: 2.93e-04 +2022-05-05 21:17:48,271 INFO [train.py:715] (7/8) 
Epoch 7, batch 16350, loss[loss=0.1443, simple_loss=0.2207, pruned_loss=0.03393, over 4809.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2187, pruned_loss=0.03784, over 970736.39 frames.], batch size: 21, lr: 2.93e-04 +2022-05-05 21:18:26,610 INFO [train.py:715] (7/8) Epoch 7, batch 16400, loss[loss=0.1451, simple_loss=0.2133, pruned_loss=0.03841, over 4952.00 frames.], tot_loss[loss=0.1479, simple_loss=0.2197, pruned_loss=0.03806, over 971579.33 frames.], batch size: 35, lr: 2.93e-04 +2022-05-05 21:19:05,504 INFO [train.py:715] (7/8) Epoch 7, batch 16450, loss[loss=0.137, simple_loss=0.2031, pruned_loss=0.03548, over 4912.00 frames.], tot_loss[loss=0.1481, simple_loss=0.2195, pruned_loss=0.03835, over 972623.51 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:19:44,557 INFO [train.py:715] (7/8) Epoch 7, batch 16500, loss[loss=0.146, simple_loss=0.2162, pruned_loss=0.03786, over 4819.00 frames.], tot_loss[loss=0.1484, simple_loss=0.2194, pruned_loss=0.03866, over 971995.33 frames.], batch size: 26, lr: 2.93e-04 +2022-05-05 21:20:22,827 INFO [train.py:715] (7/8) Epoch 7, batch 16550, loss[loss=0.1384, simple_loss=0.2046, pruned_loss=0.03611, over 4766.00 frames.], tot_loss[loss=0.1472, simple_loss=0.2182, pruned_loss=0.0381, over 971408.83 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:21:02,231 INFO [train.py:715] (7/8) Epoch 7, batch 16600, loss[loss=0.1408, simple_loss=0.2177, pruned_loss=0.03194, over 4801.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2186, pruned_loss=0.03838, over 972221.60 frames.], batch size: 21, lr: 2.93e-04 +2022-05-05 21:21:41,399 INFO [train.py:715] (7/8) Epoch 7, batch 16650, loss[loss=0.1227, simple_loss=0.1939, pruned_loss=0.02578, over 4848.00 frames.], tot_loss[loss=0.1467, simple_loss=0.218, pruned_loss=0.03772, over 971437.24 frames.], batch size: 13, lr: 2.93e-04 +2022-05-05 21:22:20,543 INFO [train.py:715] (7/8) Epoch 7, batch 16700, loss[loss=0.169, simple_loss=0.2342, pruned_loss=0.05189, over 4750.00 frames.], tot_loss[loss=0.146, simple_loss=0.2178, pruned_loss=0.03708, over 972076.42 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:22:59,814 INFO [train.py:715] (7/8) Epoch 7, batch 16750, loss[loss=0.1132, simple_loss=0.186, pruned_loss=0.02015, over 4915.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2179, pruned_loss=0.03697, over 972349.50 frames.], batch size: 23, lr: 2.93e-04 +2022-05-05 21:23:38,669 INFO [train.py:715] (7/8) Epoch 7, batch 16800, loss[loss=0.114, simple_loss=0.1852, pruned_loss=0.02145, over 4746.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03674, over 972008.58 frames.], batch size: 19, lr: 2.93e-04 +2022-05-05 21:24:17,713 INFO [train.py:715] (7/8) Epoch 7, batch 16850, loss[loss=0.1238, simple_loss=0.2002, pruned_loss=0.02365, over 4825.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2169, pruned_loss=0.03629, over 972208.34 frames.], batch size: 26, lr: 2.93e-04 +2022-05-05 21:24:56,997 INFO [train.py:715] (7/8) Epoch 7, batch 16900, loss[loss=0.1653, simple_loss=0.2478, pruned_loss=0.04144, over 4836.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03634, over 973406.87 frames.], batch size: 13, lr: 2.93e-04 +2022-05-05 21:25:36,249 INFO [train.py:715] (7/8) Epoch 7, batch 16950, loss[loss=0.1412, simple_loss=0.2111, pruned_loss=0.03565, over 4936.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03619, over 973202.53 frames.], batch size: 23, lr: 2.93e-04 +2022-05-05 21:26:14,894 INFO [train.py:715] (7/8) Epoch 7, batch 17000, 
loss[loss=0.1767, simple_loss=0.2522, pruned_loss=0.05061, over 4956.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03622, over 972548.33 frames.], batch size: 24, lr: 2.93e-04 +2022-05-05 21:26:54,053 INFO [train.py:715] (7/8) Epoch 7, batch 17050, loss[loss=0.1303, simple_loss=0.2086, pruned_loss=0.02606, over 4740.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2173, pruned_loss=0.03668, over 971285.66 frames.], batch size: 16, lr: 2.93e-04 +2022-05-05 21:27:32,508 INFO [train.py:715] (7/8) Epoch 7, batch 17100, loss[loss=0.1643, simple_loss=0.2326, pruned_loss=0.04799, over 4981.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2174, pruned_loss=0.03684, over 971224.35 frames.], batch size: 35, lr: 2.93e-04 +2022-05-05 21:28:11,648 INFO [train.py:715] (7/8) Epoch 7, batch 17150, loss[loss=0.1507, simple_loss=0.219, pruned_loss=0.04124, over 4861.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.037, over 971047.94 frames.], batch size: 20, lr: 2.93e-04 +2022-05-05 21:28:50,923 INFO [train.py:715] (7/8) Epoch 7, batch 17200, loss[loss=0.1633, simple_loss=0.2383, pruned_loss=0.04416, over 4913.00 frames.], tot_loss[loss=0.1453, simple_loss=0.217, pruned_loss=0.03682, over 971059.57 frames.], batch size: 18, lr: 2.93e-04 +2022-05-05 21:29:29,221 INFO [train.py:715] (7/8) Epoch 7, batch 17250, loss[loss=0.1649, simple_loss=0.237, pruned_loss=0.04638, over 4865.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03674, over 971089.32 frames.], batch size: 32, lr: 2.92e-04 +2022-05-05 21:30:08,292 INFO [train.py:715] (7/8) Epoch 7, batch 17300, loss[loss=0.1396, simple_loss=0.2141, pruned_loss=0.03256, over 4960.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2172, pruned_loss=0.03681, over 971353.83 frames.], batch size: 24, lr: 2.92e-04 +2022-05-05 21:30:46,575 INFO [train.py:715] (7/8) Epoch 7, batch 17350, loss[loss=0.1289, simple_loss=0.1998, pruned_loss=0.02901, over 4851.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.03665, over 971221.51 frames.], batch size: 13, lr: 2.92e-04 +2022-05-05 21:31:25,653 INFO [train.py:715] (7/8) Epoch 7, batch 17400, loss[loss=0.1414, simple_loss=0.2165, pruned_loss=0.03309, over 4917.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03696, over 970603.29 frames.], batch size: 17, lr: 2.92e-04 +2022-05-05 21:32:04,442 INFO [train.py:715] (7/8) Epoch 7, batch 17450, loss[loss=0.1283, simple_loss=0.1973, pruned_loss=0.02967, over 4710.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.0369, over 970927.71 frames.], batch size: 15, lr: 2.92e-04 +2022-05-05 21:32:43,217 INFO [train.py:715] (7/8) Epoch 7, batch 17500, loss[loss=0.1279, simple_loss=0.2084, pruned_loss=0.0237, over 4986.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03693, over 971330.09 frames.], batch size: 25, lr: 2.92e-04 +2022-05-05 21:33:22,411 INFO [train.py:715] (7/8) Epoch 7, batch 17550, loss[loss=0.1614, simple_loss=0.2346, pruned_loss=0.04417, over 4915.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.03685, over 972218.25 frames.], batch size: 17, lr: 2.92e-04 +2022-05-05 21:34:00,740 INFO [train.py:715] (7/8) Epoch 7, batch 17600, loss[loss=0.1515, simple_loss=0.2221, pruned_loss=0.04044, over 4859.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.037, over 971981.10 frames.], batch size: 34, lr: 2.92e-04 +2022-05-05 21:34:39,810 INFO [train.py:715] (7/8) Epoch 7, batch 17650, loss[loss=0.1473, 
simple_loss=0.2323, pruned_loss=0.03112, over 4939.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2172, pruned_loss=0.03692, over 972210.66 frames.], batch size: 23, lr: 2.92e-04 +2022-05-05 21:35:19,108 INFO [train.py:715] (7/8) Epoch 7, batch 17700, loss[loss=0.1966, simple_loss=0.2634, pruned_loss=0.06496, over 4980.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2169, pruned_loss=0.03695, over 972651.62 frames.], batch size: 15, lr: 2.92e-04 +2022-05-05 21:35:58,207 INFO [train.py:715] (7/8) Epoch 7, batch 17750, loss[loss=0.1733, simple_loss=0.2479, pruned_loss=0.04942, over 4916.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.03685, over 973427.68 frames.], batch size: 39, lr: 2.92e-04 +2022-05-05 21:36:37,513 INFO [train.py:715] (7/8) Epoch 7, batch 17800, loss[loss=0.1292, simple_loss=0.1994, pruned_loss=0.02954, over 4816.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03675, over 973612.02 frames.], batch size: 15, lr: 2.92e-04 +2022-05-05 21:37:16,002 INFO [train.py:715] (7/8) Epoch 7, batch 17850, loss[loss=0.1078, simple_loss=0.1822, pruned_loss=0.01676, over 4893.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03653, over 973842.62 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:37:55,614 INFO [train.py:715] (7/8) Epoch 7, batch 17900, loss[loss=0.1338, simple_loss=0.2153, pruned_loss=0.02615, over 4967.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.03657, over 973807.03 frames.], batch size: 24, lr: 2.92e-04 +2022-05-05 21:38:34,075 INFO [train.py:715] (7/8) Epoch 7, batch 17950, loss[loss=0.1446, simple_loss=0.2286, pruned_loss=0.03026, over 4875.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.03686, over 972916.17 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:39:13,139 INFO [train.py:715] (7/8) Epoch 7, batch 18000, loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03571, over 4808.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2186, pruned_loss=0.03755, over 972242.54 frames.], batch size: 25, lr: 2.92e-04 +2022-05-05 21:39:13,139 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 21:39:22,794 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1081, simple_loss=0.193, pruned_loss=0.01158, over 914524.00 frames. 
+2022-05-05 21:40:01,810 INFO [train.py:715] (7/8) Epoch 7, batch 18050, loss[loss=0.1439, simple_loss=0.2184, pruned_loss=0.03467, over 4985.00 frames.], tot_loss[loss=0.146, simple_loss=0.2182, pruned_loss=0.03696, over 972433.84 frames.], batch size: 33, lr: 2.92e-04 +2022-05-05 21:40:41,008 INFO [train.py:715] (7/8) Epoch 7, batch 18100, loss[loss=0.1394, simple_loss=0.2062, pruned_loss=0.03625, over 4919.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2188, pruned_loss=0.03747, over 972424.49 frames.], batch size: 29, lr: 2.92e-04 +2022-05-05 21:41:19,567 INFO [train.py:715] (7/8) Epoch 7, batch 18150, loss[loss=0.1488, simple_loss=0.2215, pruned_loss=0.03809, over 4881.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03774, over 972538.68 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:41:57,881 INFO [train.py:715] (7/8) Epoch 7, batch 18200, loss[loss=0.1373, simple_loss=0.2047, pruned_loss=0.03492, over 4927.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2192, pruned_loss=0.03788, over 972123.97 frames.], batch size: 17, lr: 2.92e-04 +2022-05-05 21:42:36,254 INFO [train.py:715] (7/8) Epoch 7, batch 18250, loss[loss=0.1236, simple_loss=0.187, pruned_loss=0.03016, over 4776.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2189, pruned_loss=0.03742, over 972026.67 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:43:15,543 INFO [train.py:715] (7/8) Epoch 7, batch 18300, loss[loss=0.1453, simple_loss=0.2104, pruned_loss=0.04011, over 4766.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03704, over 972448.64 frames.], batch size: 17, lr: 2.92e-04 +2022-05-05 21:43:53,555 INFO [train.py:715] (7/8) Epoch 7, batch 18350, loss[loss=0.1262, simple_loss=0.1998, pruned_loss=0.02631, over 4745.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2184, pruned_loss=0.03698, over 971866.32 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:44:31,936 INFO [train.py:715] (7/8) Epoch 7, batch 18400, loss[loss=0.151, simple_loss=0.2155, pruned_loss=0.04331, over 4774.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2177, pruned_loss=0.03683, over 972456.68 frames.], batch size: 18, lr: 2.92e-04 +2022-05-05 21:45:11,788 INFO [train.py:715] (7/8) Epoch 7, batch 18450, loss[loss=0.1381, simple_loss=0.2147, pruned_loss=0.03078, over 4927.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2187, pruned_loss=0.03744, over 970910.68 frames.], batch size: 23, lr: 2.92e-04 +2022-05-05 21:45:50,715 INFO [train.py:715] (7/8) Epoch 7, batch 18500, loss[loss=0.1448, simple_loss=0.2147, pruned_loss=0.03747, over 4870.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2193, pruned_loss=0.03787, over 970920.64 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:46:29,378 INFO [train.py:715] (7/8) Epoch 7, batch 18550, loss[loss=0.1605, simple_loss=0.2284, pruned_loss=0.04632, over 4809.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2187, pruned_loss=0.03742, over 970795.74 frames.], batch size: 24, lr: 2.92e-04 +2022-05-05 21:47:08,451 INFO [train.py:715] (7/8) Epoch 7, batch 18600, loss[loss=0.1514, simple_loss=0.2272, pruned_loss=0.03777, over 4925.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03718, over 970990.80 frames.], batch size: 18, lr: 2.92e-04 +2022-05-05 21:47:47,274 INFO [train.py:715] (7/8) Epoch 7, batch 18650, loss[loss=0.1559, simple_loss=0.2142, pruned_loss=0.0488, over 4955.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2174, pruned_loss=0.03675, over 970933.15 frames.], batch size: 35, lr: 2.92e-04 +2022-05-05 21:48:25,125 
INFO [train.py:715] (7/8) Epoch 7, batch 18700, loss[loss=0.1654, simple_loss=0.2406, pruned_loss=0.04509, over 4828.00 frames.], tot_loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03698, over 971340.38 frames.], batch size: 26, lr: 2.92e-04 +2022-05-05 21:49:03,391 INFO [train.py:715] (7/8) Epoch 7, batch 18750, loss[loss=0.1565, simple_loss=0.2332, pruned_loss=0.03989, over 4874.00 frames.], tot_loss[loss=0.145, simple_loss=0.2175, pruned_loss=0.03625, over 971245.29 frames.], batch size: 22, lr: 2.92e-04 +2022-05-05 21:49:42,763 INFO [train.py:715] (7/8) Epoch 7, batch 18800, loss[loss=0.171, simple_loss=0.2423, pruned_loss=0.04982, over 4780.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2176, pruned_loss=0.03604, over 971352.52 frames.], batch size: 14, lr: 2.92e-04 +2022-05-05 21:50:21,360 INFO [train.py:715] (7/8) Epoch 7, batch 18850, loss[loss=0.1199, simple_loss=0.1869, pruned_loss=0.0265, over 4748.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2174, pruned_loss=0.03591, over 971734.42 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:50:59,415 INFO [train.py:715] (7/8) Epoch 7, batch 18900, loss[loss=0.1324, simple_loss=0.2206, pruned_loss=0.02211, over 4881.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2177, pruned_loss=0.03624, over 972147.27 frames.], batch size: 16, lr: 2.92e-04 +2022-05-05 21:51:36,460 INFO [train.py:715] (7/8) Epoch 7, batch 18950, loss[loss=0.1488, simple_loss=0.2313, pruned_loss=0.03316, over 4799.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.0365, over 971753.09 frames.], batch size: 25, lr: 2.92e-04 +2022-05-05 21:52:14,915 INFO [train.py:715] (7/8) Epoch 7, batch 19000, loss[loss=0.1463, simple_loss=0.2223, pruned_loss=0.03512, over 4948.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03687, over 971946.45 frames.], batch size: 21, lr: 2.92e-04 +2022-05-05 21:52:52,514 INFO [train.py:715] (7/8) Epoch 7, batch 19050, loss[loss=0.1229, simple_loss=0.1905, pruned_loss=0.02763, over 4769.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2172, pruned_loss=0.03702, over 972394.23 frames.], batch size: 14, lr: 2.91e-04 +2022-05-05 21:53:30,742 INFO [train.py:715] (7/8) Epoch 7, batch 19100, loss[loss=0.1279, simple_loss=0.1909, pruned_loss=0.03246, over 4966.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2159, pruned_loss=0.03641, over 973208.67 frames.], batch size: 28, lr: 2.91e-04 +2022-05-05 21:54:09,412 INFO [train.py:715] (7/8) Epoch 7, batch 19150, loss[loss=0.1384, simple_loss=0.2088, pruned_loss=0.03399, over 4911.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2163, pruned_loss=0.03638, over 971978.18 frames.], batch size: 19, lr: 2.91e-04 +2022-05-05 21:54:47,124 INFO [train.py:715] (7/8) Epoch 7, batch 19200, loss[loss=0.1916, simple_loss=0.2609, pruned_loss=0.06119, over 4899.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2172, pruned_loss=0.03664, over 972676.29 frames.], batch size: 17, lr: 2.91e-04 +2022-05-05 21:55:24,839 INFO [train.py:715] (7/8) Epoch 7, batch 19250, loss[loss=0.1662, simple_loss=0.2224, pruned_loss=0.05498, over 4824.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2167, pruned_loss=0.03687, over 973799.90 frames.], batch size: 30, lr: 2.91e-04 +2022-05-05 21:56:02,878 INFO [train.py:715] (7/8) Epoch 7, batch 19300, loss[loss=0.1496, simple_loss=0.2245, pruned_loss=0.03733, over 4879.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.03628, over 973246.97 frames.], batch size: 39, lr: 2.91e-04 +2022-05-05 21:56:41,357 INFO [train.py:715] 
(7/8) Epoch 7, batch 19350, loss[loss=0.1701, simple_loss=0.2324, pruned_loss=0.05388, over 4689.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.03626, over 973288.82 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 21:57:18,829 INFO [train.py:715] (7/8) Epoch 7, batch 19400, loss[loss=0.1307, simple_loss=0.1944, pruned_loss=0.03347, over 4970.00 frames.], tot_loss[loss=0.145, simple_loss=0.2173, pruned_loss=0.03633, over 972298.55 frames.], batch size: 14, lr: 2.91e-04 +2022-05-05 21:57:56,263 INFO [train.py:715] (7/8) Epoch 7, batch 19450, loss[loss=0.1478, simple_loss=0.227, pruned_loss=0.03432, over 4978.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2178, pruned_loss=0.03648, over 972519.55 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 21:58:34,322 INFO [train.py:715] (7/8) Epoch 7, batch 19500, loss[loss=0.1354, simple_loss=0.2108, pruned_loss=0.03001, over 4800.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2187, pruned_loss=0.03751, over 971513.55 frames.], batch size: 25, lr: 2.91e-04 +2022-05-05 21:59:11,841 INFO [train.py:715] (7/8) Epoch 7, batch 19550, loss[loss=0.1242, simple_loss=0.1998, pruned_loss=0.02426, over 4939.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2194, pruned_loss=0.03784, over 972086.28 frames.], batch size: 29, lr: 2.91e-04 +2022-05-05 21:59:49,563 INFO [train.py:715] (7/8) Epoch 7, batch 19600, loss[loss=0.1684, simple_loss=0.222, pruned_loss=0.05734, over 4800.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.03739, over 972253.89 frames.], batch size: 12, lr: 2.91e-04 +2022-05-05 22:00:27,126 INFO [train.py:715] (7/8) Epoch 7, batch 19650, loss[loss=0.1511, simple_loss=0.2242, pruned_loss=0.03899, over 4890.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.0375, over 972199.18 frames.], batch size: 19, lr: 2.91e-04 +2022-05-05 22:01:05,537 INFO [train.py:715] (7/8) Epoch 7, batch 19700, loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02995, over 4945.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2176, pruned_loss=0.0371, over 972042.31 frames.], batch size: 21, lr: 2.91e-04 +2022-05-05 22:01:42,745 INFO [train.py:715] (7/8) Epoch 7, batch 19750, loss[loss=0.1737, simple_loss=0.2442, pruned_loss=0.05162, over 4912.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2176, pruned_loss=0.03683, over 971598.72 frames.], batch size: 39, lr: 2.91e-04 +2022-05-05 22:02:20,221 INFO [train.py:715] (7/8) Epoch 7, batch 19800, loss[loss=0.1487, simple_loss=0.2302, pruned_loss=0.03356, over 4820.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2171, pruned_loss=0.03671, over 971987.31 frames.], batch size: 26, lr: 2.91e-04 +2022-05-05 22:02:58,033 INFO [train.py:715] (7/8) Epoch 7, batch 19850, loss[loss=0.1499, simple_loss=0.2154, pruned_loss=0.04215, over 4797.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03698, over 972178.87 frames.], batch size: 21, lr: 2.91e-04 +2022-05-05 22:03:35,858 INFO [train.py:715] (7/8) Epoch 7, batch 19900, loss[loss=0.1526, simple_loss=0.2295, pruned_loss=0.03781, over 4793.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03704, over 971957.90 frames.], batch size: 14, lr: 2.91e-04 +2022-05-05 22:04:12,821 INFO [train.py:715] (7/8) Epoch 7, batch 19950, loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02832, over 4954.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2171, pruned_loss=0.03686, over 972208.58 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:04:50,680 INFO [train.py:715] (7/8) Epoch 7, batch 
20000, loss[loss=0.1436, simple_loss=0.2145, pruned_loss=0.03636, over 4928.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2173, pruned_loss=0.03702, over 972180.78 frames.], batch size: 29, lr: 2.91e-04 +2022-05-05 22:05:28,964 INFO [train.py:715] (7/8) Epoch 7, batch 20050, loss[loss=0.1501, simple_loss=0.2162, pruned_loss=0.042, over 4916.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03661, over 971832.74 frames.], batch size: 17, lr: 2.91e-04 +2022-05-05 22:06:06,296 INFO [train.py:715] (7/8) Epoch 7, batch 20100, loss[loss=0.1749, simple_loss=0.2324, pruned_loss=0.0587, over 4779.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03644, over 971441.38 frames.], batch size: 18, lr: 2.91e-04 +2022-05-05 22:06:43,746 INFO [train.py:715] (7/8) Epoch 7, batch 20150, loss[loss=0.1496, simple_loss=0.2131, pruned_loss=0.04306, over 4837.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03649, over 972232.35 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:07:21,914 INFO [train.py:715] (7/8) Epoch 7, batch 20200, loss[loss=0.1317, simple_loss=0.2114, pruned_loss=0.02597, over 4821.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2175, pruned_loss=0.03701, over 972198.11 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:08:00,054 INFO [train.py:715] (7/8) Epoch 7, batch 20250, loss[loss=0.1451, simple_loss=0.2154, pruned_loss=0.03741, over 4873.00 frames.], tot_loss[loss=0.146, simple_loss=0.2179, pruned_loss=0.03706, over 972132.68 frames.], batch size: 32, lr: 2.91e-04 +2022-05-05 22:08:37,464 INFO [train.py:715] (7/8) Epoch 7, batch 20300, loss[loss=0.1139, simple_loss=0.1819, pruned_loss=0.02293, over 4846.00 frames.], tot_loss[loss=0.146, simple_loss=0.2179, pruned_loss=0.03699, over 971665.10 frames.], batch size: 13, lr: 2.91e-04 +2022-05-05 22:09:17,216 INFO [train.py:715] (7/8) Epoch 7, batch 20350, loss[loss=0.1686, simple_loss=0.2282, pruned_loss=0.05452, over 4987.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03763, over 971500.81 frames.], batch size: 31, lr: 2.91e-04 +2022-05-05 22:09:55,130 INFO [train.py:715] (7/8) Epoch 7, batch 20400, loss[loss=0.1452, simple_loss=0.2177, pruned_loss=0.03633, over 4772.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03735, over 971317.89 frames.], batch size: 18, lr: 2.91e-04 +2022-05-05 22:10:33,045 INFO [train.py:715] (7/8) Epoch 7, batch 20450, loss[loss=0.1339, simple_loss=0.2101, pruned_loss=0.02888, over 4898.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2174, pruned_loss=0.03754, over 971284.97 frames.], batch size: 19, lr: 2.91e-04 +2022-05-05 22:11:10,606 INFO [train.py:715] (7/8) Epoch 7, batch 20500, loss[loss=0.1126, simple_loss=0.18, pruned_loss=0.02265, over 4809.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03743, over 971680.48 frames.], batch size: 26, lr: 2.91e-04 +2022-05-05 22:11:48,691 INFO [train.py:715] (7/8) Epoch 7, batch 20550, loss[loss=0.1327, simple_loss=0.2065, pruned_loss=0.02938, over 4706.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2181, pruned_loss=0.0375, over 971027.24 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:12:26,841 INFO [train.py:715] (7/8) Epoch 7, batch 20600, loss[loss=0.1627, simple_loss=0.2295, pruned_loss=0.04796, over 4990.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2184, pruned_loss=0.03753, over 971627.38 frames.], batch size: 33, lr: 2.91e-04 +2022-05-05 22:13:04,070 INFO [train.py:715] (7/8) Epoch 7, batch 20650, loss[loss=0.1174, 
simple_loss=0.1976, pruned_loss=0.01862, over 4947.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2183, pruned_loss=0.03748, over 971713.24 frames.], batch size: 24, lr: 2.91e-04 +2022-05-05 22:13:41,770 INFO [train.py:715] (7/8) Epoch 7, batch 20700, loss[loss=0.158, simple_loss=0.2353, pruned_loss=0.04041, over 4699.00 frames.], tot_loss[loss=0.147, simple_loss=0.2186, pruned_loss=0.03768, over 971871.07 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:14:19,743 INFO [train.py:715] (7/8) Epoch 7, batch 20750, loss[loss=0.1545, simple_loss=0.2231, pruned_loss=0.04292, over 4854.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2183, pruned_loss=0.0375, over 972608.70 frames.], batch size: 30, lr: 2.91e-04 +2022-05-05 22:14:57,388 INFO [train.py:715] (7/8) Epoch 7, batch 20800, loss[loss=0.1498, simple_loss=0.2234, pruned_loss=0.03808, over 4697.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.03695, over 972934.70 frames.], batch size: 15, lr: 2.91e-04 +2022-05-05 22:15:34,692 INFO [train.py:715] (7/8) Epoch 7, batch 20850, loss[loss=0.137, simple_loss=0.2094, pruned_loss=0.0323, over 4775.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2166, pruned_loss=0.03683, over 972642.32 frames.], batch size: 17, lr: 2.90e-04 +2022-05-05 22:16:13,018 INFO [train.py:715] (7/8) Epoch 7, batch 20900, loss[loss=0.1272, simple_loss=0.1977, pruned_loss=0.02835, over 4910.00 frames.], tot_loss[loss=0.145, simple_loss=0.2168, pruned_loss=0.03665, over 972806.16 frames.], batch size: 18, lr: 2.90e-04 +2022-05-05 22:16:50,910 INFO [train.py:715] (7/8) Epoch 7, batch 20950, loss[loss=0.1537, simple_loss=0.2157, pruned_loss=0.04588, over 4849.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03663, over 971873.54 frames.], batch size: 30, lr: 2.90e-04 +2022-05-05 22:17:29,166 INFO [train.py:715] (7/8) Epoch 7, batch 21000, loss[loss=0.1235, simple_loss=0.1974, pruned_loss=0.0248, over 4749.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.0366, over 971784.57 frames.], batch size: 16, lr: 2.90e-04 +2022-05-05 22:17:29,167 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 22:17:39,072 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1082, simple_loss=0.193, pruned_loss=0.01169, over 914524.00 frames. 
+2022-05-05 22:18:17,065 INFO [train.py:715] (7/8) Epoch 7, batch 21050, loss[loss=0.1297, simple_loss=0.2033, pruned_loss=0.02799, over 4864.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2163, pruned_loss=0.03655, over 971918.33 frames.], batch size: 34, lr: 2.90e-04 +2022-05-05 22:18:54,964 INFO [train.py:715] (7/8) Epoch 7, batch 21100, loss[loss=0.1482, simple_loss=0.2163, pruned_loss=0.04001, over 4906.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03666, over 971354.18 frames.], batch size: 18, lr: 2.90e-04 +2022-05-05 22:19:32,990 INFO [train.py:715] (7/8) Epoch 7, batch 21150, loss[loss=0.1295, simple_loss=0.2021, pruned_loss=0.02848, over 4800.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2168, pruned_loss=0.03724, over 971439.16 frames.], batch size: 25, lr: 2.90e-04 +2022-05-05 22:20:10,776 INFO [train.py:715] (7/8) Epoch 7, batch 21200, loss[loss=0.1469, simple_loss=0.2353, pruned_loss=0.02927, over 4968.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2173, pruned_loss=0.03726, over 972155.14 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:20:48,999 INFO [train.py:715] (7/8) Epoch 7, batch 21250, loss[loss=0.1286, simple_loss=0.1984, pruned_loss=0.02942, over 4765.00 frames.], tot_loss[loss=0.146, simple_loss=0.2173, pruned_loss=0.03729, over 973121.89 frames.], batch size: 16, lr: 2.90e-04 +2022-05-05 22:21:27,129 INFO [train.py:715] (7/8) Epoch 7, batch 21300, loss[loss=0.1576, simple_loss=0.224, pruned_loss=0.04559, over 4859.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2173, pruned_loss=0.0376, over 972126.72 frames.], batch size: 32, lr: 2.90e-04 +2022-05-05 22:22:04,501 INFO [train.py:715] (7/8) Epoch 7, batch 21350, loss[loss=0.1708, simple_loss=0.2359, pruned_loss=0.05285, over 4933.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2172, pruned_loss=0.03749, over 971802.80 frames.], batch size: 35, lr: 2.90e-04 +2022-05-05 22:22:42,287 INFO [train.py:715] (7/8) Epoch 7, batch 21400, loss[loss=0.1287, simple_loss=0.2056, pruned_loss=0.02589, over 4935.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2175, pruned_loss=0.03775, over 972657.28 frames.], batch size: 39, lr: 2.90e-04 +2022-05-05 22:23:20,548 INFO [train.py:715] (7/8) Epoch 7, batch 21450, loss[loss=0.1646, simple_loss=0.2466, pruned_loss=0.0413, over 4994.00 frames.], tot_loss[loss=0.1466, simple_loss=0.218, pruned_loss=0.03757, over 973203.13 frames.], batch size: 14, lr: 2.90e-04 +2022-05-05 22:23:58,721 INFO [train.py:715] (7/8) Epoch 7, batch 21500, loss[loss=0.1223, simple_loss=0.2035, pruned_loss=0.02053, over 4954.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03721, over 973242.37 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:24:36,574 INFO [train.py:715] (7/8) Epoch 7, batch 21550, loss[loss=0.1648, simple_loss=0.2337, pruned_loss=0.04799, over 4752.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03737, over 972476.01 frames.], batch size: 16, lr: 2.90e-04 +2022-05-05 22:25:14,830 INFO [train.py:715] (7/8) Epoch 7, batch 21600, loss[loss=0.1545, simple_loss=0.2249, pruned_loss=0.04203, over 4782.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03742, over 972393.41 frames.], batch size: 17, lr: 2.90e-04 +2022-05-05 22:25:53,300 INFO [train.py:715] (7/8) Epoch 7, batch 21650, loss[loss=0.1568, simple_loss=0.2265, pruned_loss=0.04351, over 4790.00 frames.], tot_loss[loss=0.147, simple_loss=0.2182, pruned_loss=0.03786, over 971065.92 frames.], batch size: 17, lr: 2.90e-04 +2022-05-05 22:26:30,667 
INFO [train.py:715] (7/8) Epoch 7, batch 21700, loss[loss=0.1591, simple_loss=0.2361, pruned_loss=0.04112, over 4704.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2179, pruned_loss=0.03772, over 971281.92 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:27:08,758 INFO [train.py:715] (7/8) Epoch 7, batch 21750, loss[loss=0.1271, simple_loss=0.1953, pruned_loss=0.02941, over 4767.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2178, pruned_loss=0.0377, over 971730.91 frames.], batch size: 12, lr: 2.90e-04 +2022-05-05 22:27:46,869 INFO [train.py:715] (7/8) Epoch 7, batch 21800, loss[loss=0.1607, simple_loss=0.2224, pruned_loss=0.04954, over 4771.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2171, pruned_loss=0.03723, over 971631.58 frames.], batch size: 14, lr: 2.90e-04 +2022-05-05 22:28:24,961 INFO [train.py:715] (7/8) Epoch 7, batch 21850, loss[loss=0.1325, simple_loss=0.1986, pruned_loss=0.03319, over 4850.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2162, pruned_loss=0.03668, over 971665.64 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:29:02,874 INFO [train.py:715] (7/8) Epoch 7, batch 21900, loss[loss=0.1482, simple_loss=0.2364, pruned_loss=0.02999, over 4804.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2169, pruned_loss=0.03728, over 971797.86 frames.], batch size: 25, lr: 2.90e-04 +2022-05-05 22:29:40,814 INFO [train.py:715] (7/8) Epoch 7, batch 21950, loss[loss=0.1175, simple_loss=0.1991, pruned_loss=0.01798, over 4691.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2165, pruned_loss=0.03696, over 971976.11 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:30:19,542 INFO [train.py:715] (7/8) Epoch 7, batch 22000, loss[loss=0.126, simple_loss=0.1978, pruned_loss=0.02708, over 4907.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2162, pruned_loss=0.03663, over 971950.69 frames.], batch size: 17, lr: 2.90e-04 +2022-05-05 22:30:57,078 INFO [train.py:715] (7/8) Epoch 7, batch 22050, loss[loss=0.1436, simple_loss=0.2174, pruned_loss=0.03489, over 4743.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03613, over 972987.48 frames.], batch size: 16, lr: 2.90e-04 +2022-05-05 22:31:35,217 INFO [train.py:715] (7/8) Epoch 7, batch 22100, loss[loss=0.1821, simple_loss=0.2474, pruned_loss=0.05843, over 4819.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2161, pruned_loss=0.03609, over 973037.37 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:32:13,475 INFO [train.py:715] (7/8) Epoch 7, batch 22150, loss[loss=0.1407, simple_loss=0.212, pruned_loss=0.03472, over 4926.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03667, over 972307.79 frames.], batch size: 23, lr: 2.90e-04 +2022-05-05 22:32:51,985 INFO [train.py:715] (7/8) Epoch 7, batch 22200, loss[loss=0.1469, simple_loss=0.2276, pruned_loss=0.03312, over 4774.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03691, over 971799.21 frames.], batch size: 18, lr: 2.90e-04 +2022-05-05 22:33:29,481 INFO [train.py:715] (7/8) Epoch 7, batch 22250, loss[loss=0.1572, simple_loss=0.2431, pruned_loss=0.03563, over 4807.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2185, pruned_loss=0.03703, over 971495.63 frames.], batch size: 21, lr: 2.90e-04 +2022-05-05 22:34:07,237 INFO [train.py:715] (7/8) Epoch 7, batch 22300, loss[loss=0.1318, simple_loss=0.2096, pruned_loss=0.02707, over 4971.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2187, pruned_loss=0.03696, over 972845.42 frames.], batch size: 24, lr: 2.90e-04 +2022-05-05 22:34:45,534 INFO [train.py:715] 
(7/8) Epoch 7, batch 22350, loss[loss=0.1406, simple_loss=0.2114, pruned_loss=0.03492, over 4919.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2191, pruned_loss=0.03681, over 973238.76 frames.], batch size: 29, lr: 2.90e-04 +2022-05-05 22:35:22,812 INFO [train.py:715] (7/8) Epoch 7, batch 22400, loss[loss=0.1522, simple_loss=0.2299, pruned_loss=0.03729, over 4694.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2187, pruned_loss=0.0365, over 972420.57 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:36:00,505 INFO [train.py:715] (7/8) Epoch 7, batch 22450, loss[loss=0.1876, simple_loss=0.2476, pruned_loss=0.06375, over 4799.00 frames.], tot_loss[loss=0.1467, simple_loss=0.219, pruned_loss=0.03715, over 971698.81 frames.], batch size: 14, lr: 2.90e-04 +2022-05-05 22:36:38,649 INFO [train.py:715] (7/8) Epoch 7, batch 22500, loss[loss=0.1545, simple_loss=0.2258, pruned_loss=0.04165, over 4827.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2183, pruned_loss=0.03673, over 971694.27 frames.], batch size: 13, lr: 2.90e-04 +2022-05-05 22:37:16,690 INFO [train.py:715] (7/8) Epoch 7, batch 22550, loss[loss=0.1335, simple_loss=0.2017, pruned_loss=0.03267, over 4871.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2187, pruned_loss=0.03734, over 972876.90 frames.], batch size: 22, lr: 2.90e-04 +2022-05-05 22:37:54,354 INFO [train.py:715] (7/8) Epoch 7, batch 22600, loss[loss=0.1152, simple_loss=0.1853, pruned_loss=0.02253, over 4801.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2186, pruned_loss=0.03708, over 972211.52 frames.], batch size: 12, lr: 2.90e-04 +2022-05-05 22:38:32,387 INFO [train.py:715] (7/8) Epoch 7, batch 22650, loss[loss=0.1383, simple_loss=0.2078, pruned_loss=0.03433, over 4967.00 frames.], tot_loss[loss=0.147, simple_loss=0.2187, pruned_loss=0.03765, over 972495.03 frames.], batch size: 15, lr: 2.90e-04 +2022-05-05 22:39:10,750 INFO [train.py:715] (7/8) Epoch 7, batch 22700, loss[loss=0.1627, simple_loss=0.234, pruned_loss=0.04566, over 4826.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2187, pruned_loss=0.03792, over 972180.02 frames.], batch size: 27, lr: 2.89e-04 +2022-05-05 22:39:48,095 INFO [train.py:715] (7/8) Epoch 7, batch 22750, loss[loss=0.1865, simple_loss=0.2713, pruned_loss=0.05086, over 4686.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03769, over 972470.54 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 22:40:25,727 INFO [train.py:715] (7/8) Epoch 7, batch 22800, loss[loss=0.1219, simple_loss=0.1852, pruned_loss=0.02936, over 4901.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2191, pruned_loss=0.03751, over 972775.83 frames.], batch size: 19, lr: 2.89e-04 +2022-05-05 22:41:03,918 INFO [train.py:715] (7/8) Epoch 7, batch 22850, loss[loss=0.2002, simple_loss=0.2603, pruned_loss=0.07007, over 4983.00 frames.], tot_loss[loss=0.147, simple_loss=0.2189, pruned_loss=0.03753, over 973629.30 frames.], batch size: 35, lr: 2.89e-04 +2022-05-05 22:41:41,491 INFO [train.py:715] (7/8) Epoch 7, batch 22900, loss[loss=0.1682, simple_loss=0.2275, pruned_loss=0.05442, over 4783.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03729, over 972582.35 frames.], batch size: 18, lr: 2.89e-04 +2022-05-05 22:42:19,137 INFO [train.py:715] (7/8) Epoch 7, batch 22950, loss[loss=0.1808, simple_loss=0.2507, pruned_loss=0.05549, over 4800.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03735, over 971545.23 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:42:57,045 INFO [train.py:715] (7/8) Epoch 7, batch 
23000, loss[loss=0.1162, simple_loss=0.1908, pruned_loss=0.02079, over 4932.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2172, pruned_loss=0.03707, over 971471.08 frames.], batch size: 23, lr: 2.89e-04 +2022-05-05 22:43:35,193 INFO [train.py:715] (7/8) Epoch 7, batch 23050, loss[loss=0.1429, simple_loss=0.2092, pruned_loss=0.03837, over 4867.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03685, over 971477.54 frames.], batch size: 32, lr: 2.89e-04 +2022-05-05 22:44:12,639 INFO [train.py:715] (7/8) Epoch 7, batch 23100, loss[loss=0.1218, simple_loss=0.194, pruned_loss=0.0248, over 4809.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2172, pruned_loss=0.03631, over 972042.28 frames.], batch size: 27, lr: 2.89e-04 +2022-05-05 22:44:49,935 INFO [train.py:715] (7/8) Epoch 7, batch 23150, loss[loss=0.1134, simple_loss=0.186, pruned_loss=0.02039, over 4933.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03616, over 972449.29 frames.], batch size: 23, lr: 2.89e-04 +2022-05-05 22:45:28,253 INFO [train.py:715] (7/8) Epoch 7, batch 23200, loss[loss=0.1816, simple_loss=0.2578, pruned_loss=0.05274, over 4789.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2171, pruned_loss=0.0367, over 972291.89 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:46:06,318 INFO [train.py:715] (7/8) Epoch 7, batch 23250, loss[loss=0.1562, simple_loss=0.2274, pruned_loss=0.04249, over 4872.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03671, over 972727.81 frames.], batch size: 38, lr: 2.89e-04 +2022-05-05 22:46:43,800 INFO [train.py:715] (7/8) Epoch 7, batch 23300, loss[loss=0.1242, simple_loss=0.2036, pruned_loss=0.0224, over 4838.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03705, over 973319.50 frames.], batch size: 13, lr: 2.89e-04 +2022-05-05 22:47:22,580 INFO [train.py:715] (7/8) Epoch 7, batch 23350, loss[loss=0.1198, simple_loss=0.1943, pruned_loss=0.02267, over 4947.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2182, pruned_loss=0.03743, over 973005.16 frames.], batch size: 29, lr: 2.89e-04 +2022-05-05 22:48:01,697 INFO [train.py:715] (7/8) Epoch 7, batch 23400, loss[loss=0.1661, simple_loss=0.2308, pruned_loss=0.05073, over 4851.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.0374, over 973551.44 frames.], batch size: 32, lr: 2.89e-04 +2022-05-05 22:48:40,124 INFO [train.py:715] (7/8) Epoch 7, batch 23450, loss[loss=0.1223, simple_loss=0.1861, pruned_loss=0.02922, over 4979.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.03721, over 971975.00 frames.], batch size: 28, lr: 2.89e-04 +2022-05-05 22:49:18,248 INFO [train.py:715] (7/8) Epoch 7, batch 23500, loss[loss=0.1479, simple_loss=0.2183, pruned_loss=0.03882, over 4907.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03716, over 971565.38 frames.], batch size: 17, lr: 2.89e-04 +2022-05-05 22:49:56,228 INFO [train.py:715] (7/8) Epoch 7, batch 23550, loss[loss=0.1364, simple_loss=0.2131, pruned_loss=0.02982, over 4816.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2182, pruned_loss=0.03754, over 972413.99 frames.], batch size: 25, lr: 2.89e-04 +2022-05-05 22:50:34,438 INFO [train.py:715] (7/8) Epoch 7, batch 23600, loss[loss=0.1643, simple_loss=0.2306, pruned_loss=0.04894, over 4931.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03731, over 973735.20 frames.], batch size: 39, lr: 2.89e-04 +2022-05-05 22:51:11,416 INFO [train.py:715] (7/8) Epoch 7, batch 23650, loss[loss=0.1344, 
simple_loss=0.2108, pruned_loss=0.02905, over 4845.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03699, over 973140.85 frames.], batch size: 20, lr: 2.89e-04 +2022-05-05 22:51:49,265 INFO [train.py:715] (7/8) Epoch 7, batch 23700, loss[loss=0.1212, simple_loss=0.1991, pruned_loss=0.02165, over 4934.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03659, over 972452.85 frames.], batch size: 23, lr: 2.89e-04 +2022-05-05 22:52:27,395 INFO [train.py:715] (7/8) Epoch 7, batch 23750, loss[loss=0.1476, simple_loss=0.2293, pruned_loss=0.03292, over 4804.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2163, pruned_loss=0.03632, over 972226.84 frames.], batch size: 21, lr: 2.89e-04 +2022-05-05 22:53:04,575 INFO [train.py:715] (7/8) Epoch 7, batch 23800, loss[loss=0.1344, simple_loss=0.2045, pruned_loss=0.03217, over 4920.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2163, pruned_loss=0.0367, over 972280.03 frames.], batch size: 18, lr: 2.89e-04 +2022-05-05 22:53:42,350 INFO [train.py:715] (7/8) Epoch 7, batch 23850, loss[loss=0.1356, simple_loss=0.2045, pruned_loss=0.03333, over 4736.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2159, pruned_loss=0.03648, over 971617.10 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 22:54:21,021 INFO [train.py:715] (7/8) Epoch 7, batch 23900, loss[loss=0.1403, simple_loss=0.2085, pruned_loss=0.03603, over 4785.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2171, pruned_loss=0.03712, over 971772.02 frames.], batch size: 14, lr: 2.89e-04 +2022-05-05 22:54:59,165 INFO [train.py:715] (7/8) Epoch 7, batch 23950, loss[loss=0.1229, simple_loss=0.189, pruned_loss=0.02835, over 4831.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2167, pruned_loss=0.03679, over 972104.49 frames.], batch size: 13, lr: 2.89e-04 +2022-05-05 22:55:36,637 INFO [train.py:715] (7/8) Epoch 7, batch 24000, loss[loss=0.1257, simple_loss=0.2034, pruned_loss=0.024, over 4958.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.03658, over 973383.92 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:55:36,637 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 22:55:46,187 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.108, simple_loss=0.1929, pruned_loss=0.01156, over 914524.00 frames. 
+2022-05-05 22:56:23,727 INFO [train.py:715] (7/8) Epoch 7, batch 24050, loss[loss=0.1528, simple_loss=0.2194, pruned_loss=0.04308, over 4799.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2171, pruned_loss=0.03621, over 973100.41 frames.], batch size: 21, lr: 2.89e-04 +2022-05-05 22:57:02,032 INFO [train.py:715] (7/8) Epoch 7, batch 24100, loss[loss=0.1413, simple_loss=0.2096, pruned_loss=0.03654, over 4790.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.03683, over 972962.83 frames.], batch size: 14, lr: 2.89e-04 +2022-05-05 22:57:40,437 INFO [train.py:715] (7/8) Epoch 7, batch 24150, loss[loss=0.1361, simple_loss=0.1972, pruned_loss=0.03746, over 4766.00 frames.], tot_loss[loss=0.1458, simple_loss=0.218, pruned_loss=0.03681, over 972622.63 frames.], batch size: 12, lr: 2.89e-04 +2022-05-05 22:58:18,172 INFO [train.py:715] (7/8) Epoch 7, batch 24200, loss[loss=0.1268, simple_loss=0.2011, pruned_loss=0.02622, over 4839.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03652, over 972474.05 frames.], batch size: 13, lr: 2.89e-04 +2022-05-05 22:58:55,939 INFO [train.py:715] (7/8) Epoch 7, batch 24250, loss[loss=0.156, simple_loss=0.2329, pruned_loss=0.03956, over 4957.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2177, pruned_loss=0.03685, over 973435.43 frames.], batch size: 24, lr: 2.89e-04 +2022-05-05 22:59:34,585 INFO [train.py:715] (7/8) Epoch 7, batch 24300, loss[loss=0.167, simple_loss=0.2311, pruned_loss=0.05145, over 4987.00 frames.], tot_loss[loss=0.1455, simple_loss=0.218, pruned_loss=0.03652, over 973210.84 frames.], batch size: 20, lr: 2.89e-04 +2022-05-05 23:00:12,427 INFO [train.py:715] (7/8) Epoch 7, batch 24350, loss[loss=0.1463, simple_loss=0.2134, pruned_loss=0.03965, over 4948.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2178, pruned_loss=0.03647, over 973266.71 frames.], batch size: 14, lr: 2.89e-04 +2022-05-05 23:00:50,087 INFO [train.py:715] (7/8) Epoch 7, batch 24400, loss[loss=0.1784, simple_loss=0.242, pruned_loss=0.0574, over 4870.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03674, over 972704.21 frames.], batch size: 16, lr: 2.89e-04 +2022-05-05 23:01:28,243 INFO [train.py:715] (7/8) Epoch 7, batch 24450, loss[loss=0.1455, simple_loss=0.2262, pruned_loss=0.03238, over 4777.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2166, pruned_loss=0.03643, over 971920.48 frames.], batch size: 18, lr: 2.89e-04 +2022-05-05 23:02:06,217 INFO [train.py:715] (7/8) Epoch 7, batch 24500, loss[loss=0.1401, simple_loss=0.2138, pruned_loss=0.0332, over 4833.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2177, pruned_loss=0.03747, over 972603.62 frames.], batch size: 15, lr: 2.89e-04 +2022-05-05 23:02:43,833 INFO [train.py:715] (7/8) Epoch 7, batch 24550, loss[loss=0.147, simple_loss=0.222, pruned_loss=0.03605, over 4971.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03731, over 973225.36 frames.], batch size: 35, lr: 2.88e-04 +2022-05-05 23:03:22,004 INFO [train.py:715] (7/8) Epoch 7, batch 24600, loss[loss=0.1369, simple_loss=0.2137, pruned_loss=0.0301, over 4793.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03678, over 973725.15 frames.], batch size: 12, lr: 2.88e-04 +2022-05-05 23:04:01,122 INFO [train.py:715] (7/8) Epoch 7, batch 24650, loss[loss=0.1276, simple_loss=0.2077, pruned_loss=0.02372, over 4899.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2178, pruned_loss=0.03675, over 972466.74 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:04:39,572 
INFO [train.py:715] (7/8) Epoch 7, batch 24700, loss[loss=0.1468, simple_loss=0.2233, pruned_loss=0.03513, over 4694.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2191, pruned_loss=0.03735, over 971767.04 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:05:17,696 INFO [train.py:715] (7/8) Epoch 7, batch 24750, loss[loss=0.1946, simple_loss=0.2559, pruned_loss=0.06662, over 4900.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2186, pruned_loss=0.03728, over 972632.75 frames.], batch size: 17, lr: 2.88e-04 +2022-05-05 23:05:56,160 INFO [train.py:715] (7/8) Epoch 7, batch 24800, loss[loss=0.1486, simple_loss=0.2285, pruned_loss=0.03437, over 4887.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03723, over 973103.89 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:06:35,234 INFO [train.py:715] (7/8) Epoch 7, batch 24850, loss[loss=0.1775, simple_loss=0.2433, pruned_loss=0.05581, over 4806.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2189, pruned_loss=0.03768, over 972359.35 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:07:13,829 INFO [train.py:715] (7/8) Epoch 7, batch 24900, loss[loss=0.1624, simple_loss=0.225, pruned_loss=0.04986, over 4958.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2183, pruned_loss=0.03763, over 972572.99 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:07:53,088 INFO [train.py:715] (7/8) Epoch 7, batch 24950, loss[loss=0.1419, simple_loss=0.2133, pruned_loss=0.03523, over 4786.00 frames.], tot_loss[loss=0.1465, simple_loss=0.218, pruned_loss=0.03749, over 972626.71 frames.], batch size: 14, lr: 2.88e-04 +2022-05-05 23:08:32,940 INFO [train.py:715] (7/8) Epoch 7, batch 25000, loss[loss=0.1244, simple_loss=0.1965, pruned_loss=0.02611, over 4791.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03713, over 972817.95 frames.], batch size: 13, lr: 2.88e-04 +2022-05-05 23:09:12,211 INFO [train.py:715] (7/8) Epoch 7, batch 25050, loss[loss=0.1744, simple_loss=0.2459, pruned_loss=0.05144, over 4893.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2183, pruned_loss=0.0371, over 972215.70 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:09:51,231 INFO [train.py:715] (7/8) Epoch 7, batch 25100, loss[loss=0.1357, simple_loss=0.2269, pruned_loss=0.02223, over 4826.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2169, pruned_loss=0.0364, over 971038.47 frames.], batch size: 26, lr: 2.88e-04 +2022-05-05 23:10:31,401 INFO [train.py:715] (7/8) Epoch 7, batch 25150, loss[loss=0.1652, simple_loss=0.2144, pruned_loss=0.05794, over 4962.00 frames.], tot_loss[loss=0.144, simple_loss=0.2159, pruned_loss=0.03603, over 971725.50 frames.], batch size: 14, lr: 2.88e-04 +2022-05-05 23:11:11,713 INFO [train.py:715] (7/8) Epoch 7, batch 25200, loss[loss=0.1465, simple_loss=0.2229, pruned_loss=0.035, over 4877.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03655, over 971358.50 frames.], batch size: 16, lr: 2.88e-04 +2022-05-05 23:11:51,361 INFO [train.py:715] (7/8) Epoch 7, batch 25250, loss[loss=0.141, simple_loss=0.2101, pruned_loss=0.03596, over 4828.00 frames.], tot_loss[loss=0.145, simple_loss=0.2165, pruned_loss=0.03674, over 971997.03 frames.], batch size: 13, lr: 2.88e-04 +2022-05-05 23:12:31,935 INFO [train.py:715] (7/8) Epoch 7, batch 25300, loss[loss=0.1489, simple_loss=0.2184, pruned_loss=0.03974, over 4972.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2164, pruned_loss=0.03717, over 972737.94 frames.], batch size: 31, lr: 2.88e-04 +2022-05-05 23:13:13,666 INFO [train.py:715] (7/8) 
Epoch 7, batch 25350, loss[loss=0.1424, simple_loss=0.2002, pruned_loss=0.04231, over 4790.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2178, pruned_loss=0.03799, over 971879.55 frames.], batch size: 13, lr: 2.88e-04 +2022-05-05 23:13:55,234 INFO [train.py:715] (7/8) Epoch 7, batch 25400, loss[loss=0.1456, simple_loss=0.214, pruned_loss=0.03857, over 4869.00 frames.], tot_loss[loss=0.1482, simple_loss=0.2189, pruned_loss=0.03877, over 971848.40 frames.], batch size: 20, lr: 2.88e-04 +2022-05-05 23:14:36,169 INFO [train.py:715] (7/8) Epoch 7, batch 25450, loss[loss=0.1406, simple_loss=0.2101, pruned_loss=0.03553, over 4958.00 frames.], tot_loss[loss=0.1476, simple_loss=0.2188, pruned_loss=0.03824, over 972369.86 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:15:18,364 INFO [train.py:715] (7/8) Epoch 7, batch 25500, loss[loss=0.1241, simple_loss=0.2057, pruned_loss=0.02123, over 4936.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2189, pruned_loss=0.03828, over 972573.16 frames.], batch size: 29, lr: 2.88e-04 +2022-05-05 23:16:00,250 INFO [train.py:715] (7/8) Epoch 7, batch 25550, loss[loss=0.1488, simple_loss=0.2293, pruned_loss=0.03413, over 4884.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2181, pruned_loss=0.03787, over 971464.16 frames.], batch size: 22, lr: 2.88e-04 +2022-05-05 23:16:41,013 INFO [train.py:715] (7/8) Epoch 7, batch 25600, loss[loss=0.1392, simple_loss=0.2128, pruned_loss=0.0328, over 4987.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2182, pruned_loss=0.03759, over 971865.73 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:17:22,271 INFO [train.py:715] (7/8) Epoch 7, batch 25650, loss[loss=0.1269, simple_loss=0.1998, pruned_loss=0.027, over 4760.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2184, pruned_loss=0.03776, over 972264.52 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:18:03,672 INFO [train.py:715] (7/8) Epoch 7, batch 25700, loss[loss=0.141, simple_loss=0.216, pruned_loss=0.033, over 4752.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03717, over 971923.16 frames.], batch size: 19, lr: 2.88e-04 +2022-05-05 23:18:45,503 INFO [train.py:715] (7/8) Epoch 7, batch 25750, loss[loss=0.1656, simple_loss=0.2437, pruned_loss=0.04375, over 4812.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03604, over 972156.74 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:19:26,138 INFO [train.py:715] (7/8) Epoch 7, batch 25800, loss[loss=0.1308, simple_loss=0.201, pruned_loss=0.03026, over 4875.00 frames.], tot_loss[loss=0.145, simple_loss=0.2168, pruned_loss=0.03657, over 973128.13 frames.], batch size: 16, lr: 2.88e-04 +2022-05-05 23:20:08,461 INFO [train.py:715] (7/8) Epoch 7, batch 25850, loss[loss=0.1456, simple_loss=0.2128, pruned_loss=0.03918, over 4800.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2169, pruned_loss=0.03683, over 973730.30 frames.], batch size: 17, lr: 2.88e-04 +2022-05-05 23:20:50,389 INFO [train.py:715] (7/8) Epoch 7, batch 25900, loss[loss=0.1347, simple_loss=0.2078, pruned_loss=0.03084, over 4857.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2166, pruned_loss=0.03686, over 973572.87 frames.], batch size: 20, lr: 2.88e-04 +2022-05-05 23:21:31,304 INFO [train.py:715] (7/8) Epoch 7, batch 25950, loss[loss=0.1739, simple_loss=0.2414, pruned_loss=0.0532, over 4985.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2164, pruned_loss=0.03662, over 973649.94 frames.], batch size: 25, lr: 2.88e-04 +2022-05-05 23:22:12,745 INFO [train.py:715] (7/8) Epoch 7, batch 26000, 
loss[loss=0.1216, simple_loss=0.182, pruned_loss=0.0306, over 4740.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2167, pruned_loss=0.03693, over 973523.71 frames.], batch size: 16, lr: 2.88e-04 +2022-05-05 23:22:54,187 INFO [train.py:715] (7/8) Epoch 7, batch 26050, loss[loss=0.1613, simple_loss=0.2456, pruned_loss=0.03848, over 4817.00 frames.], tot_loss[loss=0.145, simple_loss=0.2167, pruned_loss=0.03663, over 973583.73 frames.], batch size: 21, lr: 2.88e-04 +2022-05-05 23:23:36,131 INFO [train.py:715] (7/8) Epoch 7, batch 26100, loss[loss=0.1227, simple_loss=0.1852, pruned_loss=0.03011, over 4982.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03663, over 974047.16 frames.], batch size: 14, lr: 2.88e-04 +2022-05-05 23:24:16,479 INFO [train.py:715] (7/8) Epoch 7, batch 26150, loss[loss=0.1243, simple_loss=0.1957, pruned_loss=0.02648, over 4955.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03608, over 973210.23 frames.], batch size: 24, lr: 2.88e-04 +2022-05-05 23:24:57,989 INFO [train.py:715] (7/8) Epoch 7, batch 26200, loss[loss=0.1441, simple_loss=0.2239, pruned_loss=0.03216, over 4992.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03615, over 973246.72 frames.], batch size: 20, lr: 2.88e-04 +2022-05-05 23:25:39,234 INFO [train.py:715] (7/8) Epoch 7, batch 26250, loss[loss=0.1962, simple_loss=0.2555, pruned_loss=0.06848, over 4902.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03602, over 973037.39 frames.], batch size: 18, lr: 2.88e-04 +2022-05-05 23:26:19,598 INFO [train.py:715] (7/8) Epoch 7, batch 26300, loss[loss=0.1161, simple_loss=0.1875, pruned_loss=0.02235, over 4829.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03562, over 972509.54 frames.], batch size: 13, lr: 2.88e-04 +2022-05-05 23:26:59,776 INFO [train.py:715] (7/8) Epoch 7, batch 26350, loss[loss=0.1617, simple_loss=0.2166, pruned_loss=0.05338, over 4685.00 frames.], tot_loss[loss=0.144, simple_loss=0.2159, pruned_loss=0.03608, over 972817.73 frames.], batch size: 15, lr: 2.88e-04 +2022-05-05 23:27:40,226 INFO [train.py:715] (7/8) Epoch 7, batch 26400, loss[loss=0.1648, simple_loss=0.2273, pruned_loss=0.05117, over 4897.00 frames.], tot_loss[loss=0.145, simple_loss=0.2171, pruned_loss=0.0365, over 972224.78 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:28:20,884 INFO [train.py:715] (7/8) Epoch 7, batch 26450, loss[loss=0.1302, simple_loss=0.2013, pruned_loss=0.02959, over 4805.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03692, over 971997.94 frames.], batch size: 24, lr: 2.87e-04 +2022-05-05 23:29:00,624 INFO [train.py:715] (7/8) Epoch 7, batch 26500, loss[loss=0.1476, simple_loss=0.2142, pruned_loss=0.04051, over 4964.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2162, pruned_loss=0.03658, over 972437.88 frames.], batch size: 28, lr: 2.87e-04 +2022-05-05 23:29:40,313 INFO [train.py:715] (7/8) Epoch 7, batch 26550, loss[loss=0.1306, simple_loss=0.2155, pruned_loss=0.02289, over 4881.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2158, pruned_loss=0.03623, over 972063.24 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:30:20,785 INFO [train.py:715] (7/8) Epoch 7, batch 26600, loss[loss=0.1405, simple_loss=0.2154, pruned_loss=0.03278, over 4696.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2157, pruned_loss=0.03574, over 970967.24 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:31:00,459 INFO [train.py:715] (7/8) Epoch 7, batch 26650, loss[loss=0.1565, 
simple_loss=0.2287, pruned_loss=0.04217, over 4908.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03598, over 971147.42 frames.], batch size: 18, lr: 2.87e-04 +2022-05-05 23:31:40,551 INFO [train.py:715] (7/8) Epoch 7, batch 26700, loss[loss=0.1256, simple_loss=0.2033, pruned_loss=0.02397, over 4818.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2164, pruned_loss=0.03592, over 970265.29 frames.], batch size: 26, lr: 2.87e-04 +2022-05-05 23:32:21,229 INFO [train.py:715] (7/8) Epoch 7, batch 26750, loss[loss=0.1295, simple_loss=0.2082, pruned_loss=0.02542, over 4928.00 frames.], tot_loss[loss=0.145, simple_loss=0.2175, pruned_loss=0.03623, over 970840.69 frames.], batch size: 23, lr: 2.87e-04 +2022-05-05 23:33:01,189 INFO [train.py:715] (7/8) Epoch 7, batch 26800, loss[loss=0.147, simple_loss=0.2156, pruned_loss=0.03923, over 4978.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2179, pruned_loss=0.03641, over 970842.80 frames.], batch size: 35, lr: 2.87e-04 +2022-05-05 23:33:40,947 INFO [train.py:715] (7/8) Epoch 7, batch 26850, loss[loss=0.1173, simple_loss=0.1923, pruned_loss=0.02114, over 4862.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2172, pruned_loss=0.03604, over 971622.94 frames.], batch size: 20, lr: 2.87e-04 +2022-05-05 23:34:21,587 INFO [train.py:715] (7/8) Epoch 7, batch 26900, loss[loss=0.1782, simple_loss=0.2354, pruned_loss=0.06044, over 4911.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03607, over 972504.64 frames.], batch size: 17, lr: 2.87e-04 +2022-05-05 23:35:02,613 INFO [train.py:715] (7/8) Epoch 7, batch 26950, loss[loss=0.1481, simple_loss=0.2264, pruned_loss=0.03489, over 4797.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2162, pruned_loss=0.03562, over 973494.91 frames.], batch size: 24, lr: 2.87e-04 +2022-05-05 23:35:42,949 INFO [train.py:715] (7/8) Epoch 7, batch 27000, loss[loss=0.1638, simple_loss=0.2262, pruned_loss=0.05072, over 4909.00 frames.], tot_loss[loss=0.1444, simple_loss=0.217, pruned_loss=0.03594, over 973925.78 frames.], batch size: 17, lr: 2.87e-04 +2022-05-05 23:35:42,950 INFO [train.py:733] (7/8) Computing validation loss +2022-05-05 23:35:52,668 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.108, simple_loss=0.1928, pruned_loss=0.01156, over 914524.00 frames. 
+2022-05-05 23:36:33,206 INFO [train.py:715] (7/8) Epoch 7, batch 27050, loss[loss=0.1474, simple_loss=0.2208, pruned_loss=0.03705, over 4972.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03575, over 973664.64 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:37:14,381 INFO [train.py:715] (7/8) Epoch 7, batch 27100, loss[loss=0.1528, simple_loss=0.2314, pruned_loss=0.03709, over 4859.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.03567, over 973371.17 frames.], batch size: 30, lr: 2.87e-04 +2022-05-05 23:37:56,248 INFO [train.py:715] (7/8) Epoch 7, batch 27150, loss[loss=0.1484, simple_loss=0.2122, pruned_loss=0.04229, over 4974.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03599, over 974283.94 frames.], batch size: 14, lr: 2.87e-04 +2022-05-05 23:38:37,503 INFO [train.py:715] (7/8) Epoch 7, batch 27200, loss[loss=0.1289, simple_loss=0.1975, pruned_loss=0.03015, over 4776.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2161, pruned_loss=0.03613, over 973809.86 frames.], batch size: 12, lr: 2.87e-04 +2022-05-05 23:39:18,967 INFO [train.py:715] (7/8) Epoch 7, batch 27250, loss[loss=0.1544, simple_loss=0.2289, pruned_loss=0.03994, over 4761.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.03651, over 974159.86 frames.], batch size: 19, lr: 2.87e-04 +2022-05-05 23:40:00,803 INFO [train.py:715] (7/8) Epoch 7, batch 27300, loss[loss=0.1516, simple_loss=0.2298, pruned_loss=0.03667, over 4798.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2174, pruned_loss=0.03669, over 973062.47 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:40:41,770 INFO [train.py:715] (7/8) Epoch 7, batch 27350, loss[loss=0.1827, simple_loss=0.2322, pruned_loss=0.06666, over 4958.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03681, over 972628.38 frames.], batch size: 14, lr: 2.87e-04 +2022-05-05 23:41:23,053 INFO [train.py:715] (7/8) Epoch 7, batch 27400, loss[loss=0.1477, simple_loss=0.225, pruned_loss=0.03523, over 4794.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.0366, over 972723.84 frames.], batch size: 21, lr: 2.87e-04 +2022-05-05 23:42:04,079 INFO [train.py:715] (7/8) Epoch 7, batch 27450, loss[loss=0.1274, simple_loss=0.2029, pruned_loss=0.0259, over 4850.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2175, pruned_loss=0.0366, over 972203.18 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:42:45,294 INFO [train.py:715] (7/8) Epoch 7, batch 27500, loss[loss=0.1219, simple_loss=0.1943, pruned_loss=0.02471, over 4750.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2169, pruned_loss=0.03639, over 973160.05 frames.], batch size: 19, lr: 2.87e-04 +2022-05-05 23:43:25,860 INFO [train.py:715] (7/8) Epoch 7, batch 27550, loss[loss=0.1403, simple_loss=0.2117, pruned_loss=0.03442, over 4892.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2179, pruned_loss=0.03711, over 972476.41 frames.], batch size: 19, lr: 2.87e-04 +2022-05-05 23:44:06,390 INFO [train.py:715] (7/8) Epoch 7, batch 27600, loss[loss=0.1359, simple_loss=0.2155, pruned_loss=0.0282, over 4827.00 frames.], tot_loss[loss=0.1459, simple_loss=0.218, pruned_loss=0.03695, over 972260.77 frames.], batch size: 27, lr: 2.87e-04 +2022-05-05 23:44:47,781 INFO [train.py:715] (7/8) Epoch 7, batch 27650, loss[loss=0.1219, simple_loss=0.2085, pruned_loss=0.01769, over 4804.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2184, pruned_loss=0.03714, over 971975.67 frames.], batch size: 24, lr: 2.87e-04 +2022-05-05 23:45:28,504 
INFO [train.py:715] (7/8) Epoch 7, batch 27700, loss[loss=0.1672, simple_loss=0.2323, pruned_loss=0.05103, over 4935.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03679, over 972630.63 frames.], batch size: 18, lr: 2.87e-04 +2022-05-05 23:46:09,243 INFO [train.py:715] (7/8) Epoch 7, batch 27750, loss[loss=0.1224, simple_loss=0.186, pruned_loss=0.02941, over 4970.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2182, pruned_loss=0.03671, over 972330.41 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:46:50,120 INFO [train.py:715] (7/8) Epoch 7, batch 27800, loss[loss=0.1611, simple_loss=0.2303, pruned_loss=0.04596, over 4873.00 frames.], tot_loss[loss=0.1471, simple_loss=0.2191, pruned_loss=0.0375, over 972339.75 frames.], batch size: 38, lr: 2.87e-04 +2022-05-05 23:47:31,340 INFO [train.py:715] (7/8) Epoch 7, batch 27850, loss[loss=0.1307, simple_loss=0.2048, pruned_loss=0.02834, over 4766.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2185, pruned_loss=0.03705, over 972845.22 frames.], batch size: 12, lr: 2.87e-04 +2022-05-05 23:48:11,400 INFO [train.py:715] (7/8) Epoch 7, batch 27900, loss[loss=0.1488, simple_loss=0.2212, pruned_loss=0.03823, over 4829.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2189, pruned_loss=0.03735, over 973891.76 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:48:52,354 INFO [train.py:715] (7/8) Epoch 7, batch 27950, loss[loss=0.1655, simple_loss=0.2315, pruned_loss=0.04978, over 4970.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2187, pruned_loss=0.03722, over 972560.14 frames.], batch size: 35, lr: 2.87e-04 +2022-05-05 23:49:33,552 INFO [train.py:715] (7/8) Epoch 7, batch 28000, loss[loss=0.1623, simple_loss=0.2399, pruned_loss=0.04232, over 4697.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2178, pruned_loss=0.03692, over 972166.66 frames.], batch size: 15, lr: 2.87e-04 +2022-05-05 23:50:14,241 INFO [train.py:715] (7/8) Epoch 7, batch 28050, loss[loss=0.1633, simple_loss=0.2345, pruned_loss=0.04603, over 4752.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03648, over 971950.92 frames.], batch size: 16, lr: 2.87e-04 +2022-05-05 23:50:54,404 INFO [train.py:715] (7/8) Epoch 7, batch 28100, loss[loss=0.1492, simple_loss=0.2174, pruned_loss=0.04047, over 4896.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2182, pruned_loss=0.03731, over 972281.81 frames.], batch size: 39, lr: 2.87e-04 +2022-05-05 23:51:35,210 INFO [train.py:715] (7/8) Epoch 7, batch 28150, loss[loss=0.1396, simple_loss=0.2118, pruned_loss=0.03372, over 4795.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03735, over 972301.29 frames.], batch size: 18, lr: 2.87e-04 +2022-05-05 23:52:16,640 INFO [train.py:715] (7/8) Epoch 7, batch 28200, loss[loss=0.1584, simple_loss=0.22, pruned_loss=0.04845, over 4778.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03658, over 972644.27 frames.], batch size: 17, lr: 2.87e-04 +2022-05-05 23:52:56,844 INFO [train.py:715] (7/8) Epoch 7, batch 28250, loss[loss=0.1546, simple_loss=0.2114, pruned_loss=0.04894, over 4794.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03619, over 971514.19 frames.], batch size: 24, lr: 2.87e-04 +2022-05-05 23:53:38,368 INFO [train.py:715] (7/8) Epoch 7, batch 28300, loss[loss=0.1959, simple_loss=0.2548, pruned_loss=0.06848, over 4834.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2174, pruned_loss=0.03646, over 971005.29 frames.], batch size: 30, lr: 2.86e-04 +2022-05-05 23:54:21,477 INFO [train.py:715] 
(7/8) Epoch 7, batch 28350, loss[loss=0.1328, simple_loss=0.2087, pruned_loss=0.02845, over 4803.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.03631, over 971054.95 frames.], batch size: 21, lr: 2.86e-04 +2022-05-05 23:55:01,292 INFO [train.py:715] (7/8) Epoch 7, batch 28400, loss[loss=0.1468, simple_loss=0.2238, pruned_loss=0.03488, over 4789.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03606, over 971108.41 frames.], batch size: 18, lr: 2.86e-04 +2022-05-05 23:55:40,825 INFO [train.py:715] (7/8) Epoch 7, batch 28450, loss[loss=0.1515, simple_loss=0.2097, pruned_loss=0.04667, over 4883.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2169, pruned_loss=0.0361, over 971525.34 frames.], batch size: 20, lr: 2.86e-04 +2022-05-05 23:56:20,930 INFO [train.py:715] (7/8) Epoch 7, batch 28500, loss[loss=0.155, simple_loss=0.2181, pruned_loss=0.04591, over 4912.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.0362, over 972183.19 frames.], batch size: 17, lr: 2.86e-04 +2022-05-05 23:57:01,424 INFO [train.py:715] (7/8) Epoch 7, batch 28550, loss[loss=0.1317, simple_loss=0.2053, pruned_loss=0.02906, over 4828.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2164, pruned_loss=0.03633, over 972213.76 frames.], batch size: 27, lr: 2.86e-04 +2022-05-05 23:57:41,416 INFO [train.py:715] (7/8) Epoch 7, batch 28600, loss[loss=0.1151, simple_loss=0.1881, pruned_loss=0.02101, over 4781.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2171, pruned_loss=0.03638, over 972066.83 frames.], batch size: 18, lr: 2.86e-04 +2022-05-05 23:58:21,634 INFO [train.py:715] (7/8) Epoch 7, batch 28650, loss[loss=0.1395, simple_loss=0.216, pruned_loss=0.03152, over 4853.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.0369, over 972550.99 frames.], batch size: 20, lr: 2.86e-04 +2022-05-05 23:59:03,073 INFO [train.py:715] (7/8) Epoch 7, batch 28700, loss[loss=0.1457, simple_loss=0.2279, pruned_loss=0.03179, over 4956.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2175, pruned_loss=0.03677, over 972491.22 frames.], batch size: 35, lr: 2.86e-04 +2022-05-05 23:59:43,956 INFO [train.py:715] (7/8) Epoch 7, batch 28750, loss[loss=0.1375, simple_loss=0.2152, pruned_loss=0.02989, over 4855.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03705, over 972613.52 frames.], batch size: 16, lr: 2.86e-04 +2022-05-06 00:00:24,192 INFO [train.py:715] (7/8) Epoch 7, batch 28800, loss[loss=0.143, simple_loss=0.2055, pruned_loss=0.04022, over 4987.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03676, over 971767.19 frames.], batch size: 25, lr: 2.86e-04 +2022-05-06 00:01:04,805 INFO [train.py:715] (7/8) Epoch 7, batch 28850, loss[loss=0.1211, simple_loss=0.1934, pruned_loss=0.02435, over 4779.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03664, over 971204.20 frames.], batch size: 18, lr: 2.86e-04 +2022-05-06 00:01:45,171 INFO [train.py:715] (7/8) Epoch 7, batch 28900, loss[loss=0.183, simple_loss=0.2408, pruned_loss=0.06263, over 4982.00 frames.], tot_loss[loss=0.145, simple_loss=0.217, pruned_loss=0.03643, over 972218.56 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:02:24,696 INFO [train.py:715] (7/8) Epoch 7, batch 28950, loss[loss=0.1515, simple_loss=0.2189, pruned_loss=0.04205, over 4989.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03581, over 972369.57 frames.], batch size: 25, lr: 2.86e-04 +2022-05-06 00:03:04,247 INFO [train.py:715] (7/8) Epoch 7, batch 29000, 
loss[loss=0.133, simple_loss=0.2186, pruned_loss=0.02372, over 4839.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03527, over 971844.05 frames.], batch size: 26, lr: 2.86e-04 +2022-05-06 00:03:44,907 INFO [train.py:715] (7/8) Epoch 7, batch 29050, loss[loss=0.1673, simple_loss=0.2324, pruned_loss=0.05107, over 4754.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2167, pruned_loss=0.03549, over 972067.34 frames.], batch size: 14, lr: 2.86e-04 +2022-05-06 00:04:24,477 INFO [train.py:715] (7/8) Epoch 7, batch 29100, loss[loss=0.1613, simple_loss=0.236, pruned_loss=0.04329, over 4870.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2184, pruned_loss=0.03602, over 971505.78 frames.], batch size: 30, lr: 2.86e-04 +2022-05-06 00:05:04,251 INFO [train.py:715] (7/8) Epoch 7, batch 29150, loss[loss=0.1283, simple_loss=0.1953, pruned_loss=0.0306, over 4861.00 frames.], tot_loss[loss=0.145, simple_loss=0.2177, pruned_loss=0.0361, over 971977.97 frames.], batch size: 13, lr: 2.86e-04 +2022-05-06 00:05:44,142 INFO [train.py:715] (7/8) Epoch 7, batch 29200, loss[loss=0.1522, simple_loss=0.2219, pruned_loss=0.04119, over 4912.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2168, pruned_loss=0.03572, over 972400.21 frames.], batch size: 17, lr: 2.86e-04 +2022-05-06 00:06:24,420 INFO [train.py:715] (7/8) Epoch 7, batch 29250, loss[loss=0.1497, simple_loss=0.2195, pruned_loss=0.03997, over 4837.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2164, pruned_loss=0.03625, over 971752.38 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:07:04,328 INFO [train.py:715] (7/8) Epoch 7, batch 29300, loss[loss=0.1271, simple_loss=0.2048, pruned_loss=0.02475, over 4961.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2159, pruned_loss=0.03626, over 971423.78 frames.], batch size: 24, lr: 2.86e-04 +2022-05-06 00:07:44,014 INFO [train.py:715] (7/8) Epoch 7, batch 29350, loss[loss=0.1373, simple_loss=0.2092, pruned_loss=0.03274, over 4965.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2161, pruned_loss=0.03664, over 972218.19 frames.], batch size: 28, lr: 2.86e-04 +2022-05-06 00:08:24,290 INFO [train.py:715] (7/8) Epoch 7, batch 29400, loss[loss=0.1643, simple_loss=0.2209, pruned_loss=0.0538, over 4986.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2169, pruned_loss=0.0369, over 972487.44 frames.], batch size: 35, lr: 2.86e-04 +2022-05-06 00:09:03,562 INFO [train.py:715] (7/8) Epoch 7, batch 29450, loss[loss=0.1636, simple_loss=0.2406, pruned_loss=0.04332, over 4772.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2162, pruned_loss=0.03663, over 972049.29 frames.], batch size: 14, lr: 2.86e-04 +2022-05-06 00:09:43,849 INFO [train.py:715] (7/8) Epoch 7, batch 29500, loss[loss=0.1529, simple_loss=0.226, pruned_loss=0.03986, over 4985.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03617, over 972442.44 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:10:23,572 INFO [train.py:715] (7/8) Epoch 7, batch 29550, loss[loss=0.1247, simple_loss=0.2022, pruned_loss=0.02361, over 4931.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.03657, over 972667.76 frames.], batch size: 29, lr: 2.86e-04 +2022-05-06 00:11:03,253 INFO [train.py:715] (7/8) Epoch 7, batch 29600, loss[loss=0.1663, simple_loss=0.2298, pruned_loss=0.05135, over 4824.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2177, pruned_loss=0.03755, over 972112.31 frames.], batch size: 26, lr: 2.86e-04 +2022-05-06 00:11:43,206 INFO [train.py:715] (7/8) Epoch 7, batch 29650, loss[loss=0.1456, 
simple_loss=0.2326, pruned_loss=0.0293, over 4695.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.0372, over 972258.93 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:12:23,003 INFO [train.py:715] (7/8) Epoch 7, batch 29700, loss[loss=0.1419, simple_loss=0.2111, pruned_loss=0.03632, over 4947.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03715, over 973126.16 frames.], batch size: 39, lr: 2.86e-04 +2022-05-06 00:13:02,663 INFO [train.py:715] (7/8) Epoch 7, batch 29750, loss[loss=0.1734, simple_loss=0.2383, pruned_loss=0.05425, over 4813.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2181, pruned_loss=0.03707, over 972879.44 frames.], batch size: 15, lr: 2.86e-04 +2022-05-06 00:13:42,293 INFO [train.py:715] (7/8) Epoch 7, batch 29800, loss[loss=0.1288, simple_loss=0.2008, pruned_loss=0.02841, over 4764.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2186, pruned_loss=0.03753, over 972690.03 frames.], batch size: 14, lr: 2.86e-04 +2022-05-06 00:14:22,412 INFO [train.py:715] (7/8) Epoch 7, batch 29850, loss[loss=0.1818, simple_loss=0.2588, pruned_loss=0.05245, over 4877.00 frames.], tot_loss[loss=0.1468, simple_loss=0.2188, pruned_loss=0.03738, over 971990.29 frames.], batch size: 22, lr: 2.86e-04 +2022-05-06 00:15:02,283 INFO [train.py:715] (7/8) Epoch 7, batch 29900, loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03699, over 4893.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03722, over 971931.19 frames.], batch size: 19, lr: 2.86e-04 +2022-05-06 00:15:41,862 INFO [train.py:715] (7/8) Epoch 7, batch 29950, loss[loss=0.135, simple_loss=0.2098, pruned_loss=0.03005, over 4977.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2185, pruned_loss=0.0376, over 971924.64 frames.], batch size: 28, lr: 2.86e-04 +2022-05-06 00:16:21,225 INFO [train.py:715] (7/8) Epoch 7, batch 30000, loss[loss=0.1663, simple_loss=0.24, pruned_loss=0.04631, over 4934.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03759, over 972698.82 frames.], batch size: 23, lr: 2.86e-04 +2022-05-06 00:16:21,226 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 00:16:41,747 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.1081, simple_loss=0.1929, pruned_loss=0.01164, over 914524.00 frames. 
+2022-05-06 00:17:21,557 INFO [train.py:715] (7/8) Epoch 7, batch 30050, loss[loss=0.1715, simple_loss=0.2421, pruned_loss=0.05043, over 4833.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2188, pruned_loss=0.03746, over 973389.86 frames.], batch size: 20, lr: 2.86e-04 +2022-05-06 00:18:00,848 INFO [train.py:715] (7/8) Epoch 7, batch 30100, loss[loss=0.131, simple_loss=0.2072, pruned_loss=0.02741, over 4900.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2194, pruned_loss=0.03764, over 972758.54 frames.], batch size: 17, lr: 2.86e-04 +2022-05-06 00:18:40,788 INFO [train.py:715] (7/8) Epoch 7, batch 30150, loss[loss=0.1265, simple_loss=0.1983, pruned_loss=0.02733, over 4963.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2183, pruned_loss=0.03724, over 972573.62 frames.], batch size: 28, lr: 2.86e-04 +2022-05-06 00:19:20,431 INFO [train.py:715] (7/8) Epoch 7, batch 30200, loss[loss=0.1008, simple_loss=0.1746, pruned_loss=0.01353, over 4844.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03685, over 973175.76 frames.], batch size: 20, lr: 2.85e-04 +2022-05-06 00:20:00,697 INFO [train.py:715] (7/8) Epoch 7, batch 30250, loss[loss=0.1258, simple_loss=0.1882, pruned_loss=0.03172, over 4782.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2165, pruned_loss=0.03658, over 972886.90 frames.], batch size: 14, lr: 2.85e-04 +2022-05-06 00:20:39,867 INFO [train.py:715] (7/8) Epoch 7, batch 30300, loss[loss=0.1601, simple_loss=0.2398, pruned_loss=0.04017, over 4971.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2169, pruned_loss=0.03638, over 973334.90 frames.], batch size: 15, lr: 2.85e-04 +2022-05-06 00:21:19,491 INFO [train.py:715] (7/8) Epoch 7, batch 30350, loss[loss=0.1664, simple_loss=0.2398, pruned_loss=0.04651, over 4985.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03606, over 973445.97 frames.], batch size: 14, lr: 2.85e-04 +2022-05-06 00:21:58,988 INFO [train.py:715] (7/8) Epoch 7, batch 30400, loss[loss=0.1458, simple_loss=0.2217, pruned_loss=0.03495, over 4958.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03619, over 972533.44 frames.], batch size: 28, lr: 2.85e-04 +2022-05-06 00:22:38,977 INFO [train.py:715] (7/8) Epoch 7, batch 30450, loss[loss=0.142, simple_loss=0.2204, pruned_loss=0.03181, over 4819.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2164, pruned_loss=0.03634, over 972410.83 frames.], batch size: 27, lr: 2.85e-04 +2022-05-06 00:23:18,897 INFO [train.py:715] (7/8) Epoch 7, batch 30500, loss[loss=0.1723, simple_loss=0.245, pruned_loss=0.04985, over 4769.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2175, pruned_loss=0.03737, over 971982.88 frames.], batch size: 19, lr: 2.85e-04 +2022-05-06 00:23:58,829 INFO [train.py:715] (7/8) Epoch 7, batch 30550, loss[loss=0.1372, simple_loss=0.2129, pruned_loss=0.03079, over 4836.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2167, pruned_loss=0.03712, over 971437.16 frames.], batch size: 15, lr: 2.85e-04 +2022-05-06 00:24:38,530 INFO [train.py:715] (7/8) Epoch 7, batch 30600, loss[loss=0.1418, simple_loss=0.2174, pruned_loss=0.03314, over 4974.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2177, pruned_loss=0.03762, over 970238.89 frames.], batch size: 28, lr: 2.85e-04 +2022-05-06 00:25:18,167 INFO [train.py:715] (7/8) Epoch 7, batch 30650, loss[loss=0.1684, simple_loss=0.2407, pruned_loss=0.04803, over 4784.00 frames.], tot_loss[loss=0.147, simple_loss=0.2182, pruned_loss=0.03793, over 970616.69 frames.], batch size: 18, lr: 2.85e-04 +2022-05-06 
00:25:57,788 INFO [train.py:715] (7/8) Epoch 7, batch 30700, loss[loss=0.1283, simple_loss=0.2058, pruned_loss=0.02542, over 4869.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2181, pruned_loss=0.03789, over 971140.08 frames.], batch size: 22, lr: 2.85e-04 +2022-05-06 00:26:36,837 INFO [train.py:715] (7/8) Epoch 7, batch 30750, loss[loss=0.1165, simple_loss=0.1851, pruned_loss=0.02391, over 4974.00 frames.], tot_loss[loss=0.147, simple_loss=0.218, pruned_loss=0.03801, over 971074.83 frames.], batch size: 25, lr: 2.85e-04 +2022-05-06 00:27:15,904 INFO [train.py:715] (7/8) Epoch 7, batch 30800, loss[loss=0.1407, simple_loss=0.2111, pruned_loss=0.03519, over 4855.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2173, pruned_loss=0.03713, over 970452.38 frames.], batch size: 32, lr: 2.85e-04 +2022-05-06 00:27:55,687 INFO [train.py:715] (7/8) Epoch 7, batch 30850, loss[loss=0.1115, simple_loss=0.1808, pruned_loss=0.0211, over 4805.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2177, pruned_loss=0.03701, over 970524.63 frames.], batch size: 12, lr: 2.85e-04 +2022-05-06 00:28:35,190 INFO [train.py:715] (7/8) Epoch 7, batch 30900, loss[loss=0.1143, simple_loss=0.1845, pruned_loss=0.02204, over 4961.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2183, pruned_loss=0.03752, over 971490.89 frames.], batch size: 24, lr: 2.85e-04 +2022-05-06 00:29:15,591 INFO [train.py:715] (7/8) Epoch 7, batch 30950, loss[loss=0.1451, simple_loss=0.2186, pruned_loss=0.03583, over 4925.00 frames.], tot_loss[loss=0.1461, simple_loss=0.218, pruned_loss=0.03708, over 971630.75 frames.], batch size: 23, lr: 2.85e-04 +2022-05-06 00:29:54,980 INFO [train.py:715] (7/8) Epoch 7, batch 31000, loss[loss=0.1218, simple_loss=0.1797, pruned_loss=0.03195, over 4710.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2178, pruned_loss=0.03741, over 971787.24 frames.], batch size: 12, lr: 2.85e-04 +2022-05-06 00:30:34,537 INFO [train.py:715] (7/8) Epoch 7, batch 31050, loss[loss=0.1475, simple_loss=0.2173, pruned_loss=0.03887, over 4956.00 frames.], tot_loss[loss=0.1465, simple_loss=0.218, pruned_loss=0.03748, over 971852.64 frames.], batch size: 24, lr: 2.85e-04 +2022-05-06 00:31:14,373 INFO [train.py:715] (7/8) Epoch 7, batch 31100, loss[loss=0.1811, simple_loss=0.2468, pruned_loss=0.0577, over 4813.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2186, pruned_loss=0.03741, over 971851.99 frames.], batch size: 26, lr: 2.85e-04 +2022-05-06 00:31:54,494 INFO [train.py:715] (7/8) Epoch 7, batch 31150, loss[loss=0.1365, simple_loss=0.2042, pruned_loss=0.03441, over 4683.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2195, pruned_loss=0.03779, over 972294.14 frames.], batch size: 15, lr: 2.85e-04 +2022-05-06 00:32:33,847 INFO [train.py:715] (7/8) Epoch 7, batch 31200, loss[loss=0.1421, simple_loss=0.2204, pruned_loss=0.03188, over 4962.00 frames.], tot_loss[loss=0.1469, simple_loss=0.2189, pruned_loss=0.03741, over 972985.29 frames.], batch size: 24, lr: 2.85e-04 +2022-05-06 00:33:13,815 INFO [train.py:715] (7/8) Epoch 7, batch 31250, loss[loss=0.1523, simple_loss=0.2235, pruned_loss=0.04055, over 4813.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2184, pruned_loss=0.03739, over 972317.46 frames.], batch size: 25, lr: 2.85e-04 +2022-05-06 00:33:54,541 INFO [train.py:715] (7/8) Epoch 7, batch 31300, loss[loss=0.1463, simple_loss=0.2142, pruned_loss=0.03919, over 4786.00 frames.], tot_loss[loss=0.1467, simple_loss=0.2181, pruned_loss=0.03763, over 972907.82 frames.], batch size: 17, lr: 2.85e-04 +2022-05-06 00:34:34,119 INFO 
[train.py:715] (7/8) Epoch 7, batch 31350, loss[loss=0.1328, simple_loss=0.2136, pruned_loss=0.02599, over 4847.00 frames.], tot_loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.03813, over 973063.54 frames.], batch size: 32, lr: 2.85e-04 +2022-05-06 00:35:14,068 INFO [train.py:715] (7/8) Epoch 7, batch 31400, loss[loss=0.1415, simple_loss=0.221, pruned_loss=0.03098, over 4776.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2195, pruned_loss=0.03807, over 972851.66 frames.], batch size: 18, lr: 2.85e-04 +2022-05-06 00:35:53,411 INFO [train.py:715] (7/8) Epoch 7, batch 31450, loss[loss=0.1261, simple_loss=0.2092, pruned_loss=0.02151, over 4776.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2188, pruned_loss=0.03723, over 972818.54 frames.], batch size: 18, lr: 2.85e-04 +2022-05-06 00:36:33,193 INFO [train.py:715] (7/8) Epoch 7, batch 31500, loss[loss=0.1328, simple_loss=0.2058, pruned_loss=0.02994, over 4922.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2184, pruned_loss=0.03718, over 972782.30 frames.], batch size: 17, lr: 2.85e-04 +2022-05-06 00:37:12,317 INFO [train.py:715] (7/8) Epoch 7, batch 31550, loss[loss=0.1383, simple_loss=0.2121, pruned_loss=0.03229, over 4934.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03676, over 972856.94 frames.], batch size: 35, lr: 2.85e-04 +2022-05-06 00:37:52,279 INFO [train.py:715] (7/8) Epoch 7, batch 31600, loss[loss=0.1758, simple_loss=0.2385, pruned_loss=0.05652, over 4779.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03643, over 972212.89 frames.], batch size: 14, lr: 2.85e-04 +2022-05-06 00:38:32,100 INFO [train.py:715] (7/8) Epoch 7, batch 31650, loss[loss=0.1297, simple_loss=0.2091, pruned_loss=0.02517, over 4935.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.0369, over 971593.62 frames.], batch size: 23, lr: 2.85e-04 +2022-05-06 00:39:11,525 INFO [train.py:715] (7/8) Epoch 7, batch 31700, loss[loss=0.1519, simple_loss=0.2235, pruned_loss=0.04016, over 4884.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2178, pruned_loss=0.03657, over 971728.32 frames.], batch size: 16, lr: 2.85e-04 +2022-05-06 00:39:51,226 INFO [train.py:715] (7/8) Epoch 7, batch 31750, loss[loss=0.1236, simple_loss=0.2023, pruned_loss=0.02244, over 4799.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2177, pruned_loss=0.03639, over 971479.04 frames.], batch size: 25, lr: 2.85e-04 +2022-05-06 00:40:30,490 INFO [train.py:715] (7/8) Epoch 7, batch 31800, loss[loss=0.1201, simple_loss=0.188, pruned_loss=0.02614, over 4737.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03649, over 971300.45 frames.], batch size: 12, lr: 2.85e-04 +2022-05-06 00:41:09,606 INFO [train.py:715] (7/8) Epoch 7, batch 31850, loss[loss=0.1164, simple_loss=0.1992, pruned_loss=0.01678, over 4761.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03602, over 972166.57 frames.], batch size: 19, lr: 2.85e-04 +2022-05-06 00:41:49,864 INFO [train.py:715] (7/8) Epoch 7, batch 31900, loss[loss=0.1337, simple_loss=0.2155, pruned_loss=0.02597, over 4961.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2174, pruned_loss=0.03646, over 972199.61 frames.], batch size: 24, lr: 2.85e-04 +2022-05-06 00:42:30,604 INFO [train.py:715] (7/8) Epoch 7, batch 31950, loss[loss=0.1419, simple_loss=0.2238, pruned_loss=0.03002, over 4891.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2166, pruned_loss=0.03589, over 972604.19 frames.], batch size: 22, lr: 2.85e-04 +2022-05-06 00:43:11,064 INFO [train.py:715] (7/8) 
Epoch 7, batch 32000, loss[loss=0.1715, simple_loss=0.2453, pruned_loss=0.04883, over 4989.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2175, pruned_loss=0.03662, over 972052.86 frames.], batch size: 25, lr: 2.85e-04 +2022-05-06 00:43:50,734 INFO [train.py:715] (7/8) Epoch 7, batch 32050, loss[loss=0.1395, simple_loss=0.2113, pruned_loss=0.03387, over 4823.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.0366, over 972189.95 frames.], batch size: 27, lr: 2.85e-04 +2022-05-06 00:44:30,666 INFO [train.py:715] (7/8) Epoch 7, batch 32100, loss[loss=0.1373, simple_loss=0.2165, pruned_loss=0.02911, over 4941.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2176, pruned_loss=0.03656, over 972548.16 frames.], batch size: 21, lr: 2.85e-04 +2022-05-06 00:45:10,480 INFO [train.py:715] (7/8) Epoch 7, batch 32150, loss[loss=0.1421, simple_loss=0.2065, pruned_loss=0.0389, over 4925.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2169, pruned_loss=0.03597, over 972693.09 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 00:45:50,035 INFO [train.py:715] (7/8) Epoch 7, batch 32200, loss[loss=0.1383, simple_loss=0.2018, pruned_loss=0.03745, over 4806.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.03578, over 972626.35 frames.], batch size: 25, lr: 2.84e-04 +2022-05-06 00:46:29,884 INFO [train.py:715] (7/8) Epoch 7, batch 32250, loss[loss=0.1485, simple_loss=0.2275, pruned_loss=0.03476, over 4908.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2153, pruned_loss=0.03557, over 972050.58 frames.], batch size: 17, lr: 2.84e-04 +2022-05-06 00:47:09,677 INFO [train.py:715] (7/8) Epoch 7, batch 32300, loss[loss=0.1401, simple_loss=0.2119, pruned_loss=0.03413, over 4934.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2163, pruned_loss=0.03635, over 972167.51 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 00:47:50,013 INFO [train.py:715] (7/8) Epoch 7, batch 32350, loss[loss=0.1277, simple_loss=0.1981, pruned_loss=0.02862, over 4915.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03653, over 971802.40 frames.], batch size: 32, lr: 2.84e-04 +2022-05-06 00:48:29,373 INFO [train.py:715] (7/8) Epoch 7, batch 32400, loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03172, over 4814.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2175, pruned_loss=0.03671, over 973120.57 frames.], batch size: 14, lr: 2.84e-04 +2022-05-06 00:49:09,263 INFO [train.py:715] (7/8) Epoch 7, batch 32450, loss[loss=0.1247, simple_loss=0.1991, pruned_loss=0.02516, over 4946.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2168, pruned_loss=0.03604, over 973703.90 frames.], batch size: 29, lr: 2.84e-04 +2022-05-06 00:49:48,737 INFO [train.py:715] (7/8) Epoch 7, batch 32500, loss[loss=0.1319, simple_loss=0.2038, pruned_loss=0.02998, over 4909.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.03629, over 974030.90 frames.], batch size: 29, lr: 2.84e-04 +2022-05-06 00:50:28,301 INFO [train.py:715] (7/8) Epoch 7, batch 32550, loss[loss=0.1359, simple_loss=0.2089, pruned_loss=0.03141, over 4905.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03599, over 974330.01 frames.], batch size: 19, lr: 2.84e-04 +2022-05-06 00:51:08,054 INFO [train.py:715] (7/8) Epoch 7, batch 32600, loss[loss=0.1563, simple_loss=0.2309, pruned_loss=0.04083, over 4938.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03598, over 974053.44 frames.], batch size: 35, lr: 2.84e-04 +2022-05-06 00:51:47,567 INFO [train.py:715] (7/8) Epoch 7, batch 32650, 
loss[loss=0.1622, simple_loss=0.2375, pruned_loss=0.04342, over 4825.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2171, pruned_loss=0.03664, over 972615.24 frames.], batch size: 25, lr: 2.84e-04 +2022-05-06 00:52:27,385 INFO [train.py:715] (7/8) Epoch 7, batch 32700, loss[loss=0.1498, simple_loss=0.235, pruned_loss=0.03233, over 4833.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2169, pruned_loss=0.03663, over 972648.40 frames.], batch size: 26, lr: 2.84e-04 +2022-05-06 00:53:06,818 INFO [train.py:715] (7/8) Epoch 7, batch 32750, loss[loss=0.1613, simple_loss=0.2433, pruned_loss=0.03971, over 4931.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03651, over 973193.33 frames.], batch size: 21, lr: 2.84e-04 +2022-05-06 00:53:47,308 INFO [train.py:715] (7/8) Epoch 7, batch 32800, loss[loss=0.1207, simple_loss=0.1896, pruned_loss=0.02584, over 4836.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03661, over 972618.74 frames.], batch size: 13, lr: 2.84e-04 +2022-05-06 00:54:27,993 INFO [train.py:715] (7/8) Epoch 7, batch 32850, loss[loss=0.1401, simple_loss=0.2175, pruned_loss=0.0314, over 4905.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2164, pruned_loss=0.03646, over 972713.28 frames.], batch size: 17, lr: 2.84e-04 +2022-05-06 00:55:08,135 INFO [train.py:715] (7/8) Epoch 7, batch 32900, loss[loss=0.1345, simple_loss=0.2097, pruned_loss=0.02965, over 4915.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.03627, over 972742.37 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 00:55:48,472 INFO [train.py:715] (7/8) Epoch 7, batch 32950, loss[loss=0.1383, simple_loss=0.2023, pruned_loss=0.03715, over 4912.00 frames.], tot_loss[loss=0.145, simple_loss=0.2172, pruned_loss=0.03641, over 972905.31 frames.], batch size: 19, lr: 2.84e-04 +2022-05-06 00:56:28,429 INFO [train.py:715] (7/8) Epoch 7, batch 33000, loss[loss=0.1267, simple_loss=0.1888, pruned_loss=0.03227, over 4809.00 frames.], tot_loss[loss=0.145, simple_loss=0.2171, pruned_loss=0.03647, over 972876.70 frames.], batch size: 12, lr: 2.84e-04 +2022-05-06 00:56:28,430 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 00:56:38,008 INFO [train.py:742] (7/8) Epoch 7, validation: loss=0.108, simple_loss=0.1927, pruned_loss=0.01164, over 914524.00 frames. 
+2022-05-06 00:57:17,520 INFO [train.py:715] (7/8) Epoch 7, batch 33050, loss[loss=0.1989, simple_loss=0.2774, pruned_loss=0.06025, over 4793.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03712, over 972847.12 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 00:57:57,505 INFO [train.py:715] (7/8) Epoch 7, batch 33100, loss[loss=0.1412, simple_loss=0.2233, pruned_loss=0.02954, over 4972.00 frames.], tot_loss[loss=0.146, simple_loss=0.2179, pruned_loss=0.03709, over 973360.51 frames.], batch size: 24, lr: 2.84e-04 +2022-05-06 00:58:36,957 INFO [train.py:715] (7/8) Epoch 7, batch 33150, loss[loss=0.1747, simple_loss=0.2304, pruned_loss=0.05943, over 4823.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03736, over 973055.14 frames.], batch size: 13, lr: 2.84e-04 +2022-05-06 00:59:16,725 INFO [train.py:715] (7/8) Epoch 7, batch 33200, loss[loss=0.1694, simple_loss=0.2213, pruned_loss=0.05879, over 4988.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2173, pruned_loss=0.03748, over 973406.25 frames.], batch size: 14, lr: 2.84e-04 +2022-05-06 00:59:56,296 INFO [train.py:715] (7/8) Epoch 7, batch 33250, loss[loss=0.114, simple_loss=0.1924, pruned_loss=0.01783, over 4904.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03723, over 973150.32 frames.], batch size: 19, lr: 2.84e-04 +2022-05-06 01:00:35,759 INFO [train.py:715] (7/8) Epoch 7, batch 33300, loss[loss=0.1376, simple_loss=0.2051, pruned_loss=0.03506, over 4906.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2171, pruned_loss=0.03701, over 973062.89 frames.], batch size: 22, lr: 2.84e-04 +2022-05-06 01:01:15,277 INFO [train.py:715] (7/8) Epoch 7, batch 33350, loss[loss=0.1292, simple_loss=0.2152, pruned_loss=0.02159, over 4797.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2167, pruned_loss=0.03678, over 972895.93 frames.], batch size: 21, lr: 2.84e-04 +2022-05-06 01:01:55,576 INFO [train.py:715] (7/8) Epoch 7, batch 33400, loss[loss=0.1643, simple_loss=0.2366, pruned_loss=0.04595, over 4960.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2167, pruned_loss=0.03642, over 971814.03 frames.], batch size: 24, lr: 2.84e-04 +2022-05-06 01:02:35,671 INFO [train.py:715] (7/8) Epoch 7, batch 33450, loss[loss=0.1135, simple_loss=0.1869, pruned_loss=0.0201, over 4839.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.03633, over 971998.08 frames.], batch size: 13, lr: 2.84e-04 +2022-05-06 01:03:16,255 INFO [train.py:715] (7/8) Epoch 7, batch 33500, loss[loss=0.1417, simple_loss=0.2217, pruned_loss=0.03081, over 4691.00 frames.], tot_loss[loss=0.145, simple_loss=0.2172, pruned_loss=0.0364, over 972093.16 frames.], batch size: 15, lr: 2.84e-04 +2022-05-06 01:03:56,833 INFO [train.py:715] (7/8) Epoch 7, batch 33550, loss[loss=0.135, simple_loss=0.2134, pruned_loss=0.02827, over 4869.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2167, pruned_loss=0.03625, over 971817.58 frames.], batch size: 16, lr: 2.84e-04 +2022-05-06 01:04:37,435 INFO [train.py:715] (7/8) Epoch 7, batch 33600, loss[loss=0.1411, simple_loss=0.215, pruned_loss=0.03359, over 4809.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2169, pruned_loss=0.03625, over 972764.58 frames.], batch size: 25, lr: 2.84e-04 +2022-05-06 01:05:17,938 INFO [train.py:715] (7/8) Epoch 7, batch 33650, loss[loss=0.1419, simple_loss=0.2089, pruned_loss=0.0375, over 4981.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.03625, over 974010.39 frames.], batch size: 15, lr: 2.84e-04 +2022-05-06 01:05:57,809 
INFO [train.py:715] (7/8) Epoch 7, batch 33700, loss[loss=0.1297, simple_loss=0.2073, pruned_loss=0.02603, over 4812.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2163, pruned_loss=0.03627, over 973750.59 frames.], batch size: 21, lr: 2.84e-04 +2022-05-06 01:06:37,963 INFO [train.py:715] (7/8) Epoch 7, batch 33750, loss[loss=0.1415, simple_loss=0.2081, pruned_loss=0.03748, over 4817.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03579, over 973488.84 frames.], batch size: 26, lr: 2.84e-04 +2022-05-06 01:07:17,447 INFO [train.py:715] (7/8) Epoch 7, batch 33800, loss[loss=0.1332, simple_loss=0.219, pruned_loss=0.02373, over 4876.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.03618, over 974102.21 frames.], batch size: 22, lr: 2.84e-04 +2022-05-06 01:07:58,045 INFO [train.py:715] (7/8) Epoch 7, batch 33850, loss[loss=0.1527, simple_loss=0.2238, pruned_loss=0.0408, over 4964.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2186, pruned_loss=0.03725, over 973694.02 frames.], batch size: 35, lr: 2.84e-04 +2022-05-06 01:08:37,725 INFO [train.py:715] (7/8) Epoch 7, batch 33900, loss[loss=0.1278, simple_loss=0.1918, pruned_loss=0.03191, over 4912.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2173, pruned_loss=0.03646, over 973849.42 frames.], batch size: 17, lr: 2.84e-04 +2022-05-06 01:09:17,828 INFO [train.py:715] (7/8) Epoch 7, batch 33950, loss[loss=0.1426, simple_loss=0.212, pruned_loss=0.03665, over 4845.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2179, pruned_loss=0.03693, over 973679.31 frames.], batch size: 13, lr: 2.84e-04 +2022-05-06 01:09:57,287 INFO [train.py:715] (7/8) Epoch 7, batch 34000, loss[loss=0.1539, simple_loss=0.2323, pruned_loss=0.03778, over 4844.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2188, pruned_loss=0.03715, over 972635.40 frames.], batch size: 34, lr: 2.84e-04 +2022-05-06 01:10:37,478 INFO [train.py:715] (7/8) Epoch 7, batch 34050, loss[loss=0.1567, simple_loss=0.2314, pruned_loss=0.04099, over 4767.00 frames.], tot_loss[loss=0.1465, simple_loss=0.2185, pruned_loss=0.0372, over 971711.96 frames.], batch size: 18, lr: 2.84e-04 +2022-05-06 01:11:17,479 INFO [train.py:715] (7/8) Epoch 7, batch 34100, loss[loss=0.1206, simple_loss=0.1982, pruned_loss=0.02149, over 4807.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2178, pruned_loss=0.03673, over 971230.78 frames.], batch size: 25, lr: 2.83e-04 +2022-05-06 01:11:56,982 INFO [train.py:715] (7/8) Epoch 7, batch 34150, loss[loss=0.1256, simple_loss=0.1934, pruned_loss=0.02891, over 4833.00 frames.], tot_loss[loss=0.1446, simple_loss=0.217, pruned_loss=0.0361, over 971761.00 frames.], batch size: 15, lr: 2.83e-04 +2022-05-06 01:12:37,403 INFO [train.py:715] (7/8) Epoch 7, batch 34200, loss[loss=0.1483, simple_loss=0.2197, pruned_loss=0.0384, over 4913.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.03651, over 972619.58 frames.], batch size: 29, lr: 2.83e-04 +2022-05-06 01:13:17,640 INFO [train.py:715] (7/8) Epoch 7, batch 34250, loss[loss=0.1254, simple_loss=0.1903, pruned_loss=0.03025, over 4730.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03646, over 972941.89 frames.], batch size: 16, lr: 2.83e-04 +2022-05-06 01:13:58,300 INFO [train.py:715] (7/8) Epoch 7, batch 34300, loss[loss=0.1387, simple_loss=0.2098, pruned_loss=0.03382, over 4978.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2159, pruned_loss=0.03611, over 972531.05 frames.], batch size: 15, lr: 2.83e-04 +2022-05-06 01:14:38,116 INFO [train.py:715] (7/8) 
Epoch 7, batch 34350, loss[loss=0.1313, simple_loss=0.2003, pruned_loss=0.03116, over 4696.00 frames.], tot_loss[loss=0.144, simple_loss=0.2157, pruned_loss=0.03617, over 971707.62 frames.], batch size: 15, lr: 2.83e-04 +2022-05-06 01:15:18,246 INFO [train.py:715] (7/8) Epoch 7, batch 34400, loss[loss=0.1554, simple_loss=0.23, pruned_loss=0.04039, over 4931.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2155, pruned_loss=0.03607, over 971445.59 frames.], batch size: 17, lr: 2.83e-04 +2022-05-06 01:15:58,921 INFO [train.py:715] (7/8) Epoch 7, batch 34450, loss[loss=0.159, simple_loss=0.241, pruned_loss=0.03848, over 4962.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.03613, over 972083.95 frames.], batch size: 28, lr: 2.83e-04 +2022-05-06 01:16:38,146 INFO [train.py:715] (7/8) Epoch 7, batch 34500, loss[loss=0.1459, simple_loss=0.2178, pruned_loss=0.03701, over 4904.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03625, over 971860.46 frames.], batch size: 17, lr: 2.83e-04 +2022-05-06 01:17:18,210 INFO [train.py:715] (7/8) Epoch 7, batch 34550, loss[loss=0.1564, simple_loss=0.2149, pruned_loss=0.04894, over 4876.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03624, over 971971.38 frames.], batch size: 16, lr: 2.83e-04 +2022-05-06 01:17:58,847 INFO [train.py:715] (7/8) Epoch 7, batch 34600, loss[loss=0.1246, simple_loss=0.1936, pruned_loss=0.02778, over 4780.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03635, over 972585.15 frames.], batch size: 14, lr: 2.83e-04 +2022-05-06 01:18:38,812 INFO [train.py:715] (7/8) Epoch 7, batch 34650, loss[loss=0.1248, simple_loss=0.2025, pruned_loss=0.02352, over 4816.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03688, over 971923.69 frames.], batch size: 12, lr: 2.83e-04 +2022-05-06 01:19:19,027 INFO [train.py:715] (7/8) Epoch 7, batch 34700, loss[loss=0.1542, simple_loss=0.2193, pruned_loss=0.04454, over 4858.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2173, pruned_loss=0.03692, over 971263.03 frames.], batch size: 32, lr: 2.83e-04 +2022-05-06 01:19:57,526 INFO [train.py:715] (7/8) Epoch 7, batch 34750, loss[loss=0.1305, simple_loss=0.2048, pruned_loss=0.02811, over 4946.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2174, pruned_loss=0.03689, over 971958.42 frames.], batch size: 29, lr: 2.83e-04 +2022-05-06 01:20:35,932 INFO [train.py:715] (7/8) Epoch 7, batch 34800, loss[loss=0.1115, simple_loss=0.184, pruned_loss=0.01953, over 4822.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2157, pruned_loss=0.0363, over 970986.12 frames.], batch size: 12, lr: 2.83e-04 +2022-05-06 01:21:27,011 INFO [train.py:715] (7/8) Epoch 8, batch 0, loss[loss=0.1634, simple_loss=0.2283, pruned_loss=0.04926, over 4884.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2283, pruned_loss=0.04926, over 4884.00 frames.], batch size: 32, lr: 2.69e-04 +2022-05-06 01:22:06,298 INFO [train.py:715] (7/8) Epoch 8, batch 50, loss[loss=0.1422, simple_loss=0.2126, pruned_loss=0.03588, over 4832.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2165, pruned_loss=0.03686, over 219614.27 frames.], batch size: 30, lr: 2.69e-04 +2022-05-06 01:22:47,067 INFO [train.py:715] (7/8) Epoch 8, batch 100, loss[loss=0.1483, simple_loss=0.2122, pruned_loss=0.04221, over 4841.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2156, pruned_loss=0.0363, over 386756.23 frames.], batch size: 32, lr: 2.69e-04 +2022-05-06 01:23:26,802 INFO [train.py:715] (7/8) Epoch 8, batch 150, loss[loss=0.1542, 
simple_loss=0.2145, pruned_loss=0.04692, over 4908.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2158, pruned_loss=0.0362, over 516034.50 frames.], batch size: 17, lr: 2.69e-04 +2022-05-06 01:24:07,303 INFO [train.py:715] (7/8) Epoch 8, batch 200, loss[loss=0.1703, simple_loss=0.2462, pruned_loss=0.04716, over 4869.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2161, pruned_loss=0.03616, over 617217.66 frames.], batch size: 22, lr: 2.69e-04 +2022-05-06 01:24:47,113 INFO [train.py:715] (7/8) Epoch 8, batch 250, loss[loss=0.154, simple_loss=0.2264, pruned_loss=0.04078, over 4876.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03613, over 696389.98 frames.], batch size: 39, lr: 2.69e-04 +2022-05-06 01:25:27,372 INFO [train.py:715] (7/8) Epoch 8, batch 300, loss[loss=0.12, simple_loss=0.1936, pruned_loss=0.0232, over 4778.00 frames.], tot_loss[loss=0.1448, simple_loss=0.217, pruned_loss=0.03628, over 756910.70 frames.], batch size: 12, lr: 2.69e-04 +2022-05-06 01:26:07,151 INFO [train.py:715] (7/8) Epoch 8, batch 350, loss[loss=0.1439, simple_loss=0.2121, pruned_loss=0.03786, over 4778.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2175, pruned_loss=0.03672, over 804335.59 frames.], batch size: 18, lr: 2.69e-04 +2022-05-06 01:26:46,036 INFO [train.py:715] (7/8) Epoch 8, batch 400, loss[loss=0.1429, simple_loss=0.2075, pruned_loss=0.03921, over 4988.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2175, pruned_loss=0.03672, over 842483.40 frames.], batch size: 28, lr: 2.69e-04 +2022-05-06 01:27:26,634 INFO [train.py:715] (7/8) Epoch 8, batch 450, loss[loss=0.1214, simple_loss=0.1951, pruned_loss=0.02385, over 4907.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03653, over 870602.89 frames.], batch size: 29, lr: 2.69e-04 +2022-05-06 01:28:06,608 INFO [train.py:715] (7/8) Epoch 8, batch 500, loss[loss=0.1544, simple_loss=0.2287, pruned_loss=0.04011, over 4873.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2169, pruned_loss=0.03634, over 892984.02 frames.], batch size: 16, lr: 2.69e-04 +2022-05-06 01:28:47,246 INFO [train.py:715] (7/8) Epoch 8, batch 550, loss[loss=0.1596, simple_loss=0.2347, pruned_loss=0.04224, over 4966.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2173, pruned_loss=0.03662, over 910855.34 frames.], batch size: 35, lr: 2.69e-04 +2022-05-06 01:29:26,914 INFO [train.py:715] (7/8) Epoch 8, batch 600, loss[loss=0.166, simple_loss=0.234, pruned_loss=0.04903, over 4844.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.03663, over 924472.12 frames.], batch size: 30, lr: 2.69e-04 +2022-05-06 01:30:07,133 INFO [train.py:715] (7/8) Epoch 8, batch 650, loss[loss=0.1726, simple_loss=0.2343, pruned_loss=0.05543, over 4824.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2169, pruned_loss=0.03678, over 936057.39 frames.], batch size: 30, lr: 2.68e-04 +2022-05-06 01:30:47,387 INFO [train.py:715] (7/8) Epoch 8, batch 700, loss[loss=0.1307, simple_loss=0.2009, pruned_loss=0.03025, over 4892.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2161, pruned_loss=0.03639, over 943792.36 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:31:27,085 INFO [train.py:715] (7/8) Epoch 8, batch 750, loss[loss=0.1162, simple_loss=0.1869, pruned_loss=0.02281, over 4785.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.03626, over 950001.57 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:32:07,145 INFO [train.py:715] (7/8) Epoch 8, batch 800, loss[loss=0.1565, simple_loss=0.2202, pruned_loss=0.04636, over 
4792.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03621, over 955304.27 frames.], batch size: 24, lr: 2.68e-04 +2022-05-06 01:32:47,136 INFO [train.py:715] (7/8) Epoch 8, batch 850, loss[loss=0.1311, simple_loss=0.21, pruned_loss=0.02615, over 4851.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03646, over 958244.37 frames.], batch size: 20, lr: 2.68e-04 +2022-05-06 01:33:28,550 INFO [train.py:715] (7/8) Epoch 8, batch 900, loss[loss=0.1873, simple_loss=0.2453, pruned_loss=0.06465, over 4988.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2173, pruned_loss=0.0368, over 961673.78 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:34:08,656 INFO [train.py:715] (7/8) Epoch 8, batch 950, loss[loss=0.1223, simple_loss=0.2036, pruned_loss=0.02046, over 4800.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2178, pruned_loss=0.03716, over 963129.52 frames.], batch size: 21, lr: 2.68e-04 +2022-05-06 01:34:49,709 INFO [train.py:715] (7/8) Epoch 8, batch 1000, loss[loss=0.1436, simple_loss=0.2094, pruned_loss=0.03888, over 4759.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2174, pruned_loss=0.03709, over 964908.98 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:35:30,790 INFO [train.py:715] (7/8) Epoch 8, batch 1050, loss[loss=0.1759, simple_loss=0.2421, pruned_loss=0.05484, over 4979.00 frames.], tot_loss[loss=0.1466, simple_loss=0.2181, pruned_loss=0.03756, over 967375.39 frames.], batch size: 15, lr: 2.68e-04 +2022-05-06 01:36:11,905 INFO [train.py:715] (7/8) Epoch 8, batch 1100, loss[loss=0.1361, simple_loss=0.2014, pruned_loss=0.03542, over 4841.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2177, pruned_loss=0.03732, over 968665.89 frames.], batch size: 32, lr: 2.68e-04 +2022-05-06 01:36:52,404 INFO [train.py:715] (7/8) Epoch 8, batch 1150, loss[loss=0.1684, simple_loss=0.2363, pruned_loss=0.05025, over 4758.00 frames.], tot_loss[loss=0.1455, simple_loss=0.217, pruned_loss=0.03697, over 968990.99 frames.], batch size: 19, lr: 2.68e-04 +2022-05-06 01:37:33,432 INFO [train.py:715] (7/8) Epoch 8, batch 1200, loss[loss=0.131, simple_loss=0.2114, pruned_loss=0.02526, over 4830.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03672, over 969367.46 frames.], batch size: 26, lr: 2.68e-04 +2022-05-06 01:38:14,753 INFO [train.py:715] (7/8) Epoch 8, batch 1250, loss[loss=0.1552, simple_loss=0.232, pruned_loss=0.03919, over 4691.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03656, over 969721.47 frames.], batch size: 15, lr: 2.68e-04 +2022-05-06 01:38:55,094 INFO [train.py:715] (7/8) Epoch 8, batch 1300, loss[loss=0.1418, simple_loss=0.2152, pruned_loss=0.03422, over 4897.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03637, over 971015.89 frames.], batch size: 39, lr: 2.68e-04 +2022-05-06 01:39:36,449 INFO [train.py:715] (7/8) Epoch 8, batch 1350, loss[loss=0.1412, simple_loss=0.2321, pruned_loss=0.02514, over 4901.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03647, over 971604.59 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:40:17,098 INFO [train.py:715] (7/8) Epoch 8, batch 1400, loss[loss=0.1688, simple_loss=0.235, pruned_loss=0.05133, over 4952.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03617, over 972210.43 frames.], batch size: 39, lr: 2.68e-04 +2022-05-06 01:40:57,931 INFO [train.py:715] (7/8) Epoch 8, batch 1450, loss[loss=0.1577, simple_loss=0.2331, pruned_loss=0.04117, over 4813.00 frames.], tot_loss[loss=0.1449, 
simple_loss=0.2169, pruned_loss=0.03646, over 971902.73 frames.], batch size: 21, lr: 2.68e-04 +2022-05-06 01:41:37,779 INFO [train.py:715] (7/8) Epoch 8, batch 1500, loss[loss=0.1413, simple_loss=0.2124, pruned_loss=0.03513, over 4790.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.0366, over 971930.33 frames.], batch size: 17, lr: 2.68e-04 +2022-05-06 01:42:20,412 INFO [train.py:715] (7/8) Epoch 8, batch 1550, loss[loss=0.1536, simple_loss=0.2305, pruned_loss=0.03838, over 4861.00 frames.], tot_loss[loss=0.1453, simple_loss=0.217, pruned_loss=0.03679, over 971791.24 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:43:00,532 INFO [train.py:715] (7/8) Epoch 8, batch 1600, loss[loss=0.1587, simple_loss=0.2409, pruned_loss=0.03824, over 4897.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2173, pruned_loss=0.03671, over 971640.20 frames.], batch size: 16, lr: 2.68e-04 +2022-05-06 01:43:39,975 INFO [train.py:715] (7/8) Epoch 8, batch 1650, loss[loss=0.1283, simple_loss=0.2074, pruned_loss=0.02463, over 4803.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.03646, over 972421.39 frames.], batch size: 24, lr: 2.68e-04 +2022-05-06 01:44:20,196 INFO [train.py:715] (7/8) Epoch 8, batch 1700, loss[loss=0.1638, simple_loss=0.224, pruned_loss=0.05179, over 4858.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2163, pruned_loss=0.03647, over 972443.86 frames.], batch size: 32, lr: 2.68e-04 +2022-05-06 01:44:59,609 INFO [train.py:715] (7/8) Epoch 8, batch 1750, loss[loss=0.1241, simple_loss=0.2002, pruned_loss=0.02404, over 4946.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2165, pruned_loss=0.03639, over 972503.28 frames.], batch size: 29, lr: 2.68e-04 +2022-05-06 01:45:39,057 INFO [train.py:715] (7/8) Epoch 8, batch 1800, loss[loss=0.1396, simple_loss=0.2098, pruned_loss=0.03473, over 4976.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2163, pruned_loss=0.03644, over 972177.68 frames.], batch size: 15, lr: 2.68e-04 +2022-05-06 01:46:18,115 INFO [train.py:715] (7/8) Epoch 8, batch 1850, loss[loss=0.1214, simple_loss=0.1929, pruned_loss=0.02493, over 4824.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03634, over 971401.81 frames.], batch size: 13, lr: 2.68e-04 +2022-05-06 01:46:57,512 INFO [train.py:715] (7/8) Epoch 8, batch 1900, loss[loss=0.1305, simple_loss=0.1975, pruned_loss=0.03169, over 4768.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2166, pruned_loss=0.03636, over 971993.76 frames.], batch size: 12, lr: 2.68e-04 +2022-05-06 01:47:37,012 INFO [train.py:715] (7/8) Epoch 8, batch 1950, loss[loss=0.1509, simple_loss=0.2299, pruned_loss=0.03598, over 4898.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03655, over 972013.42 frames.], batch size: 22, lr: 2.68e-04 +2022-05-06 01:48:16,132 INFO [train.py:715] (7/8) Epoch 8, batch 2000, loss[loss=0.1556, simple_loss=0.2383, pruned_loss=0.03647, over 4787.00 frames.], tot_loss[loss=0.1448, simple_loss=0.217, pruned_loss=0.03629, over 971731.65 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:48:56,145 INFO [train.py:715] (7/8) Epoch 8, batch 2050, loss[loss=0.1392, simple_loss=0.2085, pruned_loss=0.03498, over 4736.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.03636, over 972467.11 frames.], batch size: 16, lr: 2.68e-04 +2022-05-06 01:49:35,101 INFO [train.py:715] (7/8) Epoch 8, batch 2100, loss[loss=0.1364, simple_loss=0.2002, pruned_loss=0.03632, over 4867.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, 
pruned_loss=0.03686, over 972881.86 frames.], batch size: 20, lr: 2.68e-04 +2022-05-06 01:50:14,050 INFO [train.py:715] (7/8) Epoch 8, batch 2150, loss[loss=0.1467, simple_loss=0.2202, pruned_loss=0.0366, over 4919.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.03659, over 973593.60 frames.], batch size: 23, lr: 2.68e-04 +2022-05-06 01:50:53,034 INFO [train.py:715] (7/8) Epoch 8, batch 2200, loss[loss=0.1485, simple_loss=0.2238, pruned_loss=0.03655, over 4745.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03608, over 973220.31 frames.], batch size: 16, lr: 2.68e-04 +2022-05-06 01:51:32,661 INFO [train.py:715] (7/8) Epoch 8, batch 2250, loss[loss=0.1443, simple_loss=0.2134, pruned_loss=0.03757, over 4783.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03601, over 972566.28 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:52:12,078 INFO [train.py:715] (7/8) Epoch 8, batch 2300, loss[loss=0.1535, simple_loss=0.2176, pruned_loss=0.04467, over 4876.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2167, pruned_loss=0.03619, over 972013.08 frames.], batch size: 16, lr: 2.68e-04 +2022-05-06 01:52:50,787 INFO [train.py:715] (7/8) Epoch 8, batch 2350, loss[loss=0.144, simple_loss=0.2175, pruned_loss=0.03528, over 4785.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03602, over 972337.31 frames.], batch size: 14, lr: 2.68e-04 +2022-05-06 01:53:30,840 INFO [train.py:715] (7/8) Epoch 8, batch 2400, loss[loss=0.1387, simple_loss=0.2108, pruned_loss=0.03326, over 4847.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2168, pruned_loss=0.03587, over 972097.64 frames.], batch size: 13, lr: 2.68e-04 +2022-05-06 01:54:10,338 INFO [train.py:715] (7/8) Epoch 8, batch 2450, loss[loss=0.1229, simple_loss=0.2058, pruned_loss=0.02, over 4788.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2166, pruned_loss=0.03532, over 971572.49 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:54:49,893 INFO [train.py:715] (7/8) Epoch 8, batch 2500, loss[loss=0.165, simple_loss=0.2437, pruned_loss=0.04316, over 4779.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2158, pruned_loss=0.03485, over 971594.92 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:55:28,677 INFO [train.py:715] (7/8) Epoch 8, batch 2550, loss[loss=0.1183, simple_loss=0.1848, pruned_loss=0.02589, over 4762.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2163, pruned_loss=0.03519, over 972007.85 frames.], batch size: 18, lr: 2.68e-04 +2022-05-06 01:56:08,303 INFO [train.py:715] (7/8) Epoch 8, batch 2600, loss[loss=0.1578, simple_loss=0.2282, pruned_loss=0.04373, over 4991.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2171, pruned_loss=0.0362, over 972702.72 frames.], batch size: 20, lr: 2.68e-04 +2022-05-06 01:56:47,552 INFO [train.py:715] (7/8) Epoch 8, batch 2650, loss[loss=0.1373, simple_loss=0.2051, pruned_loss=0.03476, over 4775.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2171, pruned_loss=0.03652, over 972511.15 frames.], batch size: 17, lr: 2.68e-04 +2022-05-06 01:57:27,034 INFO [train.py:715] (7/8) Epoch 8, batch 2700, loss[loss=0.1314, simple_loss=0.2012, pruned_loss=0.03079, over 4810.00 frames.], tot_loss[loss=0.1453, simple_loss=0.217, pruned_loss=0.0368, over 971921.27 frames.], batch size: 26, lr: 2.68e-04 +2022-05-06 01:58:06,372 INFO [train.py:715] (7/8) Epoch 8, batch 2750, loss[loss=0.1202, simple_loss=0.1952, pruned_loss=0.02254, over 4804.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2163, pruned_loss=0.03677, over 972181.00 
frames.], batch size: 25, lr: 2.67e-04 +2022-05-06 01:58:45,751 INFO [train.py:715] (7/8) Epoch 8, batch 2800, loss[loss=0.1461, simple_loss=0.2263, pruned_loss=0.03299, over 4804.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2176, pruned_loss=0.037, over 972313.08 frames.], batch size: 24, lr: 2.67e-04 +2022-05-06 01:59:24,998 INFO [train.py:715] (7/8) Epoch 8, batch 2850, loss[loss=0.1663, simple_loss=0.2319, pruned_loss=0.05031, over 4986.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2177, pruned_loss=0.03677, over 972611.86 frames.], batch size: 33, lr: 2.67e-04 +2022-05-06 02:00:03,842 INFO [train.py:715] (7/8) Epoch 8, batch 2900, loss[loss=0.1336, simple_loss=0.2003, pruned_loss=0.03347, over 4727.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03676, over 971248.76 frames.], batch size: 12, lr: 2.67e-04 +2022-05-06 02:00:43,808 INFO [train.py:715] (7/8) Epoch 8, batch 2950, loss[loss=0.1574, simple_loss=0.2213, pruned_loss=0.04678, over 4741.00 frames.], tot_loss[loss=0.145, simple_loss=0.2167, pruned_loss=0.03663, over 972076.18 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:01:22,468 INFO [train.py:715] (7/8) Epoch 8, batch 3000, loss[loss=0.1323, simple_loss=0.2088, pruned_loss=0.0279, over 4958.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03653, over 971914.54 frames.], batch size: 35, lr: 2.67e-04 +2022-05-06 02:01:22,468 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 02:01:32,130 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1076, simple_loss=0.1923, pruned_loss=0.0115, over 914524.00 frames. +2022-05-06 02:02:11,366 INFO [train.py:715] (7/8) Epoch 8, batch 3050, loss[loss=0.1892, simple_loss=0.2593, pruned_loss=0.0596, over 4976.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2172, pruned_loss=0.03668, over 972202.08 frames.], batch size: 39, lr: 2.67e-04 +2022-05-06 02:02:50,370 INFO [train.py:715] (7/8) Epoch 8, batch 3100, loss[loss=0.1598, simple_loss=0.2421, pruned_loss=0.03871, over 4871.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2174, pruned_loss=0.03704, over 972295.64 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:03:29,325 INFO [train.py:715] (7/8) Epoch 8, batch 3150, loss[loss=0.1385, simple_loss=0.2093, pruned_loss=0.03387, over 4962.00 frames.], tot_loss[loss=0.1459, simple_loss=0.218, pruned_loss=0.0369, over 972020.49 frames.], batch size: 35, lr: 2.67e-04 +2022-05-06 02:04:09,016 INFO [train.py:715] (7/8) Epoch 8, batch 3200, loss[loss=0.112, simple_loss=0.1905, pruned_loss=0.01676, over 4976.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2176, pruned_loss=0.0365, over 972284.39 frames.], batch size: 24, lr: 2.67e-04 +2022-05-06 02:04:48,446 INFO [train.py:715] (7/8) Epoch 8, batch 3250, loss[loss=0.1472, simple_loss=0.2196, pruned_loss=0.03737, over 4790.00 frames.], tot_loss[loss=0.145, simple_loss=0.217, pruned_loss=0.03649, over 972473.49 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:05:28,480 INFO [train.py:715] (7/8) Epoch 8, batch 3300, loss[loss=0.1993, simple_loss=0.2569, pruned_loss=0.07084, over 4902.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03681, over 972484.55 frames.], batch size: 17, lr: 2.67e-04 +2022-05-06 02:06:08,838 INFO [train.py:715] (7/8) Epoch 8, batch 3350, loss[loss=0.1605, simple_loss=0.2293, pruned_loss=0.04589, over 4974.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2168, pruned_loss=0.03592, over 973118.40 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:06:49,936 INFO [train.py:715] 
(7/8) Epoch 8, batch 3400, loss[loss=0.1715, simple_loss=0.2352, pruned_loss=0.05385, over 4907.00 frames.], tot_loss[loss=0.144, simple_loss=0.2165, pruned_loss=0.03577, over 973293.18 frames.], batch size: 17, lr: 2.67e-04 +2022-05-06 02:07:30,802 INFO [train.py:715] (7/8) Epoch 8, batch 3450, loss[loss=0.1738, simple_loss=0.2484, pruned_loss=0.04965, over 4946.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2173, pruned_loss=0.03624, over 973329.57 frames.], batch size: 23, lr: 2.67e-04 +2022-05-06 02:08:11,008 INFO [train.py:715] (7/8) Epoch 8, batch 3500, loss[loss=0.1699, simple_loss=0.2473, pruned_loss=0.04627, over 4888.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2173, pruned_loss=0.03625, over 973804.25 frames.], batch size: 22, lr: 2.67e-04 +2022-05-06 02:08:52,349 INFO [train.py:715] (7/8) Epoch 8, batch 3550, loss[loss=0.1227, simple_loss=0.1874, pruned_loss=0.02899, over 4806.00 frames.], tot_loss[loss=0.1446, simple_loss=0.217, pruned_loss=0.03605, over 973468.57 frames.], batch size: 12, lr: 2.67e-04 +2022-05-06 02:09:33,202 INFO [train.py:715] (7/8) Epoch 8, batch 3600, loss[loss=0.1446, simple_loss=0.2174, pruned_loss=0.03596, over 4957.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2161, pruned_loss=0.03551, over 973878.03 frames.], batch size: 39, lr: 2.67e-04 +2022-05-06 02:10:13,458 INFO [train.py:715] (7/8) Epoch 8, batch 3650, loss[loss=0.1506, simple_loss=0.2297, pruned_loss=0.03576, over 4830.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2163, pruned_loss=0.03596, over 972931.42 frames.], batch size: 25, lr: 2.67e-04 +2022-05-06 02:10:53,935 INFO [train.py:715] (7/8) Epoch 8, batch 3700, loss[loss=0.133, simple_loss=0.2009, pruned_loss=0.03255, over 4926.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.03575, over 972916.34 frames.], batch size: 29, lr: 2.67e-04 +2022-05-06 02:11:34,281 INFO [train.py:715] (7/8) Epoch 8, batch 3750, loss[loss=0.1431, simple_loss=0.2177, pruned_loss=0.03428, over 4961.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2148, pruned_loss=0.03527, over 973238.91 frames.], batch size: 35, lr: 2.67e-04 +2022-05-06 02:12:13,644 INFO [train.py:715] (7/8) Epoch 8, batch 3800, loss[loss=0.1326, simple_loss=0.2118, pruned_loss=0.02674, over 4938.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2152, pruned_loss=0.03523, over 972621.89 frames.], batch size: 29, lr: 2.67e-04 +2022-05-06 02:12:54,034 INFO [train.py:715] (7/8) Epoch 8, batch 3850, loss[loss=0.1233, simple_loss=0.1832, pruned_loss=0.03174, over 4870.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03531, over 972995.66 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:13:34,219 INFO [train.py:715] (7/8) Epoch 8, batch 3900, loss[loss=0.1665, simple_loss=0.2342, pruned_loss=0.04938, over 4770.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2148, pruned_loss=0.03506, over 973212.78 frames.], batch size: 18, lr: 2.67e-04 +2022-05-06 02:14:14,990 INFO [train.py:715] (7/8) Epoch 8, batch 3950, loss[loss=0.1493, simple_loss=0.2121, pruned_loss=0.04323, over 4970.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03497, over 973218.64 frames.], batch size: 39, lr: 2.67e-04 +2022-05-06 02:14:54,902 INFO [train.py:715] (7/8) Epoch 8, batch 4000, loss[loss=0.1377, simple_loss=0.2041, pruned_loss=0.03559, over 4881.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03553, over 972415.09 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:15:35,361 INFO [train.py:715] (7/8) Epoch 8, batch 4050, 
loss[loss=0.1239, simple_loss=0.1996, pruned_loss=0.02411, over 4807.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2169, pruned_loss=0.03623, over 972051.80 frames.], batch size: 21, lr: 2.67e-04 +2022-05-06 02:16:16,173 INFO [train.py:715] (7/8) Epoch 8, batch 4100, loss[loss=0.1604, simple_loss=0.2299, pruned_loss=0.04547, over 4760.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03653, over 971531.33 frames.], batch size: 19, lr: 2.67e-04 +2022-05-06 02:16:55,925 INFO [train.py:715] (7/8) Epoch 8, batch 4150, loss[loss=0.1414, simple_loss=0.2155, pruned_loss=0.03367, over 4805.00 frames.], tot_loss[loss=0.1454, simple_loss=0.217, pruned_loss=0.03684, over 972079.94 frames.], batch size: 15, lr: 2.67e-04 +2022-05-06 02:17:35,659 INFO [train.py:715] (7/8) Epoch 8, batch 4200, loss[loss=0.1373, simple_loss=0.2092, pruned_loss=0.03274, over 4925.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03669, over 972606.68 frames.], batch size: 18, lr: 2.67e-04 +2022-05-06 02:18:15,233 INFO [train.py:715] (7/8) Epoch 8, batch 4250, loss[loss=0.136, simple_loss=0.2023, pruned_loss=0.03481, over 4949.00 frames.], tot_loss[loss=0.146, simple_loss=0.2175, pruned_loss=0.03721, over 972484.62 frames.], batch size: 24, lr: 2.67e-04 +2022-05-06 02:18:54,989 INFO [train.py:715] (7/8) Epoch 8, batch 4300, loss[loss=0.1194, simple_loss=0.1859, pruned_loss=0.0265, over 4907.00 frames.], tot_loss[loss=0.1464, simple_loss=0.2179, pruned_loss=0.03744, over 972722.78 frames.], batch size: 19, lr: 2.67e-04 +2022-05-06 02:19:34,152 INFO [train.py:715] (7/8) Epoch 8, batch 4350, loss[loss=0.1318, simple_loss=0.2037, pruned_loss=0.02998, over 4882.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2175, pruned_loss=0.0368, over 972483.53 frames.], batch size: 16, lr: 2.67e-04 +2022-05-06 02:20:13,544 INFO [train.py:715] (7/8) Epoch 8, batch 4400, loss[loss=0.1329, simple_loss=0.1998, pruned_loss=0.03305, over 4934.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03674, over 972523.12 frames.], batch size: 23, lr: 2.67e-04 +2022-05-06 02:20:53,464 INFO [train.py:715] (7/8) Epoch 8, batch 4450, loss[loss=0.1702, simple_loss=0.2444, pruned_loss=0.04798, over 4796.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2169, pruned_loss=0.03673, over 973390.53 frames.], batch size: 21, lr: 2.67e-04 +2022-05-06 02:21:33,238 INFO [train.py:715] (7/8) Epoch 8, batch 4500, loss[loss=0.1477, simple_loss=0.2187, pruned_loss=0.03841, over 4859.00 frames.], tot_loss[loss=0.145, simple_loss=0.2166, pruned_loss=0.03667, over 973443.07 frames.], batch size: 32, lr: 2.67e-04 +2022-05-06 02:22:12,204 INFO [train.py:715] (7/8) Epoch 8, batch 4550, loss[loss=0.1319, simple_loss=0.211, pruned_loss=0.0264, over 4798.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03626, over 974106.39 frames.], batch size: 25, lr: 2.67e-04 +2022-05-06 02:22:52,187 INFO [train.py:715] (7/8) Epoch 8, batch 4600, loss[loss=0.1306, simple_loss=0.2057, pruned_loss=0.02779, over 4895.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03598, over 973336.69 frames.], batch size: 39, lr: 2.67e-04 +2022-05-06 02:23:31,720 INFO [train.py:715] (7/8) Epoch 8, batch 4650, loss[loss=0.152, simple_loss=0.2245, pruned_loss=0.03972, over 4939.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03662, over 972639.78 frames.], batch size: 24, lr: 2.67e-04 +2022-05-06 02:24:11,301 INFO [train.py:715] (7/8) Epoch 8, batch 4700, loss[loss=0.1412, simple_loss=0.2226, 
pruned_loss=0.02991, over 4805.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2166, pruned_loss=0.03638, over 972463.38 frames.], batch size: 25, lr: 2.67e-04 +2022-05-06 02:24:50,829 INFO [train.py:715] (7/8) Epoch 8, batch 4750, loss[loss=0.1216, simple_loss=0.1943, pruned_loss=0.02448, over 4775.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2174, pruned_loss=0.03716, over 972542.50 frames.], batch size: 14, lr: 2.67e-04 +2022-05-06 02:25:30,487 INFO [train.py:715] (7/8) Epoch 8, batch 4800, loss[loss=0.1309, simple_loss=0.2023, pruned_loss=0.02976, over 4885.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2172, pruned_loss=0.03697, over 972712.11 frames.], batch size: 22, lr: 2.67e-04 +2022-05-06 02:26:10,391 INFO [train.py:715] (7/8) Epoch 8, batch 4850, loss[loss=0.1457, simple_loss=0.2108, pruned_loss=0.04029, over 4785.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2171, pruned_loss=0.03688, over 972140.32 frames.], batch size: 14, lr: 2.66e-04 +2022-05-06 02:26:49,516 INFO [train.py:715] (7/8) Epoch 8, batch 4900, loss[loss=0.1525, simple_loss=0.2295, pruned_loss=0.03778, over 4889.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2181, pruned_loss=0.03724, over 972797.67 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:27:29,278 INFO [train.py:715] (7/8) Epoch 8, batch 4950, loss[loss=0.143, simple_loss=0.2082, pruned_loss=0.03887, over 4786.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2175, pruned_loss=0.03694, over 972254.10 frames.], batch size: 18, lr: 2.66e-04 +2022-05-06 02:28:08,943 INFO [train.py:715] (7/8) Epoch 8, batch 5000, loss[loss=0.1552, simple_loss=0.2319, pruned_loss=0.03918, over 4813.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2179, pruned_loss=0.03738, over 972354.37 frames.], batch size: 26, lr: 2.66e-04 +2022-05-06 02:28:47,812 INFO [train.py:715] (7/8) Epoch 8, batch 5050, loss[loss=0.1201, simple_loss=0.199, pruned_loss=0.02062, over 4819.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2175, pruned_loss=0.03655, over 973429.11 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:29:26,964 INFO [train.py:715] (7/8) Epoch 8, batch 5100, loss[loss=0.1758, simple_loss=0.2315, pruned_loss=0.06009, over 4863.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03614, over 973555.63 frames.], batch size: 32, lr: 2.66e-04 +2022-05-06 02:30:06,423 INFO [train.py:715] (7/8) Epoch 8, batch 5150, loss[loss=0.1304, simple_loss=0.2067, pruned_loss=0.02701, over 4825.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2166, pruned_loss=0.03657, over 974146.62 frames.], batch size: 13, lr: 2.66e-04 +2022-05-06 02:30:45,328 INFO [train.py:715] (7/8) Epoch 8, batch 5200, loss[loss=0.1238, simple_loss=0.2015, pruned_loss=0.02306, over 4987.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.03635, over 972848.98 frames.], batch size: 28, lr: 2.66e-04 +2022-05-06 02:31:24,027 INFO [train.py:715] (7/8) Epoch 8, batch 5250, loss[loss=0.1496, simple_loss=0.2213, pruned_loss=0.03893, over 4908.00 frames.], tot_loss[loss=0.144, simple_loss=0.2157, pruned_loss=0.03614, over 973187.51 frames.], batch size: 19, lr: 2.66e-04 +2022-05-06 02:32:04,135 INFO [train.py:715] (7/8) Epoch 8, batch 5300, loss[loss=0.1562, simple_loss=0.2245, pruned_loss=0.04399, over 4772.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2156, pruned_loss=0.03588, over 972138.53 frames.], batch size: 17, lr: 2.66e-04 +2022-05-06 02:32:43,760 INFO [train.py:715] (7/8) Epoch 8, batch 5350, loss[loss=0.1604, simple_loss=0.2285, pruned_loss=0.04619, over 4876.00 
frames.], tot_loss[loss=0.1451, simple_loss=0.2167, pruned_loss=0.03675, over 970854.77 frames.], batch size: 32, lr: 2.66e-04 +2022-05-06 02:33:23,693 INFO [train.py:715] (7/8) Epoch 8, batch 5400, loss[loss=0.1372, simple_loss=0.2119, pruned_loss=0.03124, over 4880.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03671, over 972525.23 frames.], batch size: 22, lr: 2.66e-04 +2022-05-06 02:34:04,182 INFO [train.py:715] (7/8) Epoch 8, batch 5450, loss[loss=0.1235, simple_loss=0.1978, pruned_loss=0.02461, over 4965.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2164, pruned_loss=0.0363, over 972215.90 frames.], batch size: 24, lr: 2.66e-04 +2022-05-06 02:34:44,677 INFO [train.py:715] (7/8) Epoch 8, batch 5500, loss[loss=0.1467, simple_loss=0.2271, pruned_loss=0.03314, over 4955.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03604, over 971877.17 frames.], batch size: 35, lr: 2.66e-04 +2022-05-06 02:35:24,971 INFO [train.py:715] (7/8) Epoch 8, batch 5550, loss[loss=0.1521, simple_loss=0.2318, pruned_loss=0.03613, over 4773.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2168, pruned_loss=0.03653, over 972905.92 frames.], batch size: 14, lr: 2.66e-04 +2022-05-06 02:36:04,811 INFO [train.py:715] (7/8) Epoch 8, batch 5600, loss[loss=0.1287, simple_loss=0.1851, pruned_loss=0.03613, over 4979.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2173, pruned_loss=0.03672, over 973011.93 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:36:44,877 INFO [train.py:715] (7/8) Epoch 8, batch 5650, loss[loss=0.1531, simple_loss=0.2222, pruned_loss=0.04203, over 4839.00 frames.], tot_loss[loss=0.145, simple_loss=0.2168, pruned_loss=0.03653, over 973002.97 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:37:23,999 INFO [train.py:715] (7/8) Epoch 8, batch 5700, loss[loss=0.1317, simple_loss=0.2089, pruned_loss=0.02724, over 4785.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03666, over 973250.34 frames.], batch size: 17, lr: 2.66e-04 +2022-05-06 02:38:03,517 INFO [train.py:715] (7/8) Epoch 8, batch 5750, loss[loss=0.1646, simple_loss=0.219, pruned_loss=0.05507, over 4829.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2175, pruned_loss=0.03733, over 973763.75 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:38:42,301 INFO [train.py:715] (7/8) Epoch 8, batch 5800, loss[loss=0.1395, simple_loss=0.206, pruned_loss=0.03648, over 4782.00 frames.], tot_loss[loss=0.1463, simple_loss=0.2182, pruned_loss=0.03718, over 973395.35 frames.], batch size: 14, lr: 2.66e-04 +2022-05-06 02:39:21,800 INFO [train.py:715] (7/8) Epoch 8, batch 5850, loss[loss=0.1577, simple_loss=0.2241, pruned_loss=0.04566, over 4970.00 frames.], tot_loss[loss=0.1459, simple_loss=0.2179, pruned_loss=0.03696, over 973481.25 frames.], batch size: 35, lr: 2.66e-04 +2022-05-06 02:40:00,569 INFO [train.py:715] (7/8) Epoch 8, batch 5900, loss[loss=0.1677, simple_loss=0.2278, pruned_loss=0.05377, over 4904.00 frames.], tot_loss[loss=0.146, simple_loss=0.2177, pruned_loss=0.03719, over 973093.94 frames.], batch size: 17, lr: 2.66e-04 +2022-05-06 02:40:40,149 INFO [train.py:715] (7/8) Epoch 8, batch 5950, loss[loss=0.1225, simple_loss=0.193, pruned_loss=0.02598, over 4986.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2168, pruned_loss=0.03667, over 972878.23 frames.], batch size: 31, lr: 2.66e-04 +2022-05-06 02:41:20,035 INFO [train.py:715] (7/8) Epoch 8, batch 6000, loss[loss=0.1275, simple_loss=0.1864, pruned_loss=0.03432, over 4829.00 frames.], tot_loss[loss=0.1442, 
simple_loss=0.2159, pruned_loss=0.03623, over 972043.44 frames.], batch size: 12, lr: 2.66e-04 +2022-05-06 02:41:20,036 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 02:41:29,609 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1075, simple_loss=0.1921, pruned_loss=0.01146, over 914524.00 frames. +2022-05-06 02:42:09,071 INFO [train.py:715] (7/8) Epoch 8, batch 6050, loss[loss=0.1293, simple_loss=0.2084, pruned_loss=0.02513, over 4960.00 frames.], tot_loss[loss=0.145, simple_loss=0.2167, pruned_loss=0.0366, over 972501.72 frames.], batch size: 24, lr: 2.66e-04 +2022-05-06 02:42:48,767 INFO [train.py:715] (7/8) Epoch 8, batch 6100, loss[loss=0.1515, simple_loss=0.2208, pruned_loss=0.04112, over 4900.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2165, pruned_loss=0.03649, over 971978.67 frames.], batch size: 18, lr: 2.66e-04 +2022-05-06 02:43:28,430 INFO [train.py:715] (7/8) Epoch 8, batch 6150, loss[loss=0.1334, simple_loss=0.2089, pruned_loss=0.02892, over 4881.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03639, over 972768.46 frames.], batch size: 16, lr: 2.66e-04 +2022-05-06 02:44:08,983 INFO [train.py:715] (7/8) Epoch 8, batch 6200, loss[loss=0.1468, simple_loss=0.2177, pruned_loss=0.0379, over 4751.00 frames.], tot_loss[loss=0.1452, simple_loss=0.217, pruned_loss=0.03664, over 973081.98 frames.], batch size: 19, lr: 2.66e-04 +2022-05-06 02:44:49,472 INFO [train.py:715] (7/8) Epoch 8, batch 6250, loss[loss=0.1257, simple_loss=0.1936, pruned_loss=0.0289, over 4966.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2165, pruned_loss=0.03659, over 972907.62 frames.], batch size: 24, lr: 2.66e-04 +2022-05-06 02:45:29,142 INFO [train.py:715] (7/8) Epoch 8, batch 6300, loss[loss=0.1356, simple_loss=0.2157, pruned_loss=0.02777, over 4973.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03626, over 972907.97 frames.], batch size: 24, lr: 2.66e-04 +2022-05-06 02:46:08,060 INFO [train.py:715] (7/8) Epoch 8, batch 6350, loss[loss=0.1343, simple_loss=0.213, pruned_loss=0.02784, over 4867.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03617, over 972316.14 frames.], batch size: 20, lr: 2.66e-04 +2022-05-06 02:46:47,831 INFO [train.py:715] (7/8) Epoch 8, batch 6400, loss[loss=0.1085, simple_loss=0.1802, pruned_loss=0.01838, over 4981.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.036, over 972766.34 frames.], batch size: 25, lr: 2.66e-04 +2022-05-06 02:47:27,064 INFO [train.py:715] (7/8) Epoch 8, batch 6450, loss[loss=0.1502, simple_loss=0.2178, pruned_loss=0.04131, over 4911.00 frames.], tot_loss[loss=0.1457, simple_loss=0.218, pruned_loss=0.03672, over 973137.53 frames.], batch size: 19, lr: 2.66e-04 +2022-05-06 02:48:06,515 INFO [train.py:715] (7/8) Epoch 8, batch 6500, loss[loss=0.1601, simple_loss=0.2291, pruned_loss=0.04558, over 4828.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2178, pruned_loss=0.0366, over 972982.51 frames.], batch size: 13, lr: 2.66e-04 +2022-05-06 02:48:45,640 INFO [train.py:715] (7/8) Epoch 8, batch 6550, loss[loss=0.1522, simple_loss=0.222, pruned_loss=0.04119, over 4983.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2176, pruned_loss=0.03646, over 973214.48 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:49:25,294 INFO [train.py:715] (7/8) Epoch 8, batch 6600, loss[loss=0.1215, simple_loss=0.1957, pruned_loss=0.0237, over 4991.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2176, pruned_loss=0.0364, over 972673.86 frames.], batch size: 14, lr: 
2.66e-04 +2022-05-06 02:50:04,619 INFO [train.py:715] (7/8) Epoch 8, batch 6650, loss[loss=0.163, simple_loss=0.2287, pruned_loss=0.04868, over 4955.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2179, pruned_loss=0.03649, over 973277.04 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:50:43,405 INFO [train.py:715] (7/8) Epoch 8, batch 6700, loss[loss=0.1264, simple_loss=0.1991, pruned_loss=0.02692, over 4810.00 frames.], tot_loss[loss=0.1456, simple_loss=0.218, pruned_loss=0.03658, over 972683.82 frames.], batch size: 26, lr: 2.66e-04 +2022-05-06 02:51:23,632 INFO [train.py:715] (7/8) Epoch 8, batch 6750, loss[loss=0.14, simple_loss=0.2108, pruned_loss=0.03457, over 4709.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2172, pruned_loss=0.03621, over 971482.20 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:52:03,057 INFO [train.py:715] (7/8) Epoch 8, batch 6800, loss[loss=0.1412, simple_loss=0.217, pruned_loss=0.03267, over 4951.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2172, pruned_loss=0.03615, over 971982.86 frames.], batch size: 21, lr: 2.66e-04 +2022-05-06 02:52:42,027 INFO [train.py:715] (7/8) Epoch 8, batch 6850, loss[loss=0.1843, simple_loss=0.242, pruned_loss=0.06327, over 4924.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2175, pruned_loss=0.03666, over 971906.40 frames.], batch size: 39, lr: 2.66e-04 +2022-05-06 02:53:21,949 INFO [train.py:715] (7/8) Epoch 8, batch 6900, loss[loss=0.1166, simple_loss=0.1881, pruned_loss=0.02256, over 4735.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03621, over 972194.28 frames.], batch size: 12, lr: 2.66e-04 +2022-05-06 02:54:02,356 INFO [train.py:715] (7/8) Epoch 8, batch 6950, loss[loss=0.1361, simple_loss=0.2047, pruned_loss=0.0338, over 4835.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2164, pruned_loss=0.03595, over 972446.09 frames.], batch size: 15, lr: 2.66e-04 +2022-05-06 02:54:42,172 INFO [train.py:715] (7/8) Epoch 8, batch 7000, loss[loss=0.1282, simple_loss=0.2146, pruned_loss=0.02085, over 4975.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2164, pruned_loss=0.03594, over 971888.71 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 02:55:21,782 INFO [train.py:715] (7/8) Epoch 8, batch 7050, loss[loss=0.1412, simple_loss=0.2151, pruned_loss=0.03367, over 4960.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.03623, over 972203.42 frames.], batch size: 24, lr: 2.65e-04 +2022-05-06 02:56:01,472 INFO [train.py:715] (7/8) Epoch 8, batch 7100, loss[loss=0.1514, simple_loss=0.2291, pruned_loss=0.03685, over 4827.00 frames.], tot_loss[loss=0.1449, simple_loss=0.217, pruned_loss=0.03641, over 972003.45 frames.], batch size: 27, lr: 2.65e-04 +2022-05-06 02:56:41,141 INFO [train.py:715] (7/8) Epoch 8, batch 7150, loss[loss=0.1475, simple_loss=0.2229, pruned_loss=0.03599, over 4880.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.0365, over 971792.67 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 02:57:20,443 INFO [train.py:715] (7/8) Epoch 8, batch 7200, loss[loss=0.1826, simple_loss=0.2498, pruned_loss=0.05769, over 4737.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2167, pruned_loss=0.03618, over 971270.11 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 02:57:59,447 INFO [train.py:715] (7/8) Epoch 8, batch 7250, loss[loss=0.1304, simple_loss=0.2072, pruned_loss=0.02675, over 4782.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2164, pruned_loss=0.03594, over 971323.21 frames.], batch size: 18, lr: 2.65e-04 +2022-05-06 02:58:39,554 INFO 
[train.py:715] (7/8) Epoch 8, batch 7300, loss[loss=0.1345, simple_loss=0.195, pruned_loss=0.03697, over 4784.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03564, over 972265.11 frames.], batch size: 12, lr: 2.65e-04 +2022-05-06 02:59:18,930 INFO [train.py:715] (7/8) Epoch 8, batch 7350, loss[loss=0.1641, simple_loss=0.2255, pruned_loss=0.05138, over 4955.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03551, over 972199.62 frames.], batch size: 21, lr: 2.65e-04 +2022-05-06 02:59:58,523 INFO [train.py:715] (7/8) Epoch 8, batch 7400, loss[loss=0.1379, simple_loss=0.2198, pruned_loss=0.02798, over 4921.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03547, over 972503.95 frames.], batch size: 39, lr: 2.65e-04 +2022-05-06 03:00:38,455 INFO [train.py:715] (7/8) Epoch 8, batch 7450, loss[loss=0.1532, simple_loss=0.2313, pruned_loss=0.03753, over 4858.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03561, over 971777.04 frames.], batch size: 32, lr: 2.65e-04 +2022-05-06 03:01:18,181 INFO [train.py:715] (7/8) Epoch 8, batch 7500, loss[loss=0.1398, simple_loss=0.2175, pruned_loss=0.03103, over 4888.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.0354, over 972134.07 frames.], batch size: 19, lr: 2.65e-04 +2022-05-06 03:01:57,872 INFO [train.py:715] (7/8) Epoch 8, batch 7550, loss[loss=0.1492, simple_loss=0.2205, pruned_loss=0.03894, over 4978.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03566, over 972922.54 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:02:37,820 INFO [train.py:715] (7/8) Epoch 8, batch 7600, loss[loss=0.1425, simple_loss=0.2142, pruned_loss=0.03545, over 4963.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2153, pruned_loss=0.03561, over 972852.31 frames.], batch size: 24, lr: 2.65e-04 +2022-05-06 03:03:17,990 INFO [train.py:715] (7/8) Epoch 8, batch 7650, loss[loss=0.1483, simple_loss=0.2274, pruned_loss=0.03455, over 4906.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03576, over 972833.27 frames.], batch size: 19, lr: 2.65e-04 +2022-05-06 03:03:57,440 INFO [train.py:715] (7/8) Epoch 8, batch 7700, loss[loss=0.1714, simple_loss=0.237, pruned_loss=0.05293, over 4976.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2164, pruned_loss=0.03577, over 971410.44 frames.], batch size: 39, lr: 2.65e-04 +2022-05-06 03:04:36,611 INFO [train.py:715] (7/8) Epoch 8, batch 7750, loss[loss=0.1457, simple_loss=0.2129, pruned_loss=0.03928, over 4942.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03532, over 971498.33 frames.], batch size: 35, lr: 2.65e-04 +2022-05-06 03:05:16,802 INFO [train.py:715] (7/8) Epoch 8, batch 7800, loss[loss=0.1368, simple_loss=0.2095, pruned_loss=0.03207, over 4980.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03571, over 971176.79 frames.], batch size: 28, lr: 2.65e-04 +2022-05-06 03:05:56,863 INFO [train.py:715] (7/8) Epoch 8, batch 7850, loss[loss=0.1337, simple_loss=0.2117, pruned_loss=0.02786, over 4964.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03537, over 971560.15 frames.], batch size: 24, lr: 2.65e-04 +2022-05-06 03:06:35,516 INFO [train.py:715] (7/8) Epoch 8, batch 7900, loss[loss=0.134, simple_loss=0.2038, pruned_loss=0.03211, over 4926.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03579, over 971591.37 frames.], batch size: 23, lr: 2.65e-04 +2022-05-06 03:07:15,008 INFO [train.py:715] (7/8) Epoch 8, batch 
7950, loss[loss=0.1457, simple_loss=0.2187, pruned_loss=0.03634, over 4933.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03582, over 972063.15 frames.], batch size: 23, lr: 2.65e-04 +2022-05-06 03:07:54,692 INFO [train.py:715] (7/8) Epoch 8, batch 8000, loss[loss=0.1595, simple_loss=0.231, pruned_loss=0.04407, over 4938.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03595, over 972378.69 frames.], batch size: 23, lr: 2.65e-04 +2022-05-06 03:08:33,647 INFO [train.py:715] (7/8) Epoch 8, batch 8050, loss[loss=0.1308, simple_loss=0.1979, pruned_loss=0.03187, over 4860.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2162, pruned_loss=0.03548, over 972845.22 frames.], batch size: 32, lr: 2.65e-04 +2022-05-06 03:09:12,022 INFO [train.py:715] (7/8) Epoch 8, batch 8100, loss[loss=0.1218, simple_loss=0.197, pruned_loss=0.02331, over 4955.00 frames.], tot_loss[loss=0.144, simple_loss=0.2167, pruned_loss=0.03568, over 972104.67 frames.], batch size: 24, lr: 2.65e-04 +2022-05-06 03:09:51,247 INFO [train.py:715] (7/8) Epoch 8, batch 8150, loss[loss=0.137, simple_loss=0.2058, pruned_loss=0.03405, over 4858.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2162, pruned_loss=0.03559, over 972139.27 frames.], batch size: 32, lr: 2.65e-04 +2022-05-06 03:10:31,280 INFO [train.py:715] (7/8) Epoch 8, batch 8200, loss[loss=0.1174, simple_loss=0.1831, pruned_loss=0.02589, over 4839.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03576, over 972423.75 frames.], batch size: 13, lr: 2.65e-04 +2022-05-06 03:11:09,919 INFO [train.py:715] (7/8) Epoch 8, batch 8250, loss[loss=0.1178, simple_loss=0.1914, pruned_loss=0.02213, over 4770.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03576, over 972737.91 frames.], batch size: 14, lr: 2.65e-04 +2022-05-06 03:11:48,873 INFO [train.py:715] (7/8) Epoch 8, batch 8300, loss[loss=0.1589, simple_loss=0.23, pruned_loss=0.04392, over 4786.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03616, over 972822.15 frames.], batch size: 18, lr: 2.65e-04 +2022-05-06 03:12:28,297 INFO [train.py:715] (7/8) Epoch 8, batch 8350, loss[loss=0.1361, simple_loss=0.2176, pruned_loss=0.02729, over 4977.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03591, over 972038.17 frames.], batch size: 28, lr: 2.65e-04 +2022-05-06 03:13:07,313 INFO [train.py:715] (7/8) Epoch 8, batch 8400, loss[loss=0.1281, simple_loss=0.1882, pruned_loss=0.03397, over 4776.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03635, over 971045.13 frames.], batch size: 12, lr: 2.65e-04 +2022-05-06 03:13:45,970 INFO [train.py:715] (7/8) Epoch 8, batch 8450, loss[loss=0.1214, simple_loss=0.1934, pruned_loss=0.02466, over 4699.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03646, over 971011.68 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:14:25,532 INFO [train.py:715] (7/8) Epoch 8, batch 8500, loss[loss=0.1777, simple_loss=0.2385, pruned_loss=0.05845, over 4848.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2167, pruned_loss=0.03632, over 970389.22 frames.], batch size: 13, lr: 2.65e-04 +2022-05-06 03:15:05,500 INFO [train.py:715] (7/8) Epoch 8, batch 8550, loss[loss=0.1378, simple_loss=0.2251, pruned_loss=0.0252, over 4893.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03618, over 970636.35 frames.], batch size: 19, lr: 2.65e-04 +2022-05-06 03:15:44,164 INFO [train.py:715] (7/8) Epoch 8, batch 8600, loss[loss=0.1057, 
simple_loss=0.1863, pruned_loss=0.01254, over 4823.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.03569, over 971599.42 frames.], batch size: 26, lr: 2.65e-04 +2022-05-06 03:16:23,285 INFO [train.py:715] (7/8) Epoch 8, batch 8650, loss[loss=0.1428, simple_loss=0.2087, pruned_loss=0.03844, over 4747.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2168, pruned_loss=0.03571, over 972386.10 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 03:17:02,902 INFO [train.py:715] (7/8) Epoch 8, batch 8700, loss[loss=0.1405, simple_loss=0.2074, pruned_loss=0.03686, over 4974.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03524, over 972684.25 frames.], batch size: 28, lr: 2.65e-04 +2022-05-06 03:17:41,702 INFO [train.py:715] (7/8) Epoch 8, batch 8750, loss[loss=0.1713, simple_loss=0.2584, pruned_loss=0.04213, over 4787.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03546, over 972782.14 frames.], batch size: 17, lr: 2.65e-04 +2022-05-06 03:18:20,675 INFO [train.py:715] (7/8) Epoch 8, batch 8800, loss[loss=0.1547, simple_loss=0.2168, pruned_loss=0.04634, over 4902.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.0353, over 972148.88 frames.], batch size: 17, lr: 2.65e-04 +2022-05-06 03:19:00,219 INFO [train.py:715] (7/8) Epoch 8, batch 8850, loss[loss=0.1419, simple_loss=0.2103, pruned_loss=0.03682, over 4868.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03544, over 971436.92 frames.], batch size: 32, lr: 2.65e-04 +2022-05-06 03:19:39,731 INFO [train.py:715] (7/8) Epoch 8, batch 8900, loss[loss=0.1386, simple_loss=0.213, pruned_loss=0.0321, over 4961.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03549, over 971527.08 frames.], batch size: 15, lr: 2.65e-04 +2022-05-06 03:20:18,232 INFO [train.py:715] (7/8) Epoch 8, batch 8950, loss[loss=0.1244, simple_loss=0.2007, pruned_loss=0.02402, over 4970.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2159, pruned_loss=0.03614, over 971167.60 frames.], batch size: 24, lr: 2.65e-04 +2022-05-06 03:20:57,340 INFO [train.py:715] (7/8) Epoch 8, batch 9000, loss[loss=0.1378, simple_loss=0.2161, pruned_loss=0.02977, over 4953.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2157, pruned_loss=0.03601, over 971825.16 frames.], batch size: 35, lr: 2.65e-04 +2022-05-06 03:20:57,340 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 03:21:06,883 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1075, simple_loss=0.1922, pruned_loss=0.01144, over 914524.00 frames. 
+2022-05-06 03:21:46,748 INFO [train.py:715] (7/8) Epoch 8, batch 9050, loss[loss=0.1406, simple_loss=0.2065, pruned_loss=0.03736, over 4746.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03599, over 971284.94 frames.], batch size: 16, lr: 2.65e-04 +2022-05-06 03:22:26,224 INFO [train.py:715] (7/8) Epoch 8, batch 9100, loss[loss=0.1621, simple_loss=0.2279, pruned_loss=0.04813, over 4971.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2163, pruned_loss=0.03625, over 971983.51 frames.], batch size: 39, lr: 2.65e-04 +2022-05-06 03:23:05,924 INFO [train.py:715] (7/8) Epoch 8, batch 9150, loss[loss=0.1407, simple_loss=0.2043, pruned_loss=0.03854, over 4806.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2151, pruned_loss=0.03583, over 971629.28 frames.], batch size: 12, lr: 2.64e-04 +2022-05-06 03:23:44,125 INFO [train.py:715] (7/8) Epoch 8, batch 9200, loss[loss=0.1703, simple_loss=0.2324, pruned_loss=0.05408, over 4838.00 frames.], tot_loss[loss=0.143, simple_loss=0.2145, pruned_loss=0.0357, over 971175.27 frames.], batch size: 32, lr: 2.64e-04 +2022-05-06 03:24:23,670 INFO [train.py:715] (7/8) Epoch 8, batch 9250, loss[loss=0.1281, simple_loss=0.1967, pruned_loss=0.02977, over 4819.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2146, pruned_loss=0.03543, over 970813.17 frames.], batch size: 12, lr: 2.64e-04 +2022-05-06 03:25:03,203 INFO [train.py:715] (7/8) Epoch 8, batch 9300, loss[loss=0.1223, simple_loss=0.1918, pruned_loss=0.02638, over 4862.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03604, over 970546.93 frames.], batch size: 12, lr: 2.64e-04 +2022-05-06 03:25:42,061 INFO [train.py:715] (7/8) Epoch 8, batch 9350, loss[loss=0.1331, simple_loss=0.2026, pruned_loss=0.0318, over 4956.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03569, over 971503.96 frames.], batch size: 29, lr: 2.64e-04 +2022-05-06 03:26:20,918 INFO [train.py:715] (7/8) Epoch 8, batch 9400, loss[loss=0.1485, simple_loss=0.2196, pruned_loss=0.03867, over 4783.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03533, over 971768.98 frames.], batch size: 18, lr: 2.64e-04 +2022-05-06 03:27:00,378 INFO [train.py:715] (7/8) Epoch 8, batch 9450, loss[loss=0.1346, simple_loss=0.2095, pruned_loss=0.02983, over 4845.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2164, pruned_loss=0.03551, over 971683.87 frames.], batch size: 13, lr: 2.64e-04 +2022-05-06 03:27:40,548 INFO [train.py:715] (7/8) Epoch 8, batch 9500, loss[loss=0.1417, simple_loss=0.2166, pruned_loss=0.03341, over 4985.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2153, pruned_loss=0.03506, over 972833.86 frames.], batch size: 25, lr: 2.64e-04 +2022-05-06 03:28:21,700 INFO [train.py:715] (7/8) Epoch 8, batch 9550, loss[loss=0.1379, simple_loss=0.2154, pruned_loss=0.03017, over 4910.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2158, pruned_loss=0.03531, over 972845.96 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:29:01,736 INFO [train.py:715] (7/8) Epoch 8, batch 9600, loss[loss=0.1021, simple_loss=0.1648, pruned_loss=0.01965, over 4819.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03567, over 973737.96 frames.], batch size: 13, lr: 2.64e-04 +2022-05-06 03:29:41,771 INFO [train.py:715] (7/8) Epoch 8, batch 9650, loss[loss=0.1383, simple_loss=0.2157, pruned_loss=0.03038, over 4797.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03568, over 974626.70 frames.], batch size: 24, lr: 2.64e-04 +2022-05-06 03:30:21,098 INFO 
[train.py:715] (7/8) Epoch 8, batch 9700, loss[loss=0.1324, simple_loss=0.2124, pruned_loss=0.02618, over 4894.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.0356, over 973394.52 frames.], batch size: 19, lr: 2.64e-04 +2022-05-06 03:30:59,868 INFO [train.py:715] (7/8) Epoch 8, batch 9750, loss[loss=0.1333, simple_loss=0.2005, pruned_loss=0.03307, over 4770.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2157, pruned_loss=0.03537, over 972870.26 frames.], batch size: 14, lr: 2.64e-04 +2022-05-06 03:31:39,482 INFO [train.py:715] (7/8) Epoch 8, batch 9800, loss[loss=0.1287, simple_loss=0.2099, pruned_loss=0.02379, over 4978.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.03498, over 971645.21 frames.], batch size: 35, lr: 2.64e-04 +2022-05-06 03:32:18,972 INFO [train.py:715] (7/8) Epoch 8, batch 9850, loss[loss=0.1629, simple_loss=0.2174, pruned_loss=0.05423, over 4886.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2158, pruned_loss=0.03541, over 971508.96 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:32:58,277 INFO [train.py:715] (7/8) Epoch 8, batch 9900, loss[loss=0.144, simple_loss=0.2264, pruned_loss=0.03082, over 4895.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2172, pruned_loss=0.03587, over 971469.91 frames.], batch size: 22, lr: 2.64e-04 +2022-05-06 03:33:37,621 INFO [train.py:715] (7/8) Epoch 8, batch 9950, loss[loss=0.1558, simple_loss=0.2226, pruned_loss=0.04447, over 4802.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2172, pruned_loss=0.03609, over 971459.47 frames.], batch size: 21, lr: 2.64e-04 +2022-05-06 03:34:17,532 INFO [train.py:715] (7/8) Epoch 8, batch 10000, loss[loss=0.1384, simple_loss=0.2085, pruned_loss=0.03416, over 4987.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2169, pruned_loss=0.036, over 971151.40 frames.], batch size: 25, lr: 2.64e-04 +2022-05-06 03:34:56,515 INFO [train.py:715] (7/8) Epoch 8, batch 10050, loss[loss=0.1436, simple_loss=0.2099, pruned_loss=0.0387, over 4947.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03584, over 972102.12 frames.], batch size: 29, lr: 2.64e-04 +2022-05-06 03:35:35,063 INFO [train.py:715] (7/8) Epoch 8, batch 10100, loss[loss=0.1859, simple_loss=0.258, pruned_loss=0.05694, over 4750.00 frames.], tot_loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03591, over 972293.65 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:36:15,141 INFO [train.py:715] (7/8) Epoch 8, batch 10150, loss[loss=0.1261, simple_loss=0.1961, pruned_loss=0.02805, over 4877.00 frames.], tot_loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.0355, over 971885.25 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:36:55,127 INFO [train.py:715] (7/8) Epoch 8, batch 10200, loss[loss=0.1429, simple_loss=0.2192, pruned_loss=0.03332, over 4791.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03566, over 971259.90 frames.], batch size: 24, lr: 2.64e-04 +2022-05-06 03:37:34,627 INFO [train.py:715] (7/8) Epoch 8, batch 10250, loss[loss=0.1534, simple_loss=0.2287, pruned_loss=0.03905, over 4781.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03569, over 971997.49 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:38:14,431 INFO [train.py:715] (7/8) Epoch 8, batch 10300, loss[loss=0.1363, simple_loss=0.207, pruned_loss=0.03277, over 4883.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03548, over 971534.67 frames.], batch size: 19, lr: 2.64e-04 +2022-05-06 03:38:53,949 INFO [train.py:715] (7/8) Epoch 8, 
batch 10350, loss[loss=0.1762, simple_loss=0.2399, pruned_loss=0.05619, over 4829.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2148, pruned_loss=0.03512, over 971757.24 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:39:32,638 INFO [train.py:715] (7/8) Epoch 8, batch 10400, loss[loss=0.1456, simple_loss=0.2192, pruned_loss=0.03594, over 4904.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2137, pruned_loss=0.03462, over 971009.78 frames.], batch size: 19, lr: 2.64e-04 +2022-05-06 03:40:12,241 INFO [train.py:715] (7/8) Epoch 8, batch 10450, loss[loss=0.1252, simple_loss=0.2047, pruned_loss=0.02286, over 4776.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2143, pruned_loss=0.03528, over 971202.11 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:40:51,307 INFO [train.py:715] (7/8) Epoch 8, batch 10500, loss[loss=0.1257, simple_loss=0.2025, pruned_loss=0.02441, over 4802.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2152, pruned_loss=0.03602, over 971232.65 frames.], batch size: 24, lr: 2.64e-04 +2022-05-06 03:41:30,156 INFO [train.py:715] (7/8) Epoch 8, batch 10550, loss[loss=0.1496, simple_loss=0.2295, pruned_loss=0.03484, over 4821.00 frames.], tot_loss[loss=0.1423, simple_loss=0.214, pruned_loss=0.03527, over 972147.14 frames.], batch size: 25, lr: 2.64e-04 +2022-05-06 03:42:08,780 INFO [train.py:715] (7/8) Epoch 8, batch 10600, loss[loss=0.163, simple_loss=0.2415, pruned_loss=0.0423, over 4787.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2156, pruned_loss=0.03603, over 973300.48 frames.], batch size: 17, lr: 2.64e-04 +2022-05-06 03:42:48,073 INFO [train.py:715] (7/8) Epoch 8, batch 10650, loss[loss=0.1456, simple_loss=0.2142, pruned_loss=0.03853, over 4922.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2159, pruned_loss=0.03622, over 972261.67 frames.], batch size: 39, lr: 2.64e-04 +2022-05-06 03:43:27,257 INFO [train.py:715] (7/8) Epoch 8, batch 10700, loss[loss=0.1733, simple_loss=0.2441, pruned_loss=0.05126, over 4768.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03579, over 971907.89 frames.], batch size: 14, lr: 2.64e-04 +2022-05-06 03:44:06,356 INFO [train.py:715] (7/8) Epoch 8, batch 10750, loss[loss=0.1604, simple_loss=0.2244, pruned_loss=0.04817, over 4832.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03572, over 972096.61 frames.], batch size: 27, lr: 2.64e-04 +2022-05-06 03:44:46,295 INFO [train.py:715] (7/8) Epoch 8, batch 10800, loss[loss=0.1381, simple_loss=0.2106, pruned_loss=0.03278, over 4869.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03529, over 973104.61 frames.], batch size: 16, lr: 2.64e-04 +2022-05-06 03:45:26,105 INFO [train.py:715] (7/8) Epoch 8, batch 10850, loss[loss=0.1275, simple_loss=0.199, pruned_loss=0.02796, over 4972.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2159, pruned_loss=0.03539, over 973037.39 frames.], batch size: 25, lr: 2.64e-04 +2022-05-06 03:46:05,370 INFO [train.py:715] (7/8) Epoch 8, batch 10900, loss[loss=0.1569, simple_loss=0.2328, pruned_loss=0.04051, over 4868.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2164, pruned_loss=0.03562, over 972113.30 frames.], batch size: 20, lr: 2.64e-04 +2022-05-06 03:46:44,375 INFO [train.py:715] (7/8) Epoch 8, batch 10950, loss[loss=0.13, simple_loss=0.195, pruned_loss=0.03249, over 4982.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03556, over 973355.21 frames.], batch size: 14, lr: 2.64e-04 +2022-05-06 03:47:24,377 INFO [train.py:715] (7/8) Epoch 8, batch 11000, 
loss[loss=0.1551, simple_loss=0.2201, pruned_loss=0.04507, over 4747.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.03624, over 972423.56 frames.], batch size: 19, lr: 2.64e-04 +2022-05-06 03:48:03,909 INFO [train.py:715] (7/8) Epoch 8, batch 11050, loss[loss=0.1632, simple_loss=0.2169, pruned_loss=0.05471, over 4852.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03598, over 972598.34 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:48:42,670 INFO [train.py:715] (7/8) Epoch 8, batch 11100, loss[loss=0.1525, simple_loss=0.2145, pruned_loss=0.04529, over 4961.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03491, over 972531.39 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:49:22,147 INFO [train.py:715] (7/8) Epoch 8, batch 11150, loss[loss=0.1466, simple_loss=0.2151, pruned_loss=0.03902, over 4838.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2135, pruned_loss=0.03444, over 971876.85 frames.], batch size: 30, lr: 2.64e-04 +2022-05-06 03:50:01,944 INFO [train.py:715] (7/8) Epoch 8, batch 11200, loss[loss=0.1152, simple_loss=0.1866, pruned_loss=0.02191, over 4950.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03493, over 972704.00 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:50:40,570 INFO [train.py:715] (7/8) Epoch 8, batch 11250, loss[loss=0.1606, simple_loss=0.2358, pruned_loss=0.04269, over 4756.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2149, pruned_loss=0.03532, over 972369.27 frames.], batch size: 19, lr: 2.64e-04 +2022-05-06 03:51:19,595 INFO [train.py:715] (7/8) Epoch 8, batch 11300, loss[loss=0.1709, simple_loss=0.2279, pruned_loss=0.05698, over 4981.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2145, pruned_loss=0.0354, over 973383.20 frames.], batch size: 15, lr: 2.64e-04 +2022-05-06 03:51:58,928 INFO [train.py:715] (7/8) Epoch 8, batch 11350, loss[loss=0.1461, simple_loss=0.2268, pruned_loss=0.03275, over 4868.00 frames.], tot_loss[loss=0.1433, simple_loss=0.215, pruned_loss=0.0358, over 973332.38 frames.], batch size: 20, lr: 2.63e-04 +2022-05-06 03:52:37,407 INFO [train.py:715] (7/8) Epoch 8, batch 11400, loss[loss=0.1273, simple_loss=0.2037, pruned_loss=0.02548, over 4922.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2146, pruned_loss=0.03586, over 973141.86 frames.], batch size: 23, lr: 2.63e-04 +2022-05-06 03:53:16,050 INFO [train.py:715] (7/8) Epoch 8, batch 11450, loss[loss=0.134, simple_loss=0.2105, pruned_loss=0.02876, over 4943.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2149, pruned_loss=0.03593, over 972150.64 frames.], batch size: 29, lr: 2.63e-04 +2022-05-06 03:53:55,354 INFO [train.py:715] (7/8) Epoch 8, batch 11500, loss[loss=0.1476, simple_loss=0.2043, pruned_loss=0.04547, over 4912.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2153, pruned_loss=0.0365, over 972194.59 frames.], batch size: 17, lr: 2.63e-04 +2022-05-06 03:54:34,456 INFO [train.py:715] (7/8) Epoch 8, batch 11550, loss[loss=0.1528, simple_loss=0.215, pruned_loss=0.04534, over 4979.00 frames.], tot_loss[loss=0.144, simple_loss=0.2151, pruned_loss=0.03648, over 972621.68 frames.], batch size: 24, lr: 2.63e-04 +2022-05-06 03:55:13,513 INFO [train.py:715] (7/8) Epoch 8, batch 11600, loss[loss=0.1412, simple_loss=0.2085, pruned_loss=0.03695, over 4797.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2155, pruned_loss=0.03618, over 972049.95 frames.], batch size: 21, lr: 2.63e-04 +2022-05-06 03:55:53,446 INFO [train.py:715] (7/8) Epoch 8, batch 11650, loss[loss=0.1221, 
simple_loss=0.1897, pruned_loss=0.02727, over 4909.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03585, over 971540.94 frames.], batch size: 19, lr: 2.63e-04 +2022-05-06 03:56:33,837 INFO [train.py:715] (7/8) Epoch 8, batch 11700, loss[loss=0.135, simple_loss=0.2033, pruned_loss=0.03334, over 4843.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2163, pruned_loss=0.03603, over 971129.73 frames.], batch size: 32, lr: 2.63e-04 +2022-05-06 03:57:13,272 INFO [train.py:715] (7/8) Epoch 8, batch 11750, loss[loss=0.138, simple_loss=0.2079, pruned_loss=0.03402, over 4814.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03576, over 971200.94 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 03:57:52,307 INFO [train.py:715] (7/8) Epoch 8, batch 11800, loss[loss=0.161, simple_loss=0.2404, pruned_loss=0.04083, over 4807.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03591, over 972194.11 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 03:58:32,050 INFO [train.py:715] (7/8) Epoch 8, batch 11850, loss[loss=0.1634, simple_loss=0.2265, pruned_loss=0.05014, over 4847.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2168, pruned_loss=0.0361, over 971869.05 frames.], batch size: 32, lr: 2.63e-04 +2022-05-06 03:59:11,747 INFO [train.py:715] (7/8) Epoch 8, batch 11900, loss[loss=0.1508, simple_loss=0.219, pruned_loss=0.0413, over 4820.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03559, over 971264.19 frames.], batch size: 27, lr: 2.63e-04 +2022-05-06 03:59:51,346 INFO [train.py:715] (7/8) Epoch 8, batch 11950, loss[loss=0.1466, simple_loss=0.222, pruned_loss=0.0356, over 4809.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03527, over 971315.79 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 04:00:30,529 INFO [train.py:715] (7/8) Epoch 8, batch 12000, loss[loss=0.1391, simple_loss=0.22, pruned_loss=0.02906, over 4944.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.03511, over 971941.29 frames.], batch size: 29, lr: 2.63e-04 +2022-05-06 04:00:30,530 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 04:00:40,091 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1076, simple_loss=0.1923, pruned_loss=0.0115, over 914524.00 frames. 
+2022-05-06 04:01:19,842 INFO [train.py:715] (7/8) Epoch 8, batch 12050, loss[loss=0.1618, simple_loss=0.2174, pruned_loss=0.05313, over 4780.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.03523, over 972299.44 frames.], batch size: 12, lr: 2.63e-04 +2022-05-06 04:01:59,446 INFO [train.py:715] (7/8) Epoch 8, batch 12100, loss[loss=0.1426, simple_loss=0.2311, pruned_loss=0.02707, over 4952.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03535, over 972342.76 frames.], batch size: 21, lr: 2.63e-04 +2022-05-06 04:02:38,522 INFO [train.py:715] (7/8) Epoch 8, batch 12150, loss[loss=0.1461, simple_loss=0.2216, pruned_loss=0.0353, over 4916.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.0349, over 972356.61 frames.], batch size: 23, lr: 2.63e-04 +2022-05-06 04:03:17,592 INFO [train.py:715] (7/8) Epoch 8, batch 12200, loss[loss=0.133, simple_loss=0.201, pruned_loss=0.0325, over 4787.00 frames.], tot_loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.03536, over 972021.15 frames.], batch size: 14, lr: 2.63e-04 +2022-05-06 04:03:57,162 INFO [train.py:715] (7/8) Epoch 8, batch 12250, loss[loss=0.1301, simple_loss=0.2008, pruned_loss=0.02969, over 4823.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2147, pruned_loss=0.03533, over 972977.76 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 04:04:36,394 INFO [train.py:715] (7/8) Epoch 8, batch 12300, loss[loss=0.1478, simple_loss=0.2212, pruned_loss=0.03717, over 4786.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2148, pruned_loss=0.03557, over 973067.34 frames.], batch size: 14, lr: 2.63e-04 +2022-05-06 04:05:15,236 INFO [train.py:715] (7/8) Epoch 8, batch 12350, loss[loss=0.1412, simple_loss=0.2058, pruned_loss=0.03829, over 4898.00 frames.], tot_loss[loss=0.1418, simple_loss=0.214, pruned_loss=0.03484, over 972707.54 frames.], batch size: 22, lr: 2.63e-04 +2022-05-06 04:05:54,659 INFO [train.py:715] (7/8) Epoch 8, batch 12400, loss[loss=0.1842, simple_loss=0.2362, pruned_loss=0.06605, over 4808.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03532, over 972521.86 frames.], batch size: 14, lr: 2.63e-04 +2022-05-06 04:06:34,253 INFO [train.py:715] (7/8) Epoch 8, batch 12450, loss[loss=0.125, simple_loss=0.1992, pruned_loss=0.02538, over 4878.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2152, pruned_loss=0.03536, over 973345.35 frames.], batch size: 22, lr: 2.63e-04 +2022-05-06 04:07:13,257 INFO [train.py:715] (7/8) Epoch 8, batch 12500, loss[loss=0.138, simple_loss=0.2102, pruned_loss=0.03289, over 4713.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2153, pruned_loss=0.03563, over 972674.17 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:07:52,127 INFO [train.py:715] (7/8) Epoch 8, batch 12550, loss[loss=0.1314, simple_loss=0.205, pruned_loss=0.02891, over 4972.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.03609, over 972103.68 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:08:31,831 INFO [train.py:715] (7/8) Epoch 8, batch 12600, loss[loss=0.1499, simple_loss=0.2195, pruned_loss=0.04017, over 4824.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2159, pruned_loss=0.03615, over 971917.80 frames.], batch size: 26, lr: 2.63e-04 +2022-05-06 04:09:10,878 INFO [train.py:715] (7/8) Epoch 8, batch 12650, loss[loss=0.127, simple_loss=0.1982, pruned_loss=0.02787, over 4989.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2153, pruned_loss=0.03563, over 972154.60 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 04:09:50,738 INFO 
[train.py:715] (7/8) Epoch 8, batch 12700, loss[loss=0.1411, simple_loss=0.2193, pruned_loss=0.03151, over 4765.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, pruned_loss=0.03588, over 972028.91 frames.], batch size: 19, lr: 2.63e-04 +2022-05-06 04:10:30,126 INFO [train.py:715] (7/8) Epoch 8, batch 12750, loss[loss=0.1999, simple_loss=0.2666, pruned_loss=0.06659, over 4811.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.03625, over 971389.19 frames.], batch size: 25, lr: 2.63e-04 +2022-05-06 04:11:10,324 INFO [train.py:715] (7/8) Epoch 8, batch 12800, loss[loss=0.1748, simple_loss=0.2366, pruned_loss=0.05644, over 4964.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2155, pruned_loss=0.03607, over 971572.87 frames.], batch size: 39, lr: 2.63e-04 +2022-05-06 04:11:48,983 INFO [train.py:715] (7/8) Epoch 8, batch 12850, loss[loss=0.1721, simple_loss=0.2413, pruned_loss=0.0515, over 4835.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2159, pruned_loss=0.03624, over 971346.32 frames.], batch size: 27, lr: 2.63e-04 +2022-05-06 04:12:28,015 INFO [train.py:715] (7/8) Epoch 8, batch 12900, loss[loss=0.1535, simple_loss=0.2261, pruned_loss=0.04049, over 4916.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2156, pruned_loss=0.03607, over 971956.65 frames.], batch size: 23, lr: 2.63e-04 +2022-05-06 04:13:07,526 INFO [train.py:715] (7/8) Epoch 8, batch 12950, loss[loss=0.1502, simple_loss=0.2186, pruned_loss=0.04086, over 4913.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03598, over 972288.72 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:13:46,913 INFO [train.py:715] (7/8) Epoch 8, batch 13000, loss[loss=0.1476, simple_loss=0.2116, pruned_loss=0.04182, over 4969.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03597, over 972187.51 frames.], batch size: 35, lr: 2.63e-04 +2022-05-06 04:14:26,215 INFO [train.py:715] (7/8) Epoch 8, batch 13050, loss[loss=0.1436, simple_loss=0.2141, pruned_loss=0.03651, over 4777.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03583, over 971235.30 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:15:05,643 INFO [train.py:715] (7/8) Epoch 8, batch 13100, loss[loss=0.1796, simple_loss=0.262, pruned_loss=0.0486, over 4851.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03566, over 971317.81 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:15:45,374 INFO [train.py:715] (7/8) Epoch 8, batch 13150, loss[loss=0.1481, simple_loss=0.2265, pruned_loss=0.0348, over 4938.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03615, over 971310.26 frames.], batch size: 35, lr: 2.63e-04 +2022-05-06 04:16:24,327 INFO [train.py:715] (7/8) Epoch 8, batch 13200, loss[loss=0.1616, simple_loss=0.2358, pruned_loss=0.04367, over 4778.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2163, pruned_loss=0.03607, over 971048.02 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:17:03,717 INFO [train.py:715] (7/8) Epoch 8, batch 13250, loss[loss=0.1058, simple_loss=0.1819, pruned_loss=0.0149, over 4941.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03548, over 972520.11 frames.], batch size: 29, lr: 2.63e-04 +2022-05-06 04:17:43,334 INFO [train.py:715] (7/8) Epoch 8, batch 13300, loss[loss=0.1412, simple_loss=0.2049, pruned_loss=0.03868, over 4846.00 frames.], tot_loss[loss=0.143, simple_loss=0.2148, pruned_loss=0.03561, over 972298.49 frames.], batch size: 30, lr: 2.63e-04 +2022-05-06 04:18:22,359 INFO [train.py:715] (7/8) Epoch 8, 
batch 13350, loss[loss=0.1464, simple_loss=0.2264, pruned_loss=0.03316, over 4710.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2151, pruned_loss=0.03559, over 972571.27 frames.], batch size: 15, lr: 2.63e-04 +2022-05-06 04:19:01,001 INFO [train.py:715] (7/8) Epoch 8, batch 13400, loss[loss=0.1612, simple_loss=0.2356, pruned_loss=0.04345, over 4953.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.0363, over 973133.41 frames.], batch size: 39, lr: 2.63e-04 +2022-05-06 04:19:39,801 INFO [train.py:715] (7/8) Epoch 8, batch 13450, loss[loss=0.1225, simple_loss=0.2007, pruned_loss=0.02209, over 4774.00 frames.], tot_loss[loss=0.1442, simple_loss=0.216, pruned_loss=0.03616, over 972841.41 frames.], batch size: 18, lr: 2.63e-04 +2022-05-06 04:20:19,854 INFO [train.py:715] (7/8) Epoch 8, batch 13500, loss[loss=0.1649, simple_loss=0.2246, pruned_loss=0.05264, over 4821.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03586, over 973371.80 frames.], batch size: 13, lr: 2.63e-04 +2022-05-06 04:20:58,646 INFO [train.py:715] (7/8) Epoch 8, batch 13550, loss[loss=0.1377, simple_loss=0.208, pruned_loss=0.03376, over 4641.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2161, pruned_loss=0.03608, over 973307.82 frames.], batch size: 13, lr: 2.62e-04 +2022-05-06 04:21:37,840 INFO [train.py:715] (7/8) Epoch 8, batch 13600, loss[loss=0.1515, simple_loss=0.2201, pruned_loss=0.04147, over 4937.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2161, pruned_loss=0.03632, over 973415.36 frames.], batch size: 29, lr: 2.62e-04 +2022-05-06 04:22:16,978 INFO [train.py:715] (7/8) Epoch 8, batch 13650, loss[loss=0.1667, simple_loss=0.2393, pruned_loss=0.04708, over 4839.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2167, pruned_loss=0.03679, over 973184.20 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:22:56,129 INFO [train.py:715] (7/8) Epoch 8, batch 13700, loss[loss=0.1702, simple_loss=0.2255, pruned_loss=0.05749, over 4777.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2162, pruned_loss=0.03661, over 972554.51 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:23:34,769 INFO [train.py:715] (7/8) Epoch 8, batch 13750, loss[loss=0.1182, simple_loss=0.186, pruned_loss=0.02517, over 4887.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.0365, over 971926.30 frames.], batch size: 22, lr: 2.62e-04 +2022-05-06 04:24:13,494 INFO [train.py:715] (7/8) Epoch 8, batch 13800, loss[loss=0.14, simple_loss=0.212, pruned_loss=0.03399, over 4909.00 frames.], tot_loss[loss=0.144, simple_loss=0.2156, pruned_loss=0.03624, over 972687.51 frames.], batch size: 19, lr: 2.62e-04 +2022-05-06 04:24:52,948 INFO [train.py:715] (7/8) Epoch 8, batch 13850, loss[loss=0.1484, simple_loss=0.2215, pruned_loss=0.03762, over 4920.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2157, pruned_loss=0.03609, over 972466.25 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:25:31,241 INFO [train.py:715] (7/8) Epoch 8, batch 13900, loss[loss=0.1519, simple_loss=0.2099, pruned_loss=0.04697, over 4849.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, pruned_loss=0.03588, over 972472.40 frames.], batch size: 32, lr: 2.62e-04 +2022-05-06 04:26:10,333 INFO [train.py:715] (7/8) Epoch 8, batch 13950, loss[loss=0.1415, simple_loss=0.2115, pruned_loss=0.03576, over 4813.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03553, over 972493.13 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:26:49,431 INFO [train.py:715] (7/8) Epoch 8, batch 14000, 
loss[loss=0.1745, simple_loss=0.2521, pruned_loss=0.04844, over 4983.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2164, pruned_loss=0.03596, over 973068.96 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:27:28,487 INFO [train.py:715] (7/8) Epoch 8, batch 14050, loss[loss=0.1456, simple_loss=0.2145, pruned_loss=0.03837, over 4912.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.0363, over 973086.84 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:28:06,677 INFO [train.py:715] (7/8) Epoch 8, batch 14100, loss[loss=0.1405, simple_loss=0.2081, pruned_loss=0.03643, over 4703.00 frames.], tot_loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.03664, over 972116.00 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:28:45,330 INFO [train.py:715] (7/8) Epoch 8, batch 14150, loss[loss=0.1404, simple_loss=0.2009, pruned_loss=0.03991, over 4973.00 frames.], tot_loss[loss=0.1452, simple_loss=0.217, pruned_loss=0.03671, over 971957.83 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:29:25,593 INFO [train.py:715] (7/8) Epoch 8, batch 14200, loss[loss=0.1375, simple_loss=0.2122, pruned_loss=0.03134, over 4917.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2163, pruned_loss=0.03617, over 972264.68 frames.], batch size: 23, lr: 2.62e-04 +2022-05-06 04:30:04,164 INFO [train.py:715] (7/8) Epoch 8, batch 14250, loss[loss=0.1499, simple_loss=0.2193, pruned_loss=0.04028, over 4966.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2157, pruned_loss=0.03585, over 972772.01 frames.], batch size: 21, lr: 2.62e-04 +2022-05-06 04:30:44,070 INFO [train.py:715] (7/8) Epoch 8, batch 14300, loss[loss=0.1408, simple_loss=0.2201, pruned_loss=0.03076, over 4688.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.03606, over 972109.32 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:31:23,532 INFO [train.py:715] (7/8) Epoch 8, batch 14350, loss[loss=0.1597, simple_loss=0.223, pruned_loss=0.0482, over 4883.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2161, pruned_loss=0.03612, over 972157.18 frames.], batch size: 19, lr: 2.62e-04 +2022-05-06 04:32:02,825 INFO [train.py:715] (7/8) Epoch 8, batch 14400, loss[loss=0.1259, simple_loss=0.1982, pruned_loss=0.02679, over 4817.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2167, pruned_loss=0.03627, over 972187.52 frames.], batch size: 21, lr: 2.62e-04 +2022-05-06 04:32:41,519 INFO [train.py:715] (7/8) Epoch 8, batch 14450, loss[loss=0.1287, simple_loss=0.2072, pruned_loss=0.02506, over 4930.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2164, pruned_loss=0.03612, over 972823.17 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:33:20,780 INFO [train.py:715] (7/8) Epoch 8, batch 14500, loss[loss=0.1337, simple_loss=0.2042, pruned_loss=0.03166, over 4798.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03608, over 971884.64 frames.], batch size: 24, lr: 2.62e-04 +2022-05-06 04:34:00,255 INFO [train.py:715] (7/8) Epoch 8, batch 14550, loss[loss=0.1654, simple_loss=0.2338, pruned_loss=0.04845, over 4880.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03651, over 972686.27 frames.], batch size: 19, lr: 2.62e-04 +2022-05-06 04:34:38,291 INFO [train.py:715] (7/8) Epoch 8, batch 14600, loss[loss=0.1553, simple_loss=0.2182, pruned_loss=0.04622, over 4699.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2174, pruned_loss=0.03658, over 972415.00 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:35:17,878 INFO [train.py:715] (7/8) Epoch 8, batch 14650, loss[loss=0.1536, 
simple_loss=0.2274, pruned_loss=0.03995, over 4823.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2167, pruned_loss=0.03619, over 971998.23 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:35:57,138 INFO [train.py:715] (7/8) Epoch 8, batch 14700, loss[loss=0.1363, simple_loss=0.2073, pruned_loss=0.03264, over 4708.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, pruned_loss=0.03592, over 971707.77 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:36:35,958 INFO [train.py:715] (7/8) Epoch 8, batch 14750, loss[loss=0.1283, simple_loss=0.2009, pruned_loss=0.0278, over 4895.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03581, over 971967.61 frames.], batch size: 19, lr: 2.62e-04 +2022-05-06 04:37:14,354 INFO [train.py:715] (7/8) Epoch 8, batch 14800, loss[loss=0.1631, simple_loss=0.2262, pruned_loss=0.05007, over 4804.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03582, over 972420.22 frames.], batch size: 21, lr: 2.62e-04 +2022-05-06 04:37:54,167 INFO [train.py:715] (7/8) Epoch 8, batch 14850, loss[loss=0.1216, simple_loss=0.202, pruned_loss=0.02061, over 4648.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2164, pruned_loss=0.03576, over 972393.16 frames.], batch size: 13, lr: 2.62e-04 +2022-05-06 04:38:33,084 INFO [train.py:715] (7/8) Epoch 8, batch 14900, loss[loss=0.1557, simple_loss=0.2303, pruned_loss=0.04055, over 4806.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.03576, over 972322.98 frames.], batch size: 21, lr: 2.62e-04 +2022-05-06 04:39:11,873 INFO [train.py:715] (7/8) Epoch 8, batch 14950, loss[loss=0.1425, simple_loss=0.2085, pruned_loss=0.0383, over 4962.00 frames.], tot_loss[loss=0.144, simple_loss=0.2158, pruned_loss=0.03617, over 972719.29 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:39:51,076 INFO [train.py:715] (7/8) Epoch 8, batch 15000, loss[loss=0.1736, simple_loss=0.2328, pruned_loss=0.05719, over 4910.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2161, pruned_loss=0.0366, over 972629.90 frames.], batch size: 17, lr: 2.62e-04 +2022-05-06 04:39:51,077 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 04:40:00,792 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1076, simple_loss=0.1921, pruned_loss=0.01153, over 914524.00 frames. 
+2022-05-06 04:40:40,558 INFO [train.py:715] (7/8) Epoch 8, batch 15050, loss[loss=0.1516, simple_loss=0.2341, pruned_loss=0.03458, over 4928.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2168, pruned_loss=0.03646, over 971449.15 frames.], batch size: 21, lr: 2.62e-04 +2022-05-06 04:41:19,876 INFO [train.py:715] (7/8) Epoch 8, batch 15100, loss[loss=0.1132, simple_loss=0.1886, pruned_loss=0.01884, over 4922.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2169, pruned_loss=0.03673, over 971136.99 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:41:59,417 INFO [train.py:715] (7/8) Epoch 8, batch 15150, loss[loss=0.1536, simple_loss=0.2223, pruned_loss=0.04247, over 4969.00 frames.], tot_loss[loss=0.1456, simple_loss=0.2175, pruned_loss=0.03685, over 972013.99 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:42:38,835 INFO [train.py:715] (7/8) Epoch 8, batch 15200, loss[loss=0.1096, simple_loss=0.1792, pruned_loss=0.02002, over 4853.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2163, pruned_loss=0.03646, over 972247.01 frames.], batch size: 13, lr: 2.62e-04 +2022-05-06 04:43:18,562 INFO [train.py:715] (7/8) Epoch 8, batch 15250, loss[loss=0.1481, simple_loss=0.2095, pruned_loss=0.04338, over 4866.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2152, pruned_loss=0.03593, over 972661.03 frames.], batch size: 30, lr: 2.62e-04 +2022-05-06 04:43:58,534 INFO [train.py:715] (7/8) Epoch 8, batch 15300, loss[loss=0.1427, simple_loss=0.2205, pruned_loss=0.03243, over 4921.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2154, pruned_loss=0.03598, over 972312.62 frames.], batch size: 23, lr: 2.62e-04 +2022-05-06 04:44:37,108 INFO [train.py:715] (7/8) Epoch 8, batch 15350, loss[loss=0.1393, simple_loss=0.2198, pruned_loss=0.02935, over 4929.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2163, pruned_loss=0.03641, over 972166.47 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:45:16,997 INFO [train.py:715] (7/8) Epoch 8, batch 15400, loss[loss=0.125, simple_loss=0.1957, pruned_loss=0.02718, over 4785.00 frames.], tot_loss[loss=0.1452, simple_loss=0.217, pruned_loss=0.03666, over 972852.56 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 04:45:55,983 INFO [train.py:715] (7/8) Epoch 8, batch 15450, loss[loss=0.167, simple_loss=0.2407, pruned_loss=0.04666, over 4962.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.03581, over 972977.24 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:46:34,944 INFO [train.py:715] (7/8) Epoch 8, batch 15500, loss[loss=0.1223, simple_loss=0.2018, pruned_loss=0.02142, over 4804.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2149, pruned_loss=0.03582, over 973164.99 frames.], batch size: 24, lr: 2.62e-04 +2022-05-06 04:47:13,675 INFO [train.py:715] (7/8) Epoch 8, batch 15550, loss[loss=0.1342, simple_loss=0.2023, pruned_loss=0.03309, over 4810.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2148, pruned_loss=0.0358, over 972296.74 frames.], batch size: 27, lr: 2.62e-04 +2022-05-06 04:47:52,420 INFO [train.py:715] (7/8) Epoch 8, batch 15600, loss[loss=0.154, simple_loss=0.2325, pruned_loss=0.03775, over 4937.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2153, pruned_loss=0.03578, over 972582.97 frames.], batch size: 23, lr: 2.62e-04 +2022-05-06 04:48:32,585 INFO [train.py:715] (7/8) Epoch 8, batch 15650, loss[loss=0.1329, simple_loss=0.2104, pruned_loss=0.02768, over 4788.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03571, over 972353.01 frames.], batch size: 18, lr: 2.62e-04 +2022-05-06 
04:49:11,088 INFO [train.py:715] (7/8) Epoch 8, batch 15700, loss[loss=0.1444, simple_loss=0.2165, pruned_loss=0.03615, over 4754.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.03539, over 971674.93 frames.], batch size: 19, lr: 2.62e-04 +2022-05-06 04:49:50,913 INFO [train.py:715] (7/8) Epoch 8, batch 15750, loss[loss=0.1636, simple_loss=0.2335, pruned_loss=0.04691, over 4826.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03555, over 971590.95 frames.], batch size: 15, lr: 2.62e-04 +2022-05-06 04:50:30,390 INFO [train.py:715] (7/8) Epoch 8, batch 15800, loss[loss=0.1444, simple_loss=0.221, pruned_loss=0.0339, over 4766.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03546, over 972346.16 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 04:51:09,454 INFO [train.py:715] (7/8) Epoch 8, batch 15850, loss[loss=0.1446, simple_loss=0.2153, pruned_loss=0.03697, over 4881.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2161, pruned_loss=0.03552, over 971406.90 frames.], batch size: 32, lr: 2.61e-04 +2022-05-06 04:51:48,555 INFO [train.py:715] (7/8) Epoch 8, batch 15900, loss[loss=0.161, simple_loss=0.2315, pruned_loss=0.04521, over 4752.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.03573, over 970905.42 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 04:52:27,777 INFO [train.py:715] (7/8) Epoch 8, batch 15950, loss[loss=0.1176, simple_loss=0.2, pruned_loss=0.01763, over 4906.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03582, over 970561.48 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 04:53:07,058 INFO [train.py:715] (7/8) Epoch 8, batch 16000, loss[loss=0.1347, simple_loss=0.1997, pruned_loss=0.03483, over 4779.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.03614, over 971039.01 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 04:53:45,657 INFO [train.py:715] (7/8) Epoch 8, batch 16050, loss[loss=0.1517, simple_loss=0.2232, pruned_loss=0.04015, over 4821.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2161, pruned_loss=0.03562, over 970212.55 frames.], batch size: 13, lr: 2.61e-04 +2022-05-06 04:54:25,527 INFO [train.py:715] (7/8) Epoch 8, batch 16100, loss[loss=0.1738, simple_loss=0.2421, pruned_loss=0.0528, over 4905.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03577, over 970579.74 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 04:55:04,004 INFO [train.py:715] (7/8) Epoch 8, batch 16150, loss[loss=0.1537, simple_loss=0.2246, pruned_loss=0.0414, over 4695.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03555, over 970297.52 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 04:55:43,546 INFO [train.py:715] (7/8) Epoch 8, batch 16200, loss[loss=0.1488, simple_loss=0.2316, pruned_loss=0.03295, over 4979.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03556, over 971668.96 frames.], batch size: 25, lr: 2.61e-04 +2022-05-06 04:56:21,933 INFO [train.py:715] (7/8) Epoch 8, batch 16250, loss[loss=0.1366, simple_loss=0.2124, pruned_loss=0.03039, over 4762.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2157, pruned_loss=0.03526, over 971976.43 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 04:57:01,392 INFO [train.py:715] (7/8) Epoch 8, batch 16300, loss[loss=0.1593, simple_loss=0.2303, pruned_loss=0.04415, over 4850.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2164, pruned_loss=0.03566, over 973064.64 frames.], batch size: 30, lr: 2.61e-04 +2022-05-06 04:57:40,825 INFO 
[train.py:715] (7/8) Epoch 8, batch 16350, loss[loss=0.1599, simple_loss=0.2289, pruned_loss=0.04551, over 4914.00 frames.], tot_loss[loss=0.144, simple_loss=0.2166, pruned_loss=0.03567, over 973096.16 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 04:58:19,599 INFO [train.py:715] (7/8) Epoch 8, batch 16400, loss[loss=0.1494, simple_loss=0.2139, pruned_loss=0.04244, over 4921.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.0354, over 973343.38 frames.], batch size: 23, lr: 2.61e-04 +2022-05-06 04:58:58,714 INFO [train.py:715] (7/8) Epoch 8, batch 16450, loss[loss=0.129, simple_loss=0.1964, pruned_loss=0.03078, over 4778.00 frames.], tot_loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03584, over 973725.87 frames.], batch size: 14, lr: 2.61e-04 +2022-05-06 04:59:37,560 INFO [train.py:715] (7/8) Epoch 8, batch 16500, loss[loss=0.1429, simple_loss=0.2128, pruned_loss=0.03646, over 4970.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03546, over 972323.14 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 05:00:17,264 INFO [train.py:715] (7/8) Epoch 8, batch 16550, loss[loss=0.1361, simple_loss=0.2071, pruned_loss=0.03249, over 4762.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03582, over 972182.23 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:00:56,286 INFO [train.py:715] (7/8) Epoch 8, batch 16600, loss[loss=0.1818, simple_loss=0.2551, pruned_loss=0.05423, over 4785.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2169, pruned_loss=0.03604, over 972296.30 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:01:35,313 INFO [train.py:715] (7/8) Epoch 8, batch 16650, loss[loss=0.1469, simple_loss=0.246, pruned_loss=0.02385, over 4891.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2162, pruned_loss=0.03567, over 971912.51 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 05:02:14,557 INFO [train.py:715] (7/8) Epoch 8, batch 16700, loss[loss=0.1473, simple_loss=0.2311, pruned_loss=0.0318, over 4785.00 frames.], tot_loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.03555, over 971933.97 frames.], batch size: 12, lr: 2.61e-04 +2022-05-06 05:02:53,474 INFO [train.py:715] (7/8) Epoch 8, batch 16750, loss[loss=0.161, simple_loss=0.2416, pruned_loss=0.04021, over 4875.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.03576, over 971943.16 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:03:33,069 INFO [train.py:715] (7/8) Epoch 8, batch 16800, loss[loss=0.1412, simple_loss=0.2102, pruned_loss=0.03607, over 4953.00 frames.], tot_loss[loss=0.144, simple_loss=0.2165, pruned_loss=0.03571, over 972118.06 frames.], batch size: 24, lr: 2.61e-04 +2022-05-06 05:04:12,044 INFO [train.py:715] (7/8) Epoch 8, batch 16850, loss[loss=0.1775, simple_loss=0.2415, pruned_loss=0.05675, over 4956.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2169, pruned_loss=0.03606, over 971733.61 frames.], batch size: 39, lr: 2.61e-04 +2022-05-06 05:04:51,957 INFO [train.py:715] (7/8) Epoch 8, batch 16900, loss[loss=0.1216, simple_loss=0.1827, pruned_loss=0.03026, over 4969.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2162, pruned_loss=0.03555, over 971767.37 frames.], batch size: 14, lr: 2.61e-04 +2022-05-06 05:05:30,453 INFO [train.py:715] (7/8) Epoch 8, batch 16950, loss[loss=0.1318, simple_loss=0.2098, pruned_loss=0.02688, over 4777.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03519, over 971248.26 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 05:06:10,148 INFO [train.py:715] (7/8) Epoch 
8, batch 17000, loss[loss=0.155, simple_loss=0.2429, pruned_loss=0.03359, over 4773.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03518, over 970778.26 frames.], batch size: 18, lr: 2.61e-04 +2022-05-06 05:06:49,662 INFO [train.py:715] (7/8) Epoch 8, batch 17050, loss[loss=0.1613, simple_loss=0.2265, pruned_loss=0.04802, over 4850.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03515, over 971472.49 frames.], batch size: 30, lr: 2.61e-04 +2022-05-06 05:07:28,339 INFO [train.py:715] (7/8) Epoch 8, batch 17100, loss[loss=0.1539, simple_loss=0.2281, pruned_loss=0.03983, over 4917.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2162, pruned_loss=0.03548, over 971691.82 frames.], batch size: 23, lr: 2.61e-04 +2022-05-06 05:08:08,033 INFO [train.py:715] (7/8) Epoch 8, batch 17150, loss[loss=0.1373, simple_loss=0.2151, pruned_loss=0.02978, over 4820.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2159, pruned_loss=0.03535, over 971681.36 frames.], batch size: 25, lr: 2.61e-04 +2022-05-06 05:08:47,210 INFO [train.py:715] (7/8) Epoch 8, batch 17200, loss[loss=0.1875, simple_loss=0.259, pruned_loss=0.05801, over 4686.00 frames.], tot_loss[loss=0.1443, simple_loss=0.217, pruned_loss=0.03579, over 971283.92 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 05:09:26,326 INFO [train.py:715] (7/8) Epoch 8, batch 17250, loss[loss=0.1407, simple_loss=0.2126, pruned_loss=0.03441, over 4867.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03581, over 971693.43 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:10:04,659 INFO [train.py:715] (7/8) Epoch 8, batch 17300, loss[loss=0.1573, simple_loss=0.2249, pruned_loss=0.04485, over 4932.00 frames.], tot_loss[loss=0.1442, simple_loss=0.217, pruned_loss=0.03571, over 971491.28 frames.], batch size: 21, lr: 2.61e-04 +2022-05-06 05:10:44,497 INFO [train.py:715] (7/8) Epoch 8, batch 17350, loss[loss=0.1374, simple_loss=0.2068, pruned_loss=0.03397, over 4798.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2168, pruned_loss=0.03568, over 971205.41 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 05:11:23,597 INFO [train.py:715] (7/8) Epoch 8, batch 17400, loss[loss=0.1615, simple_loss=0.2375, pruned_loss=0.04274, over 4773.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2173, pruned_loss=0.03597, over 971745.46 frames.], batch size: 17, lr: 2.61e-04 +2022-05-06 05:12:02,693 INFO [train.py:715] (7/8) Epoch 8, batch 17450, loss[loss=0.1101, simple_loss=0.1823, pruned_loss=0.01895, over 4918.00 frames.], tot_loss[loss=0.1436, simple_loss=0.216, pruned_loss=0.03565, over 971185.01 frames.], batch size: 29, lr: 2.61e-04 +2022-05-06 05:12:42,122 INFO [train.py:715] (7/8) Epoch 8, batch 17500, loss[loss=0.1734, simple_loss=0.243, pruned_loss=0.05188, over 4878.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03539, over 970775.30 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:13:23,166 INFO [train.py:715] (7/8) Epoch 8, batch 17550, loss[loss=0.1137, simple_loss=0.1829, pruned_loss=0.02231, over 4830.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03544, over 971299.92 frames.], batch size: 26, lr: 2.61e-04 +2022-05-06 05:14:02,977 INFO [train.py:715] (7/8) Epoch 8, batch 17600, loss[loss=0.1324, simple_loss=0.2095, pruned_loss=0.02768, over 4860.00 frames.], tot_loss[loss=0.1438, simple_loss=0.216, pruned_loss=0.03577, over 971297.95 frames.], batch size: 20, lr: 2.61e-04 +2022-05-06 05:14:41,723 INFO [train.py:715] (7/8) Epoch 8, batch 17650, 
loss[loss=0.1625, simple_loss=0.2231, pruned_loss=0.05097, over 4750.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03588, over 972125.14 frames.], batch size: 19, lr: 2.61e-04 +2022-05-06 05:15:22,841 INFO [train.py:715] (7/8) Epoch 8, batch 17700, loss[loss=0.203, simple_loss=0.2675, pruned_loss=0.06922, over 4980.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2152, pruned_loss=0.03567, over 971536.21 frames.], batch size: 15, lr: 2.61e-04 +2022-05-06 05:16:02,817 INFO [train.py:715] (7/8) Epoch 8, batch 17750, loss[loss=0.1537, simple_loss=0.222, pruned_loss=0.04268, over 4898.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2146, pruned_loss=0.03548, over 970885.87 frames.], batch size: 22, lr: 2.61e-04 +2022-05-06 05:16:43,289 INFO [train.py:715] (7/8) Epoch 8, batch 17800, loss[loss=0.1462, simple_loss=0.2134, pruned_loss=0.03947, over 4888.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2151, pruned_loss=0.03585, over 970555.97 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:17:23,947 INFO [train.py:715] (7/8) Epoch 8, batch 17850, loss[loss=0.09938, simple_loss=0.1822, pruned_loss=0.008254, over 4801.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2151, pruned_loss=0.03557, over 971498.27 frames.], batch size: 21, lr: 2.61e-04 +2022-05-06 05:18:04,819 INFO [train.py:715] (7/8) Epoch 8, batch 17900, loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02893, over 4953.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03464, over 971798.48 frames.], batch size: 29, lr: 2.61e-04 +2022-05-06 05:18:46,219 INFO [train.py:715] (7/8) Epoch 8, batch 17950, loss[loss=0.1309, simple_loss=0.2073, pruned_loss=0.02721, over 4856.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03523, over 971969.13 frames.], batch size: 20, lr: 2.61e-04 +2022-05-06 05:19:26,624 INFO [train.py:715] (7/8) Epoch 8, batch 18000, loss[loss=0.1486, simple_loss=0.2235, pruned_loss=0.03679, over 4878.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03592, over 971996.03 frames.], batch size: 16, lr: 2.61e-04 +2022-05-06 05:19:26,625 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 05:19:36,398 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1073, simple_loss=0.1919, pruned_loss=0.01138, over 914524.00 frames. 
+2022-05-06 05:20:17,012 INFO [train.py:715] (7/8) Epoch 8, batch 18050, loss[loss=0.1268, simple_loss=0.2015, pruned_loss=0.02605, over 4852.00 frames.], tot_loss[loss=0.143, simple_loss=0.2151, pruned_loss=0.03538, over 972011.19 frames.], batch size: 20, lr: 2.60e-04 +2022-05-06 05:20:59,056 INFO [train.py:715] (7/8) Epoch 8, batch 18100, loss[loss=0.1451, simple_loss=0.2166, pruned_loss=0.03679, over 4902.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03616, over 971910.41 frames.], batch size: 17, lr: 2.60e-04 +2022-05-06 05:21:40,108 INFO [train.py:715] (7/8) Epoch 8, batch 18150, loss[loss=0.1592, simple_loss=0.2355, pruned_loss=0.04145, over 4847.00 frames.], tot_loss[loss=0.1442, simple_loss=0.216, pruned_loss=0.03623, over 971756.21 frames.], batch size: 32, lr: 2.60e-04 +2022-05-06 05:22:21,016 INFO [train.py:715] (7/8) Epoch 8, batch 18200, loss[loss=0.1334, simple_loss=0.2051, pruned_loss=0.03089, over 4777.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03597, over 970650.54 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:23:02,797 INFO [train.py:715] (7/8) Epoch 8, batch 18250, loss[loss=0.1275, simple_loss=0.2062, pruned_loss=0.02442, over 4873.00 frames.], tot_loss[loss=0.144, simple_loss=0.2159, pruned_loss=0.03608, over 970799.29 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:23:43,814 INFO [train.py:715] (7/8) Epoch 8, batch 18300, loss[loss=0.1277, simple_loss=0.207, pruned_loss=0.02417, over 4761.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2162, pruned_loss=0.03578, over 970826.23 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:24:25,296 INFO [train.py:715] (7/8) Epoch 8, batch 18350, loss[loss=0.1215, simple_loss=0.2025, pruned_loss=0.02028, over 4891.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2157, pruned_loss=0.03554, over 970507.83 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:25:06,151 INFO [train.py:715] (7/8) Epoch 8, batch 18400, loss[loss=0.167, simple_loss=0.2422, pruned_loss=0.04594, over 4745.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03569, over 970919.46 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:25:47,836 INFO [train.py:715] (7/8) Epoch 8, batch 18450, loss[loss=0.1392, simple_loss=0.2175, pruned_loss=0.03051, over 4970.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2154, pruned_loss=0.03571, over 971290.88 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:26:28,561 INFO [train.py:715] (7/8) Epoch 8, batch 18500, loss[loss=0.1559, simple_loss=0.2252, pruned_loss=0.04328, over 4916.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03561, over 971114.03 frames.], batch size: 23, lr: 2.60e-04 +2022-05-06 05:27:08,960 INFO [train.py:715] (7/8) Epoch 8, batch 18550, loss[loss=0.1626, simple_loss=0.226, pruned_loss=0.04961, over 4898.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2153, pruned_loss=0.03552, over 970914.63 frames.], batch size: 17, lr: 2.60e-04 +2022-05-06 05:27:50,211 INFO [train.py:715] (7/8) Epoch 8, batch 18600, loss[loss=0.1451, simple_loss=0.2213, pruned_loss=0.03444, over 4871.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03533, over 971710.57 frames.], batch size: 13, lr: 2.60e-04 +2022-05-06 05:28:30,421 INFO [train.py:715] (7/8) Epoch 8, batch 18650, loss[loss=0.2077, simple_loss=0.2723, pruned_loss=0.07153, over 4778.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03562, over 971991.51 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:29:09,925 
INFO [train.py:715] (7/8) Epoch 8, batch 18700, loss[loss=0.1453, simple_loss=0.2261, pruned_loss=0.03226, over 4971.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2155, pruned_loss=0.03589, over 972554.61 frames.], batch size: 25, lr: 2.60e-04 +2022-05-06 05:29:49,897 INFO [train.py:715] (7/8) Epoch 8, batch 18750, loss[loss=0.1361, simple_loss=0.2155, pruned_loss=0.02832, over 4931.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03546, over 972817.00 frames.], batch size: 23, lr: 2.60e-04 +2022-05-06 05:30:30,988 INFO [train.py:715] (7/8) Epoch 8, batch 18800, loss[loss=0.1466, simple_loss=0.2236, pruned_loss=0.03481, over 4741.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03524, over 973054.80 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:31:10,616 INFO [train.py:715] (7/8) Epoch 8, batch 18850, loss[loss=0.1374, simple_loss=0.2012, pruned_loss=0.03676, over 4933.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03539, over 972461.59 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:31:50,017 INFO [train.py:715] (7/8) Epoch 8, batch 18900, loss[loss=0.1378, simple_loss=0.2086, pruned_loss=0.03355, over 4908.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2158, pruned_loss=0.03555, over 972707.28 frames.], batch size: 17, lr: 2.60e-04 +2022-05-06 05:32:30,289 INFO [train.py:715] (7/8) Epoch 8, batch 18950, loss[loss=0.1457, simple_loss=0.2157, pruned_loss=0.03789, over 4691.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2163, pruned_loss=0.03544, over 971501.59 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:33:10,177 INFO [train.py:715] (7/8) Epoch 8, batch 19000, loss[loss=0.1513, simple_loss=0.2166, pruned_loss=0.04307, over 4890.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03568, over 971523.38 frames.], batch size: 17, lr: 2.60e-04 +2022-05-06 05:33:50,117 INFO [train.py:715] (7/8) Epoch 8, batch 19050, loss[loss=0.1269, simple_loss=0.2035, pruned_loss=0.02513, over 4871.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03567, over 971900.81 frames.], batch size: 22, lr: 2.60e-04 +2022-05-06 05:34:31,415 INFO [train.py:715] (7/8) Epoch 8, batch 19100, loss[loss=0.1721, simple_loss=0.2481, pruned_loss=0.04802, over 4795.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03537, over 972043.05 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:35:13,329 INFO [train.py:715] (7/8) Epoch 8, batch 19150, loss[loss=0.1448, simple_loss=0.2162, pruned_loss=0.03669, over 4906.00 frames.], tot_loss[loss=0.1431, simple_loss=0.216, pruned_loss=0.03515, over 972773.02 frames.], batch size: 17, lr: 2.60e-04 +2022-05-06 05:35:55,040 INFO [train.py:715] (7/8) Epoch 8, batch 19200, loss[loss=0.1116, simple_loss=0.1827, pruned_loss=0.0203, over 4867.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03487, over 972791.62 frames.], batch size: 20, lr: 2.60e-04 +2022-05-06 05:36:35,261 INFO [train.py:715] (7/8) Epoch 8, batch 19250, loss[loss=0.1497, simple_loss=0.2246, pruned_loss=0.03743, over 4930.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03514, over 974318.20 frames.], batch size: 39, lr: 2.60e-04 +2022-05-06 05:37:17,451 INFO [train.py:715] (7/8) Epoch 8, batch 19300, loss[loss=0.1669, simple_loss=0.2394, pruned_loss=0.04725, over 4981.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2153, pruned_loss=0.03505, over 973410.43 frames.], batch size: 14, lr: 2.60e-04 +2022-05-06 05:37:58,607 INFO [train.py:715] 
(7/8) Epoch 8, batch 19350, loss[loss=0.1361, simple_loss=0.2027, pruned_loss=0.03479, over 4894.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.0355, over 972724.65 frames.], batch size: 19, lr: 2.60e-04 +2022-05-06 05:38:39,845 INFO [train.py:715] (7/8) Epoch 8, batch 19400, loss[loss=0.1691, simple_loss=0.2324, pruned_loss=0.05292, over 4884.00 frames.], tot_loss[loss=0.1438, simple_loss=0.216, pruned_loss=0.03574, over 972435.39 frames.], batch size: 22, lr: 2.60e-04 +2022-05-06 05:39:21,788 INFO [train.py:715] (7/8) Epoch 8, batch 19450, loss[loss=0.146, simple_loss=0.2183, pruned_loss=0.0369, over 4873.00 frames.], tot_loss[loss=0.1438, simple_loss=0.216, pruned_loss=0.03578, over 972755.59 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:40:03,273 INFO [train.py:715] (7/8) Epoch 8, batch 19500, loss[loss=0.1904, simple_loss=0.2565, pruned_loss=0.06219, over 4871.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2173, pruned_loss=0.03623, over 972157.40 frames.], batch size: 16, lr: 2.60e-04 +2022-05-06 05:40:44,564 INFO [train.py:715] (7/8) Epoch 8, batch 19550, loss[loss=0.1364, simple_loss=0.212, pruned_loss=0.03037, over 4808.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2176, pruned_loss=0.03666, over 972701.43 frames.], batch size: 24, lr: 2.60e-04 +2022-05-06 05:41:25,036 INFO [train.py:715] (7/8) Epoch 8, batch 19600, loss[loss=0.1309, simple_loss=0.2037, pruned_loss=0.02905, over 4968.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2172, pruned_loss=0.0365, over 972472.60 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:42:06,544 INFO [train.py:715] (7/8) Epoch 8, batch 19650, loss[loss=0.123, simple_loss=0.1959, pruned_loss=0.02503, over 4775.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.0362, over 972794.00 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:42:47,228 INFO [train.py:715] (7/8) Epoch 8, batch 19700, loss[loss=0.1401, simple_loss=0.2192, pruned_loss=0.03048, over 4777.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2154, pruned_loss=0.03565, over 972491.94 frames.], batch size: 14, lr: 2.60e-04 +2022-05-06 05:43:28,184 INFO [train.py:715] (7/8) Epoch 8, batch 19750, loss[loss=0.1192, simple_loss=0.1795, pruned_loss=0.02947, over 4705.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2145, pruned_loss=0.03516, over 972094.37 frames.], batch size: 15, lr: 2.60e-04 +2022-05-06 05:44:09,861 INFO [train.py:715] (7/8) Epoch 8, batch 19800, loss[loss=0.161, simple_loss=0.2285, pruned_loss=0.04679, over 4986.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03497, over 973588.54 frames.], batch size: 25, lr: 2.60e-04 +2022-05-06 05:44:50,896 INFO [train.py:715] (7/8) Epoch 8, batch 19850, loss[loss=0.1829, simple_loss=0.2571, pruned_loss=0.05429, over 4795.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03571, over 973723.83 frames.], batch size: 24, lr: 2.60e-04 +2022-05-06 05:45:31,214 INFO [train.py:715] (7/8) Epoch 8, batch 19900, loss[loss=0.1481, simple_loss=0.2153, pruned_loss=0.04044, over 4770.00 frames.], tot_loss[loss=0.144, simple_loss=0.216, pruned_loss=0.03606, over 972328.02 frames.], batch size: 14, lr: 2.60e-04 +2022-05-06 05:46:10,975 INFO [train.py:715] (7/8) Epoch 8, batch 19950, loss[loss=0.1463, simple_loss=0.2176, pruned_loss=0.03754, over 4931.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2166, pruned_loss=0.03619, over 973398.04 frames.], batch size: 29, lr: 2.60e-04 +2022-05-06 05:46:51,595 INFO [train.py:715] (7/8) Epoch 8, batch 20000, 
loss[loss=0.1582, simple_loss=0.2265, pruned_loss=0.04494, over 4787.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03586, over 973594.52 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:47:32,115 INFO [train.py:715] (7/8) Epoch 8, batch 20050, loss[loss=0.1439, simple_loss=0.2184, pruned_loss=0.03475, over 4986.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03555, over 974285.64 frames.], batch size: 14, lr: 2.60e-04 +2022-05-06 05:48:12,631 INFO [train.py:715] (7/8) Epoch 8, batch 20100, loss[loss=0.1206, simple_loss=0.1917, pruned_loss=0.02471, over 4800.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.03507, over 973426.13 frames.], batch size: 24, lr: 2.60e-04 +2022-05-06 05:48:53,762 INFO [train.py:715] (7/8) Epoch 8, batch 20150, loss[loss=0.1536, simple_loss=0.2303, pruned_loss=0.03842, over 4912.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2152, pruned_loss=0.03521, over 972355.67 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:49:34,584 INFO [train.py:715] (7/8) Epoch 8, batch 20200, loss[loss=0.128, simple_loss=0.2017, pruned_loss=0.0271, over 4774.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03578, over 972290.60 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:50:15,441 INFO [train.py:715] (7/8) Epoch 8, batch 20250, loss[loss=0.1391, simple_loss=0.2066, pruned_loss=0.03577, over 4783.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03522, over 972773.41 frames.], batch size: 18, lr: 2.60e-04 +2022-05-06 05:50:56,711 INFO [train.py:715] (7/8) Epoch 8, batch 20300, loss[loss=0.1276, simple_loss=0.1934, pruned_loss=0.03093, over 4803.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03497, over 973150.64 frames.], batch size: 12, lr: 2.60e-04 +2022-05-06 05:51:37,708 INFO [train.py:715] (7/8) Epoch 8, batch 20350, loss[loss=0.1606, simple_loss=0.2464, pruned_loss=0.03742, over 4868.00 frames.], tot_loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.03475, over 972125.39 frames.], batch size: 16, lr: 2.59e-04 +2022-05-06 05:52:18,259 INFO [train.py:715] (7/8) Epoch 8, batch 20400, loss[loss=0.1732, simple_loss=0.2359, pruned_loss=0.05521, over 4783.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03511, over 971910.04 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 05:52:58,522 INFO [train.py:715] (7/8) Epoch 8, batch 20450, loss[loss=0.1526, simple_loss=0.2266, pruned_loss=0.03934, over 4781.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2162, pruned_loss=0.03567, over 970850.87 frames.], batch size: 17, lr: 2.59e-04 +2022-05-06 05:53:39,599 INFO [train.py:715] (7/8) Epoch 8, batch 20500, loss[loss=0.1494, simple_loss=0.2208, pruned_loss=0.03899, over 4913.00 frames.], tot_loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03582, over 971737.12 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 05:54:20,091 INFO [train.py:715] (7/8) Epoch 8, batch 20550, loss[loss=0.1451, simple_loss=0.2122, pruned_loss=0.03901, over 4809.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03574, over 972047.03 frames.], batch size: 26, lr: 2.59e-04 +2022-05-06 05:55:00,455 INFO [train.py:715] (7/8) Epoch 8, batch 20600, loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03033, over 4903.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03567, over 972310.54 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 05:55:41,410 INFO [train.py:715] (7/8) Epoch 8, batch 20650, loss[loss=0.167, 
simple_loss=0.2467, pruned_loss=0.04364, over 4789.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2158, pruned_loss=0.03544, over 972932.43 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 05:56:22,562 INFO [train.py:715] (7/8) Epoch 8, batch 20700, loss[loss=0.1377, simple_loss=0.2041, pruned_loss=0.03565, over 4860.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03562, over 972321.01 frames.], batch size: 34, lr: 2.59e-04 +2022-05-06 05:57:02,760 INFO [train.py:715] (7/8) Epoch 8, batch 20750, loss[loss=0.1783, simple_loss=0.2377, pruned_loss=0.05942, over 4925.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2169, pruned_loss=0.0362, over 972360.62 frames.], batch size: 29, lr: 2.59e-04 +2022-05-06 05:57:42,962 INFO [train.py:715] (7/8) Epoch 8, batch 20800, loss[loss=0.1319, simple_loss=0.2042, pruned_loss=0.02982, over 4822.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2168, pruned_loss=0.0362, over 972847.74 frames.], batch size: 26, lr: 2.59e-04 +2022-05-06 05:58:24,044 INFO [train.py:715] (7/8) Epoch 8, batch 20850, loss[loss=0.1471, simple_loss=0.2142, pruned_loss=0.04, over 4800.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03584, over 973108.38 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 05:59:04,432 INFO [train.py:715] (7/8) Epoch 8, batch 20900, loss[loss=0.1302, simple_loss=0.2052, pruned_loss=0.02762, over 4932.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03598, over 973570.42 frames.], batch size: 29, lr: 2.59e-04 +2022-05-06 05:59:43,024 INFO [train.py:715] (7/8) Epoch 8, batch 20950, loss[loss=0.1256, simple_loss=0.1939, pruned_loss=0.02865, over 4757.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03564, over 973022.03 frames.], batch size: 12, lr: 2.59e-04 +2022-05-06 06:00:22,706 INFO [train.py:715] (7/8) Epoch 8, batch 21000, loss[loss=0.156, simple_loss=0.2222, pruned_loss=0.04485, over 4805.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2154, pruned_loss=0.03559, over 972610.46 frames.], batch size: 17, lr: 2.59e-04 +2022-05-06 06:00:22,707 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 06:00:32,254 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1072, simple_loss=0.1919, pruned_loss=0.01129, over 914524.00 frames. 
+2022-05-06 06:01:12,648 INFO [train.py:715] (7/8) Epoch 8, batch 21050, loss[loss=0.1542, simple_loss=0.2336, pruned_loss=0.0374, over 4844.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03603, over 972317.88 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:01:52,991 INFO [train.py:715] (7/8) Epoch 8, batch 21100, loss[loss=0.1354, simple_loss=0.2105, pruned_loss=0.03019, over 4759.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2169, pruned_loss=0.03582, over 971575.85 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:02:31,466 INFO [train.py:715] (7/8) Epoch 8, batch 21150, loss[loss=0.1582, simple_loss=0.2345, pruned_loss=0.04098, over 4805.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2162, pruned_loss=0.03541, over 972257.05 frames.], batch size: 25, lr: 2.59e-04 +2022-05-06 06:03:10,265 INFO [train.py:715] (7/8) Epoch 8, batch 21200, loss[loss=0.1239, simple_loss=0.1927, pruned_loss=0.02758, over 4853.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2172, pruned_loss=0.03587, over 972009.55 frames.], batch size: 20, lr: 2.59e-04 +2022-05-06 06:03:49,969 INFO [train.py:715] (7/8) Epoch 8, batch 21250, loss[loss=0.1723, simple_loss=0.2279, pruned_loss=0.05837, over 4984.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2171, pruned_loss=0.03598, over 972469.56 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 06:04:29,231 INFO [train.py:715] (7/8) Epoch 8, batch 21300, loss[loss=0.1566, simple_loss=0.2273, pruned_loss=0.04293, over 4895.00 frames.], tot_loss[loss=0.145, simple_loss=0.2173, pruned_loss=0.03634, over 972324.48 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:05:07,760 INFO [train.py:715] (7/8) Epoch 8, batch 21350, loss[loss=0.1389, simple_loss=0.2108, pruned_loss=0.0335, over 4945.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03614, over 972155.01 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 06:05:47,406 INFO [train.py:715] (7/8) Epoch 8, batch 21400, loss[loss=0.1012, simple_loss=0.18, pruned_loss=0.01121, over 4892.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2168, pruned_loss=0.0363, over 971662.04 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:06:27,496 INFO [train.py:715] (7/8) Epoch 8, batch 21450, loss[loss=0.1307, simple_loss=0.2022, pruned_loss=0.02962, over 4803.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03591, over 972083.92 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 06:07:06,790 INFO [train.py:715] (7/8) Epoch 8, batch 21500, loss[loss=0.1238, simple_loss=0.1899, pruned_loss=0.02885, over 4877.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2155, pruned_loss=0.036, over 972196.25 frames.], batch size: 13, lr: 2.59e-04 +2022-05-06 06:07:45,793 INFO [train.py:715] (7/8) Epoch 8, batch 21550, loss[loss=0.122, simple_loss=0.1919, pruned_loss=0.02605, over 4918.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, pruned_loss=0.03592, over 971590.17 frames.], batch size: 19, lr: 2.59e-04 +2022-05-06 06:08:25,816 INFO [train.py:715] (7/8) Epoch 8, batch 21600, loss[loss=0.135, simple_loss=0.214, pruned_loss=0.02802, over 4978.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.0358, over 972368.10 frames.], batch size: 35, lr: 2.59e-04 +2022-05-06 06:09:04,797 INFO [train.py:715] (7/8) Epoch 8, batch 21650, loss[loss=0.172, simple_loss=0.2288, pruned_loss=0.05754, over 4705.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03567, over 972295.60 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:09:43,494 INFO 
[train.py:715] (7/8) Epoch 8, batch 21700, loss[loss=0.1327, simple_loss=0.1995, pruned_loss=0.03296, over 4777.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03533, over 972047.17 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 06:10:23,864 INFO [train.py:715] (7/8) Epoch 8, batch 21750, loss[loss=0.1679, simple_loss=0.2442, pruned_loss=0.04574, over 4964.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03558, over 973038.95 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:11:03,700 INFO [train.py:715] (7/8) Epoch 8, batch 21800, loss[loss=0.122, simple_loss=0.1946, pruned_loss=0.02474, over 4800.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.0355, over 971935.63 frames.], batch size: 21, lr: 2.59e-04 +2022-05-06 06:11:42,813 INFO [train.py:715] (7/8) Epoch 8, batch 21850, loss[loss=0.151, simple_loss=0.2296, pruned_loss=0.03616, over 4798.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03524, over 971467.23 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:12:21,179 INFO [train.py:715] (7/8) Epoch 8, batch 21900, loss[loss=0.1354, simple_loss=0.2152, pruned_loss=0.02783, over 4982.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03559, over 972086.32 frames.], batch size: 28, lr: 2.59e-04 +2022-05-06 06:13:00,620 INFO [train.py:715] (7/8) Epoch 8, batch 21950, loss[loss=0.1469, simple_loss=0.2237, pruned_loss=0.03504, over 4775.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.0354, over 972889.89 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 06:13:39,702 INFO [train.py:715] (7/8) Epoch 8, batch 22000, loss[loss=0.1601, simple_loss=0.2323, pruned_loss=0.04399, over 4927.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03588, over 973232.89 frames.], batch size: 23, lr: 2.59e-04 +2022-05-06 06:14:18,328 INFO [train.py:715] (7/8) Epoch 8, batch 22050, loss[loss=0.1686, simple_loss=0.2463, pruned_loss=0.0454, over 4832.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2164, pruned_loss=0.0363, over 973369.98 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:14:58,046 INFO [train.py:715] (7/8) Epoch 8, batch 22100, loss[loss=0.1683, simple_loss=0.2398, pruned_loss=0.04841, over 4913.00 frames.], tot_loss[loss=0.1453, simple_loss=0.2168, pruned_loss=0.03683, over 973469.19 frames.], batch size: 17, lr: 2.59e-04 +2022-05-06 06:15:37,422 INFO [train.py:715] (7/8) Epoch 8, batch 22150, loss[loss=0.1507, simple_loss=0.2285, pruned_loss=0.03641, over 4943.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2168, pruned_loss=0.03701, over 973566.54 frames.], batch size: 39, lr: 2.59e-04 +2022-05-06 06:16:16,520 INFO [train.py:715] (7/8) Epoch 8, batch 22200, loss[loss=0.1144, simple_loss=0.1845, pruned_loss=0.0222, over 4860.00 frames.], tot_loss[loss=0.1454, simple_loss=0.2171, pruned_loss=0.0368, over 973781.23 frames.], batch size: 13, lr: 2.59e-04 +2022-05-06 06:16:55,350 INFO [train.py:715] (7/8) Epoch 8, batch 22250, loss[loss=0.1272, simple_loss=0.2107, pruned_loss=0.02188, over 4913.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2173, pruned_loss=0.03655, over 974211.39 frames.], batch size: 18, lr: 2.59e-04 +2022-05-06 06:17:34,565 INFO [train.py:715] (7/8) Epoch 8, batch 22300, loss[loss=0.1363, simple_loss=0.2087, pruned_loss=0.03195, over 4967.00 frames.], tot_loss[loss=0.145, simple_loss=0.2174, pruned_loss=0.0363, over 974285.89 frames.], batch size: 35, lr: 2.59e-04 +2022-05-06 06:18:13,310 INFO [train.py:715] (7/8) Epoch 8, 
batch 22350, loss[loss=0.1512, simple_loss=0.2177, pruned_loss=0.04232, over 4818.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2175, pruned_loss=0.03639, over 973091.04 frames.], batch size: 26, lr: 2.59e-04 +2022-05-06 06:18:51,905 INFO [train.py:715] (7/8) Epoch 8, batch 22400, loss[loss=0.1252, simple_loss=0.2002, pruned_loss=0.02512, over 4985.00 frames.], tot_loss[loss=0.1457, simple_loss=0.2182, pruned_loss=0.03661, over 972352.18 frames.], batch size: 14, lr: 2.59e-04 +2022-05-06 06:19:31,235 INFO [train.py:715] (7/8) Epoch 8, batch 22450, loss[loss=0.1544, simple_loss=0.2182, pruned_loss=0.04533, over 4794.00 frames.], tot_loss[loss=0.1451, simple_loss=0.2175, pruned_loss=0.03639, over 971326.10 frames.], batch size: 17, lr: 2.59e-04 +2022-05-06 06:20:10,736 INFO [train.py:715] (7/8) Epoch 8, batch 22500, loss[loss=0.1222, simple_loss=0.1956, pruned_loss=0.02442, over 4848.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2162, pruned_loss=0.03641, over 970680.13 frames.], batch size: 20, lr: 2.59e-04 +2022-05-06 06:20:49,335 INFO [train.py:715] (7/8) Epoch 8, batch 22550, loss[loss=0.1149, simple_loss=0.1907, pruned_loss=0.01952, over 4959.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2157, pruned_loss=0.03597, over 970420.84 frames.], batch size: 24, lr: 2.59e-04 +2022-05-06 06:21:28,254 INFO [train.py:715] (7/8) Epoch 8, batch 22600, loss[loss=0.1386, simple_loss=0.2186, pruned_loss=0.02925, over 4966.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2159, pruned_loss=0.03572, over 970318.64 frames.], batch size: 15, lr: 2.59e-04 +2022-05-06 06:22:07,736 INFO [train.py:715] (7/8) Epoch 8, batch 22650, loss[loss=0.1335, simple_loss=0.2084, pruned_loss=0.0293, over 4977.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03548, over 971557.17 frames.], batch size: 15, lr: 2.58e-04 +2022-05-06 06:22:46,461 INFO [train.py:715] (7/8) Epoch 8, batch 22700, loss[loss=0.1297, simple_loss=0.2043, pruned_loss=0.02756, over 4903.00 frames.], tot_loss[loss=0.143, simple_loss=0.2151, pruned_loss=0.0355, over 972416.43 frames.], batch size: 17, lr: 2.58e-04 +2022-05-06 06:23:24,778 INFO [train.py:715] (7/8) Epoch 8, batch 22750, loss[loss=0.1561, simple_loss=0.2386, pruned_loss=0.03673, over 4772.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.03506, over 971240.52 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:24:04,598 INFO [train.py:715] (7/8) Epoch 8, batch 22800, loss[loss=0.1358, simple_loss=0.2082, pruned_loss=0.03169, over 4969.00 frames.], tot_loss[loss=0.142, simple_loss=0.2141, pruned_loss=0.0349, over 972124.32 frames.], batch size: 24, lr: 2.58e-04 +2022-05-06 06:24:43,772 INFO [train.py:715] (7/8) Epoch 8, batch 22850, loss[loss=0.1292, simple_loss=0.2031, pruned_loss=0.02767, over 4925.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2143, pruned_loss=0.03501, over 971509.18 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:25:22,845 INFO [train.py:715] (7/8) Epoch 8, batch 22900, loss[loss=0.167, simple_loss=0.2394, pruned_loss=0.04727, over 4800.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.03506, over 972143.17 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:26:01,959 INFO [train.py:715] (7/8) Epoch 8, batch 22950, loss[loss=0.161, simple_loss=0.2292, pruned_loss=0.0464, over 4766.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03518, over 972340.93 frames.], batch size: 19, lr: 2.58e-04 +2022-05-06 06:26:41,734 INFO [train.py:715] (7/8) Epoch 8, batch 23000, 
loss[loss=0.1299, simple_loss=0.1978, pruned_loss=0.03105, over 4763.00 frames.], tot_loss[loss=0.1407, simple_loss=0.213, pruned_loss=0.03424, over 972209.52 frames.], batch size: 17, lr: 2.58e-04 +2022-05-06 06:27:20,527 INFO [train.py:715] (7/8) Epoch 8, batch 23050, loss[loss=0.1468, simple_loss=0.2116, pruned_loss=0.04096, over 4980.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2141, pruned_loss=0.03472, over 971876.15 frames.], batch size: 14, lr: 2.58e-04 +2022-05-06 06:27:59,223 INFO [train.py:715] (7/8) Epoch 8, batch 23100, loss[loss=0.1741, simple_loss=0.2445, pruned_loss=0.05187, over 4810.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03452, over 971920.33 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:28:39,377 INFO [train.py:715] (7/8) Epoch 8, batch 23150, loss[loss=0.1216, simple_loss=0.1989, pruned_loss=0.02218, over 4883.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03412, over 971622.37 frames.], batch size: 38, lr: 2.58e-04 +2022-05-06 06:29:18,754 INFO [train.py:715] (7/8) Epoch 8, batch 23200, loss[loss=0.1358, simple_loss=0.2106, pruned_loss=0.03048, over 4929.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.03484, over 972248.46 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:29:57,397 INFO [train.py:715] (7/8) Epoch 8, batch 23250, loss[loss=0.1233, simple_loss=0.1979, pruned_loss=0.02431, over 4947.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2147, pruned_loss=0.03475, over 972386.22 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:30:36,513 INFO [train.py:715] (7/8) Epoch 8, batch 23300, loss[loss=0.1325, simple_loss=0.204, pruned_loss=0.03046, over 4964.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03531, over 972755.25 frames.], batch size: 24, lr: 2.58e-04 +2022-05-06 06:31:16,265 INFO [train.py:715] (7/8) Epoch 8, batch 23350, loss[loss=0.1349, simple_loss=0.211, pruned_loss=0.02944, over 4857.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03579, over 971919.14 frames.], batch size: 16, lr: 2.58e-04 +2022-05-06 06:31:55,027 INFO [train.py:715] (7/8) Epoch 8, batch 23400, loss[loss=0.1303, simple_loss=0.2081, pruned_loss=0.02619, over 4980.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2155, pruned_loss=0.03548, over 971823.70 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:32:33,890 INFO [train.py:715] (7/8) Epoch 8, batch 23450, loss[loss=0.1304, simple_loss=0.1958, pruned_loss=0.0325, over 4988.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.03546, over 972497.27 frames.], batch size: 14, lr: 2.58e-04 +2022-05-06 06:33:13,365 INFO [train.py:715] (7/8) Epoch 8, batch 23500, loss[loss=0.1465, simple_loss=0.2272, pruned_loss=0.03292, over 4788.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.0358, over 972557.35 frames.], batch size: 17, lr: 2.58e-04 +2022-05-06 06:33:52,532 INFO [train.py:715] (7/8) Epoch 8, batch 23550, loss[loss=0.1423, simple_loss=0.2216, pruned_loss=0.03148, over 4758.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.0354, over 972403.18 frames.], batch size: 19, lr: 2.58e-04 +2022-05-06 06:34:31,318 INFO [train.py:715] (7/8) Epoch 8, batch 23600, loss[loss=0.1204, simple_loss=0.2003, pruned_loss=0.02024, over 4831.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.03532, over 971479.77 frames.], batch size: 12, lr: 2.58e-04 +2022-05-06 06:35:10,239 INFO [train.py:715] (7/8) Epoch 8, batch 23650, loss[loss=0.1406, 
simple_loss=0.2089, pruned_loss=0.03613, over 4753.00 frames.], tot_loss[loss=0.1432, simple_loss=0.215, pruned_loss=0.03569, over 970582.48 frames.], batch size: 16, lr: 2.58e-04 +2022-05-06 06:35:50,047 INFO [train.py:715] (7/8) Epoch 8, batch 23700, loss[loss=0.1181, simple_loss=0.1913, pruned_loss=0.02243, over 4987.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2148, pruned_loss=0.03552, over 971209.73 frames.], batch size: 14, lr: 2.58e-04 +2022-05-06 06:36:28,666 INFO [train.py:715] (7/8) Epoch 8, batch 23750, loss[loss=0.1423, simple_loss=0.2142, pruned_loss=0.03518, over 4962.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03534, over 971554.46 frames.], batch size: 14, lr: 2.58e-04 +2022-05-06 06:37:07,516 INFO [train.py:715] (7/8) Epoch 8, batch 23800, loss[loss=0.1505, simple_loss=0.2232, pruned_loss=0.03888, over 4826.00 frames.], tot_loss[loss=0.1439, simple_loss=0.216, pruned_loss=0.03586, over 972545.52 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:37:46,985 INFO [train.py:715] (7/8) Epoch 8, batch 23850, loss[loss=0.1639, simple_loss=0.2418, pruned_loss=0.04304, over 4807.00 frames.], tot_loss[loss=0.1447, simple_loss=0.217, pruned_loss=0.03622, over 972390.74 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:38:26,641 INFO [train.py:715] (7/8) Epoch 8, batch 23900, loss[loss=0.1333, simple_loss=0.2061, pruned_loss=0.03029, over 4929.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2171, pruned_loss=0.03617, over 972775.52 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:39:05,509 INFO [train.py:715] (7/8) Epoch 8, batch 23950, loss[loss=0.1249, simple_loss=0.2056, pruned_loss=0.02209, over 4940.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2172, pruned_loss=0.03606, over 973241.37 frames.], batch size: 29, lr: 2.58e-04 +2022-05-06 06:39:44,888 INFO [train.py:715] (7/8) Epoch 8, batch 24000, loss[loss=0.1591, simple_loss=0.2249, pruned_loss=0.04663, over 4917.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03611, over 972918.61 frames.], batch size: 29, lr: 2.58e-04 +2022-05-06 06:39:44,888 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 06:39:54,530 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1075, simple_loss=0.192, pruned_loss=0.01146, over 914524.00 frames. 
+2022-05-06 06:40:33,719 INFO [train.py:715] (7/8) Epoch 8, batch 24050, loss[loss=0.1249, simple_loss=0.1924, pruned_loss=0.02867, over 4743.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2168, pruned_loss=0.03614, over 972622.48 frames.], batch size: 12, lr: 2.58e-04 +2022-05-06 06:41:13,151 INFO [train.py:715] (7/8) Epoch 8, batch 24100, loss[loss=0.1042, simple_loss=0.185, pruned_loss=0.01168, over 4755.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03554, over 973171.17 frames.], batch size: 19, lr: 2.58e-04 +2022-05-06 06:41:52,118 INFO [train.py:715] (7/8) Epoch 8, batch 24150, loss[loss=0.1277, simple_loss=0.2067, pruned_loss=0.02431, over 4925.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.035, over 974132.84 frames.], batch size: 18, lr: 2.58e-04 +2022-05-06 06:42:31,049 INFO [train.py:715] (7/8) Epoch 8, batch 24200, loss[loss=0.1551, simple_loss=0.2269, pruned_loss=0.04169, over 4767.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2147, pruned_loss=0.03553, over 973573.50 frames.], batch size: 19, lr: 2.58e-04 +2022-05-06 06:43:11,239 INFO [train.py:715] (7/8) Epoch 8, batch 24250, loss[loss=0.1744, simple_loss=0.2328, pruned_loss=0.05798, over 4843.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2135, pruned_loss=0.035, over 973204.07 frames.], batch size: 30, lr: 2.58e-04 +2022-05-06 06:43:50,604 INFO [train.py:715] (7/8) Epoch 8, batch 24300, loss[loss=0.1909, simple_loss=0.2539, pruned_loss=0.06389, over 4899.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2143, pruned_loss=0.03516, over 974195.28 frames.], batch size: 19, lr: 2.58e-04 +2022-05-06 06:44:29,315 INFO [train.py:715] (7/8) Epoch 8, batch 24350, loss[loss=0.1474, simple_loss=0.2309, pruned_loss=0.03196, over 4975.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2156, pruned_loss=0.03576, over 973279.66 frames.], batch size: 28, lr: 2.58e-04 +2022-05-06 06:45:08,115 INFO [train.py:715] (7/8) Epoch 8, batch 24400, loss[loss=0.1713, simple_loss=0.2412, pruned_loss=0.05068, over 4782.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03596, over 973133.85 frames.], batch size: 14, lr: 2.58e-04 +2022-05-06 06:45:47,157 INFO [train.py:715] (7/8) Epoch 8, batch 24450, loss[loss=0.167, simple_loss=0.2356, pruned_loss=0.04922, over 4902.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2163, pruned_loss=0.03634, over 972498.86 frames.], batch size: 39, lr: 2.58e-04 +2022-05-06 06:46:26,137 INFO [train.py:715] (7/8) Epoch 8, batch 24500, loss[loss=0.1475, simple_loss=0.2048, pruned_loss=0.04514, over 4687.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03606, over 972678.40 frames.], batch size: 15, lr: 2.58e-04 +2022-05-06 06:47:04,987 INFO [train.py:715] (7/8) Epoch 8, batch 24550, loss[loss=0.1146, simple_loss=0.1908, pruned_loss=0.01917, over 4808.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03557, over 972636.34 frames.], batch size: 26, lr: 2.58e-04 +2022-05-06 06:47:44,931 INFO [train.py:715] (7/8) Epoch 8, batch 24600, loss[loss=0.1484, simple_loss=0.2153, pruned_loss=0.04071, over 4883.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2154, pruned_loss=0.03582, over 971941.02 frames.], batch size: 16, lr: 2.58e-04 +2022-05-06 06:48:24,241 INFO [train.py:715] (7/8) Epoch 8, batch 24650, loss[loss=0.1461, simple_loss=0.2177, pruned_loss=0.0373, over 4808.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2154, pruned_loss=0.03588, over 971091.59 frames.], batch size: 25, lr: 2.58e-04 +2022-05-06 06:49:02,879 
INFO [train.py:715] (7/8) Epoch 8, batch 24700, loss[loss=0.13, simple_loss=0.2108, pruned_loss=0.02454, over 4938.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2156, pruned_loss=0.0361, over 971460.63 frames.], batch size: 29, lr: 2.58e-04 +2022-05-06 06:49:42,051 INFO [train.py:715] (7/8) Epoch 8, batch 24750, loss[loss=0.1396, simple_loss=0.205, pruned_loss=0.03703, over 4969.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2159, pruned_loss=0.03632, over 972466.54 frames.], batch size: 35, lr: 2.58e-04 +2022-05-06 06:50:21,622 INFO [train.py:715] (7/8) Epoch 8, batch 24800, loss[loss=0.1664, simple_loss=0.2387, pruned_loss=0.0471, over 4934.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2162, pruned_loss=0.03626, over 971750.09 frames.], batch size: 39, lr: 2.58e-04 +2022-05-06 06:51:00,475 INFO [train.py:715] (7/8) Epoch 8, batch 24850, loss[loss=0.1324, simple_loss=0.2055, pruned_loss=0.02961, over 4934.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2154, pruned_loss=0.03588, over 972218.86 frames.], batch size: 21, lr: 2.58e-04 +2022-05-06 06:51:39,143 INFO [train.py:715] (7/8) Epoch 8, batch 24900, loss[loss=0.1705, simple_loss=0.2296, pruned_loss=0.05572, over 4896.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2143, pruned_loss=0.03541, over 972897.00 frames.], batch size: 17, lr: 2.58e-04 +2022-05-06 06:52:19,145 INFO [train.py:715] (7/8) Epoch 8, batch 24950, loss[loss=0.1625, simple_loss=0.2248, pruned_loss=0.05009, over 4958.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2155, pruned_loss=0.03594, over 973463.60 frames.], batch size: 35, lr: 2.58e-04 +2022-05-06 06:52:58,631 INFO [train.py:715] (7/8) Epoch 8, batch 25000, loss[loss=0.153, simple_loss=0.2307, pruned_loss=0.03759, over 4888.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.03541, over 973660.10 frames.], batch size: 38, lr: 2.57e-04 +2022-05-06 06:53:37,565 INFO [train.py:715] (7/8) Epoch 8, batch 25050, loss[loss=0.1363, simple_loss=0.2161, pruned_loss=0.02829, over 4810.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.03495, over 973558.44 frames.], batch size: 26, lr: 2.57e-04 +2022-05-06 06:54:16,393 INFO [train.py:715] (7/8) Epoch 8, batch 25100, loss[loss=0.1564, simple_loss=0.2199, pruned_loss=0.04644, over 4699.00 frames.], tot_loss[loss=0.142, simple_loss=0.2148, pruned_loss=0.03463, over 972733.36 frames.], batch size: 15, lr: 2.57e-04 +2022-05-06 06:54:55,807 INFO [train.py:715] (7/8) Epoch 8, batch 25150, loss[loss=0.1496, simple_loss=0.2238, pruned_loss=0.03772, over 4833.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03479, over 972899.98 frames.], batch size: 15, lr: 2.57e-04 +2022-05-06 06:55:34,832 INFO [train.py:715] (7/8) Epoch 8, batch 25200, loss[loss=0.1355, simple_loss=0.2107, pruned_loss=0.03015, over 4858.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2152, pruned_loss=0.03514, over 973129.88 frames.], batch size: 20, lr: 2.57e-04 +2022-05-06 06:56:13,822 INFO [train.py:715] (7/8) Epoch 8, batch 25250, loss[loss=0.1464, simple_loss=0.2189, pruned_loss=0.03696, over 4734.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2158, pruned_loss=0.03542, over 972363.75 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 06:56:53,391 INFO [train.py:715] (7/8) Epoch 8, batch 25300, loss[loss=0.1125, simple_loss=0.1769, pruned_loss=0.02408, over 4987.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2165, pruned_loss=0.03596, over 972645.44 frames.], batch size: 28, lr: 2.57e-04 +2022-05-06 06:57:32,356 INFO [train.py:715] (7/8) 
Epoch 8, batch 25350, loss[loss=0.1915, simple_loss=0.2518, pruned_loss=0.06557, over 4859.00 frames.], tot_loss[loss=0.1447, simple_loss=0.217, pruned_loss=0.0362, over 973260.91 frames.], batch size: 30, lr: 2.57e-04 +2022-05-06 06:58:11,172 INFO [train.py:715] (7/8) Epoch 8, batch 25400, loss[loss=0.1498, simple_loss=0.2293, pruned_loss=0.03513, over 4789.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2176, pruned_loss=0.03695, over 972922.30 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 06:58:50,230 INFO [train.py:715] (7/8) Epoch 8, batch 25450, loss[loss=0.1372, simple_loss=0.1986, pruned_loss=0.03787, over 4929.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2166, pruned_loss=0.03648, over 972785.94 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 06:59:30,374 INFO [train.py:715] (7/8) Epoch 8, batch 25500, loss[loss=0.1261, simple_loss=0.1939, pruned_loss=0.0291, over 4648.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2166, pruned_loss=0.0361, over 971785.98 frames.], batch size: 13, lr: 2.57e-04 +2022-05-06 07:00:12,382 INFO [train.py:715] (7/8) Epoch 8, batch 25550, loss[loss=0.1449, simple_loss=0.2131, pruned_loss=0.03833, over 4971.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03577, over 972245.82 frames.], batch size: 25, lr: 2.57e-04 +2022-05-06 07:00:51,655 INFO [train.py:715] (7/8) Epoch 8, batch 25600, loss[loss=0.174, simple_loss=0.2202, pruned_loss=0.06388, over 4828.00 frames.], tot_loss[loss=0.144, simple_loss=0.2164, pruned_loss=0.03576, over 972462.30 frames.], batch size: 12, lr: 2.57e-04 +2022-05-06 07:01:30,739 INFO [train.py:715] (7/8) Epoch 8, batch 25650, loss[loss=0.1321, simple_loss=0.2074, pruned_loss=0.02841, over 4790.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03569, over 972169.77 frames.], batch size: 17, lr: 2.57e-04 +2022-05-06 07:02:09,697 INFO [train.py:715] (7/8) Epoch 8, batch 25700, loss[loss=0.1445, simple_loss=0.2091, pruned_loss=0.03991, over 4884.00 frames.], tot_loss[loss=0.144, simple_loss=0.2162, pruned_loss=0.0359, over 972855.60 frames.], batch size: 22, lr: 2.57e-04 +2022-05-06 07:02:48,865 INFO [train.py:715] (7/8) Epoch 8, batch 25750, loss[loss=0.13, simple_loss=0.2037, pruned_loss=0.02813, over 4933.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2165, pruned_loss=0.03601, over 973645.78 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 07:03:27,689 INFO [train.py:715] (7/8) Epoch 8, batch 25800, loss[loss=0.1458, simple_loss=0.2186, pruned_loss=0.03653, over 4775.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03574, over 974072.67 frames.], batch size: 14, lr: 2.57e-04 +2022-05-06 07:04:06,655 INFO [train.py:715] (7/8) Epoch 8, batch 25850, loss[loss=0.142, simple_loss=0.2186, pruned_loss=0.03266, over 4975.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.0354, over 973597.39 frames.], batch size: 14, lr: 2.57e-04 +2022-05-06 07:04:45,940 INFO [train.py:715] (7/8) Epoch 8, batch 25900, loss[loss=0.1154, simple_loss=0.1954, pruned_loss=0.01771, over 4759.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03534, over 973730.53 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 07:05:24,610 INFO [train.py:715] (7/8) Epoch 8, batch 25950, loss[loss=0.1256, simple_loss=0.1911, pruned_loss=0.03008, over 4783.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.0362, over 973729.16 frames.], batch size: 17, lr: 2.57e-04 +2022-05-06 07:06:03,742 INFO [train.py:715] (7/8) Epoch 8, batch 26000, 
loss[loss=0.1303, simple_loss=0.2064, pruned_loss=0.02707, over 4778.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03609, over 972987.62 frames.], batch size: 18, lr: 2.57e-04 +2022-05-06 07:06:42,909 INFO [train.py:715] (7/8) Epoch 8, batch 26050, loss[loss=0.1445, simple_loss=0.2179, pruned_loss=0.03558, over 4775.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03553, over 972902.11 frames.], batch size: 17, lr: 2.57e-04 +2022-05-06 07:07:21,669 INFO [train.py:715] (7/8) Epoch 8, batch 26100, loss[loss=0.1292, simple_loss=0.1945, pruned_loss=0.032, over 4966.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03501, over 972808.09 frames.], batch size: 15, lr: 2.57e-04 +2022-05-06 07:08:01,301 INFO [train.py:715] (7/8) Epoch 8, batch 26150, loss[loss=0.1446, simple_loss=0.2261, pruned_loss=0.03151, over 4793.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03472, over 973136.13 frames.], batch size: 24, lr: 2.57e-04 +2022-05-06 07:08:40,494 INFO [train.py:715] (7/8) Epoch 8, batch 26200, loss[loss=0.1551, simple_loss=0.2301, pruned_loss=0.04007, over 4811.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2138, pruned_loss=0.0343, over 972709.89 frames.], batch size: 15, lr: 2.57e-04 +2022-05-06 07:09:19,623 INFO [train.py:715] (7/8) Epoch 8, batch 26250, loss[loss=0.147, simple_loss=0.2342, pruned_loss=0.02989, over 4884.00 frames.], tot_loss[loss=0.142, simple_loss=0.2143, pruned_loss=0.03481, over 972752.58 frames.], batch size: 22, lr: 2.57e-04 +2022-05-06 07:09:57,937 INFO [train.py:715] (7/8) Epoch 8, batch 26300, loss[loss=0.1647, simple_loss=0.2269, pruned_loss=0.05124, over 4819.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03519, over 973096.93 frames.], batch size: 26, lr: 2.57e-04 +2022-05-06 07:10:37,573 INFO [train.py:715] (7/8) Epoch 8, batch 26350, loss[loss=0.147, simple_loss=0.223, pruned_loss=0.03556, over 4969.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2158, pruned_loss=0.03582, over 973725.22 frames.], batch size: 15, lr: 2.57e-04 +2022-05-06 07:11:16,888 INFO [train.py:715] (7/8) Epoch 8, batch 26400, loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02997, over 4979.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2169, pruned_loss=0.03601, over 973627.68 frames.], batch size: 25, lr: 2.57e-04 +2022-05-06 07:11:55,839 INFO [train.py:715] (7/8) Epoch 8, batch 26450, loss[loss=0.1533, simple_loss=0.2358, pruned_loss=0.03541, over 4935.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2167, pruned_loss=0.03574, over 973335.66 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 07:12:34,674 INFO [train.py:715] (7/8) Epoch 8, batch 26500, loss[loss=0.1711, simple_loss=0.237, pruned_loss=0.05259, over 4945.00 frames.], tot_loss[loss=0.1444, simple_loss=0.217, pruned_loss=0.03591, over 972886.80 frames.], batch size: 39, lr: 2.57e-04 +2022-05-06 07:13:13,275 INFO [train.py:715] (7/8) Epoch 8, batch 26550, loss[loss=0.1465, simple_loss=0.2155, pruned_loss=0.03878, over 4766.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2175, pruned_loss=0.03567, over 972524.78 frames.], batch size: 19, lr: 2.57e-04 +2022-05-06 07:13:52,658 INFO [train.py:715] (7/8) Epoch 8, batch 26600, loss[loss=0.1483, simple_loss=0.2242, pruned_loss=0.03616, over 4926.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2172, pruned_loss=0.03586, over 972065.14 frames.], batch size: 29, lr: 2.57e-04 +2022-05-06 07:14:30,716 INFO [train.py:715] (7/8) Epoch 8, batch 26650, loss[loss=0.1276, 
simple_loss=0.2073, pruned_loss=0.02391, over 4891.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2168, pruned_loss=0.03549, over 971900.66 frames.], batch size: 22, lr: 2.57e-04 +2022-05-06 07:15:10,079 INFO [train.py:715] (7/8) Epoch 8, batch 26700, loss[loss=0.1906, simple_loss=0.2426, pruned_loss=0.06927, over 4955.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2169, pruned_loss=0.03586, over 972295.19 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 07:15:49,151 INFO [train.py:715] (7/8) Epoch 8, batch 26750, loss[loss=0.1659, simple_loss=0.2463, pruned_loss=0.04273, over 4938.00 frames.], tot_loss[loss=0.144, simple_loss=0.2169, pruned_loss=0.03559, over 971845.43 frames.], batch size: 23, lr: 2.57e-04 +2022-05-06 07:16:27,932 INFO [train.py:715] (7/8) Epoch 8, batch 26800, loss[loss=0.1282, simple_loss=0.2032, pruned_loss=0.02659, over 4922.00 frames.], tot_loss[loss=0.144, simple_loss=0.217, pruned_loss=0.03554, over 971805.09 frames.], batch size: 29, lr: 2.57e-04 +2022-05-06 07:17:07,167 INFO [train.py:715] (7/8) Epoch 8, batch 26850, loss[loss=0.1498, simple_loss=0.2223, pruned_loss=0.03869, over 4844.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2165, pruned_loss=0.03566, over 971871.43 frames.], batch size: 32, lr: 2.57e-04 +2022-05-06 07:17:46,415 INFO [train.py:715] (7/8) Epoch 8, batch 26900, loss[loss=0.1328, simple_loss=0.2085, pruned_loss=0.02853, over 4755.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03511, over 971881.26 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 07:18:25,466 INFO [train.py:715] (7/8) Epoch 8, batch 26950, loss[loss=0.1244, simple_loss=0.1938, pruned_loss=0.02744, over 4813.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03562, over 971769.64 frames.], batch size: 13, lr: 2.57e-04 +2022-05-06 07:19:04,352 INFO [train.py:715] (7/8) Epoch 8, batch 27000, loss[loss=0.1416, simple_loss=0.2116, pruned_loss=0.03577, over 4822.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.03591, over 971568.48 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 07:19:04,353 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 07:19:13,680 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1072, simple_loss=0.1919, pruned_loss=0.01129, over 914524.00 frames. 
+2022-05-06 07:19:52,527 INFO [train.py:715] (7/8) Epoch 8, batch 27050, loss[loss=0.1281, simple_loss=0.2025, pruned_loss=0.02683, over 4938.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2159, pruned_loss=0.03575, over 972642.53 frames.], batch size: 21, lr: 2.57e-04 +2022-05-06 07:20:31,871 INFO [train.py:715] (7/8) Epoch 8, batch 27100, loss[loss=0.1443, simple_loss=0.2104, pruned_loss=0.03904, over 4752.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03554, over 972888.66 frames.], batch size: 16, lr: 2.57e-04 +2022-05-06 07:21:10,971 INFO [train.py:715] (7/8) Epoch 8, batch 27150, loss[loss=0.1434, simple_loss=0.2147, pruned_loss=0.03603, over 4916.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2159, pruned_loss=0.03578, over 971860.43 frames.], batch size: 23, lr: 2.57e-04 +2022-05-06 07:21:49,181 INFO [train.py:715] (7/8) Epoch 8, batch 27200, loss[loss=0.1381, simple_loss=0.2084, pruned_loss=0.0339, over 4789.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03619, over 972901.93 frames.], batch size: 14, lr: 2.57e-04 +2022-05-06 07:22:28,511 INFO [train.py:715] (7/8) Epoch 8, batch 27250, loss[loss=0.1727, simple_loss=0.2381, pruned_loss=0.0536, over 4859.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2165, pruned_loss=0.03593, over 972787.07 frames.], batch size: 32, lr: 2.57e-04 +2022-05-06 07:23:07,827 INFO [train.py:715] (7/8) Epoch 8, batch 27300, loss[loss=0.1474, simple_loss=0.2264, pruned_loss=0.0342, over 4816.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2169, pruned_loss=0.03576, over 973181.50 frames.], batch size: 26, lr: 2.57e-04 +2022-05-06 07:23:46,494 INFO [train.py:715] (7/8) Epoch 8, batch 27350, loss[loss=0.132, simple_loss=0.2038, pruned_loss=0.03012, over 4912.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2164, pruned_loss=0.03574, over 972525.19 frames.], batch size: 29, lr: 2.57e-04 +2022-05-06 07:24:25,183 INFO [train.py:715] (7/8) Epoch 8, batch 27400, loss[loss=0.1455, simple_loss=0.2176, pruned_loss=0.03668, over 4792.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2166, pruned_loss=0.03593, over 972568.92 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:25:04,325 INFO [train.py:715] (7/8) Epoch 8, batch 27450, loss[loss=0.1374, simple_loss=0.2134, pruned_loss=0.03073, over 4927.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03653, over 973591.24 frames.], batch size: 29, lr: 2.56e-04 +2022-05-06 07:25:43,019 INFO [train.py:715] (7/8) Epoch 8, batch 27500, loss[loss=0.122, simple_loss=0.1994, pruned_loss=0.02231, over 4970.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03599, over 974025.89 frames.], batch size: 35, lr: 2.56e-04 +2022-05-06 07:26:21,673 INFO [train.py:715] (7/8) Epoch 8, batch 27550, loss[loss=0.1631, simple_loss=0.2301, pruned_loss=0.04802, over 4911.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03607, over 974548.52 frames.], batch size: 39, lr: 2.56e-04 +2022-05-06 07:27:01,334 INFO [train.py:715] (7/8) Epoch 8, batch 27600, loss[loss=0.1453, simple_loss=0.2165, pruned_loss=0.03707, over 4884.00 frames.], tot_loss[loss=0.1441, simple_loss=0.216, pruned_loss=0.0361, over 974116.49 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:27:40,426 INFO [train.py:715] (7/8) Epoch 8, batch 27650, loss[loss=0.13, simple_loss=0.2022, pruned_loss=0.02896, over 4806.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2164, pruned_loss=0.03619, over 973524.29 frames.], batch size: 25, lr: 2.56e-04 +2022-05-06 07:28:19,093 
INFO [train.py:715] (7/8) Epoch 8, batch 27700, loss[loss=0.1477, simple_loss=0.2174, pruned_loss=0.03904, over 4991.00 frames.], tot_loss[loss=0.1444, simple_loss=0.216, pruned_loss=0.0364, over 972826.55 frames.], batch size: 20, lr: 2.56e-04 +2022-05-06 07:28:58,325 INFO [train.py:715] (7/8) Epoch 8, batch 27750, loss[loss=0.1327, simple_loss=0.2104, pruned_loss=0.02753, over 4822.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.03651, over 973010.90 frames.], batch size: 26, lr: 2.56e-04 +2022-05-06 07:29:38,021 INFO [train.py:715] (7/8) Epoch 8, batch 27800, loss[loss=0.1389, simple_loss=0.2116, pruned_loss=0.0331, over 4888.00 frames.], tot_loss[loss=0.1433, simple_loss=0.215, pruned_loss=0.03582, over 972535.72 frames.], batch size: 22, lr: 2.56e-04 +2022-05-06 07:30:16,795 INFO [train.py:715] (7/8) Epoch 8, batch 27850, loss[loss=0.143, simple_loss=0.2157, pruned_loss=0.03515, over 4789.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03606, over 972279.74 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:30:54,916 INFO [train.py:715] (7/8) Epoch 8, batch 27900, loss[loss=0.122, simple_loss=0.1927, pruned_loss=0.02565, over 4960.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2153, pruned_loss=0.03582, over 972078.57 frames.], batch size: 15, lr: 2.56e-04 +2022-05-06 07:31:34,149 INFO [train.py:715] (7/8) Epoch 8, batch 27950, loss[loss=0.1622, simple_loss=0.2282, pruned_loss=0.04815, over 4751.00 frames.], tot_loss[loss=0.1442, simple_loss=0.216, pruned_loss=0.03617, over 971942.84 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:32:13,475 INFO [train.py:715] (7/8) Epoch 8, batch 28000, loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03269, over 4950.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2159, pruned_loss=0.03639, over 971843.19 frames.], batch size: 39, lr: 2.56e-04 +2022-05-06 07:32:51,691 INFO [train.py:715] (7/8) Epoch 8, batch 28050, loss[loss=0.1662, simple_loss=0.2233, pruned_loss=0.05456, over 4785.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2156, pruned_loss=0.03629, over 972353.47 frames.], batch size: 12, lr: 2.56e-04 +2022-05-06 07:33:31,446 INFO [train.py:715] (7/8) Epoch 8, batch 28100, loss[loss=0.1242, simple_loss=0.1947, pruned_loss=0.02683, over 4868.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2156, pruned_loss=0.03591, over 972341.94 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:34:10,513 INFO [train.py:715] (7/8) Epoch 8, batch 28150, loss[loss=0.1313, simple_loss=0.1972, pruned_loss=0.03269, over 4813.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03561, over 972684.13 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:34:49,972 INFO [train.py:715] (7/8) Epoch 8, batch 28200, loss[loss=0.1435, simple_loss=0.2101, pruned_loss=0.03844, over 4911.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03521, over 972459.30 frames.], batch size: 29, lr: 2.56e-04 +2022-05-06 07:35:29,404 INFO [train.py:715] (7/8) Epoch 8, batch 28250, loss[loss=0.1263, simple_loss=0.2052, pruned_loss=0.02366, over 4818.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2152, pruned_loss=0.03528, over 972964.61 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:36:09,673 INFO [train.py:715] (7/8) Epoch 8, batch 28300, loss[loss=0.1456, simple_loss=0.2149, pruned_loss=0.0381, over 4815.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2153, pruned_loss=0.03551, over 972057.02 frames.], batch size: 26, lr: 2.56e-04 +2022-05-06 07:36:49,590 INFO [train.py:715] (7/8) 
Epoch 8, batch 28350, loss[loss=0.1202, simple_loss=0.1981, pruned_loss=0.02108, over 4907.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03481, over 972181.72 frames.], batch size: 19, lr: 2.56e-04 +2022-05-06 07:37:28,936 INFO [train.py:715] (7/8) Epoch 8, batch 28400, loss[loss=0.1437, simple_loss=0.2261, pruned_loss=0.03062, over 4822.00 frames.], tot_loss[loss=0.143, simple_loss=0.2151, pruned_loss=0.03541, over 972519.50 frames.], batch size: 15, lr: 2.56e-04 +2022-05-06 07:38:08,998 INFO [train.py:715] (7/8) Epoch 8, batch 28450, loss[loss=0.1642, simple_loss=0.2245, pruned_loss=0.052, over 4828.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.0353, over 972012.15 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:38:48,160 INFO [train.py:715] (7/8) Epoch 8, batch 28500, loss[loss=0.1565, simple_loss=0.2315, pruned_loss=0.04071, over 4803.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03537, over 971604.66 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:39:26,866 INFO [train.py:715] (7/8) Epoch 8, batch 28550, loss[loss=0.1483, simple_loss=0.2145, pruned_loss=0.04105, over 4867.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03506, over 972382.59 frames.], batch size: 20, lr: 2.56e-04 +2022-05-06 07:40:05,726 INFO [train.py:715] (7/8) Epoch 8, batch 28600, loss[loss=0.1587, simple_loss=0.2223, pruned_loss=0.04756, over 4983.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2155, pruned_loss=0.0348, over 972689.01 frames.], batch size: 14, lr: 2.56e-04 +2022-05-06 07:40:45,400 INFO [train.py:715] (7/8) Epoch 8, batch 28650, loss[loss=0.147, simple_loss=0.2142, pruned_loss=0.03984, over 4873.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03529, over 971280.18 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:41:24,253 INFO [train.py:715] (7/8) Epoch 8, batch 28700, loss[loss=0.1386, simple_loss=0.2107, pruned_loss=0.03319, over 4893.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03561, over 970559.43 frames.], batch size: 19, lr: 2.56e-04 +2022-05-06 07:42:02,604 INFO [train.py:715] (7/8) Epoch 8, batch 28750, loss[loss=0.1597, simple_loss=0.231, pruned_loss=0.04425, over 4912.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2157, pruned_loss=0.03528, over 971329.97 frames.], batch size: 19, lr: 2.56e-04 +2022-05-06 07:42:42,144 INFO [train.py:715] (7/8) Epoch 8, batch 28800, loss[loss=0.1548, simple_loss=0.2303, pruned_loss=0.03972, over 4780.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2158, pruned_loss=0.03526, over 972135.12 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:43:21,539 INFO [train.py:715] (7/8) Epoch 8, batch 28850, loss[loss=0.1375, simple_loss=0.212, pruned_loss=0.0315, over 4920.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2159, pruned_loss=0.03564, over 972447.79 frames.], batch size: 39, lr: 2.56e-04 +2022-05-06 07:44:00,550 INFO [train.py:715] (7/8) Epoch 8, batch 28900, loss[loss=0.1494, simple_loss=0.2343, pruned_loss=0.03223, over 4771.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2167, pruned_loss=0.0358, over 972069.29 frames.], batch size: 18, lr: 2.56e-04 +2022-05-06 07:44:39,171 INFO [train.py:715] (7/8) Epoch 8, batch 28950, loss[loss=0.1415, simple_loss=0.2213, pruned_loss=0.0308, over 4806.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2156, pruned_loss=0.03506, over 972797.78 frames.], batch size: 25, lr: 2.56e-04 +2022-05-06 07:45:18,518 INFO [train.py:715] (7/8) Epoch 8, batch 29000, 
loss[loss=0.1296, simple_loss=0.2121, pruned_loss=0.02349, over 4947.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2147, pruned_loss=0.03483, over 972715.95 frames.], batch size: 29, lr: 2.56e-04 +2022-05-06 07:45:57,179 INFO [train.py:715] (7/8) Epoch 8, batch 29050, loss[loss=0.1443, simple_loss=0.2159, pruned_loss=0.0363, over 4860.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03528, over 972083.34 frames.], batch size: 38, lr: 2.56e-04 +2022-05-06 07:46:36,418 INFO [train.py:715] (7/8) Epoch 8, batch 29100, loss[loss=0.1337, simple_loss=0.2152, pruned_loss=0.02611, over 4853.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2153, pruned_loss=0.03503, over 971406.57 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:47:14,943 INFO [train.py:715] (7/8) Epoch 8, batch 29150, loss[loss=0.1063, simple_loss=0.1797, pruned_loss=0.01648, over 4747.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2154, pruned_loss=0.03492, over 971190.14 frames.], batch size: 12, lr: 2.56e-04 +2022-05-06 07:47:54,244 INFO [train.py:715] (7/8) Epoch 8, batch 29200, loss[loss=0.1509, simple_loss=0.2227, pruned_loss=0.03953, over 4746.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03575, over 971024.07 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:48:32,868 INFO [train.py:715] (7/8) Epoch 8, batch 29250, loss[loss=0.1434, simple_loss=0.2191, pruned_loss=0.03391, over 4977.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03602, over 970866.77 frames.], batch size: 35, lr: 2.56e-04 +2022-05-06 07:49:11,138 INFO [train.py:715] (7/8) Epoch 8, batch 29300, loss[loss=0.1644, simple_loss=0.2456, pruned_loss=0.04157, over 4993.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2169, pruned_loss=0.03587, over 971357.86 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:49:50,323 INFO [train.py:715] (7/8) Epoch 8, batch 29350, loss[loss=0.1311, simple_loss=0.2011, pruned_loss=0.03052, over 4845.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03547, over 971801.03 frames.], batch size: 13, lr: 2.56e-04 +2022-05-06 07:50:29,152 INFO [train.py:715] (7/8) Epoch 8, batch 29400, loss[loss=0.1443, simple_loss=0.2243, pruned_loss=0.0322, over 4985.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2153, pruned_loss=0.03554, over 973226.33 frames.], batch size: 28, lr: 2.56e-04 +2022-05-06 07:51:08,798 INFO [train.py:715] (7/8) Epoch 8, batch 29450, loss[loss=0.1556, simple_loss=0.2164, pruned_loss=0.04736, over 4742.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2156, pruned_loss=0.03597, over 973532.70 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:51:48,081 INFO [train.py:715] (7/8) Epoch 8, batch 29500, loss[loss=0.1724, simple_loss=0.2479, pruned_loss=0.04845, over 4970.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2159, pruned_loss=0.03598, over 972118.53 frames.], batch size: 39, lr: 2.56e-04 +2022-05-06 07:52:27,550 INFO [train.py:715] (7/8) Epoch 8, batch 29550, loss[loss=0.1369, simple_loss=0.2113, pruned_loss=0.03123, over 4947.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2154, pruned_loss=0.0359, over 971712.36 frames.], batch size: 21, lr: 2.56e-04 +2022-05-06 07:53:06,116 INFO [train.py:715] (7/8) Epoch 8, batch 29600, loss[loss=0.152, simple_loss=0.2143, pruned_loss=0.04485, over 4877.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2162, pruned_loss=0.03595, over 972167.72 frames.], batch size: 22, lr: 2.56e-04 +2022-05-06 07:53:45,383 INFO [train.py:715] (7/8) Epoch 8, batch 29650, loss[loss=0.1929, 
simple_loss=0.2605, pruned_loss=0.06267, over 4871.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03634, over 972414.49 frames.], batch size: 16, lr: 2.56e-04 +2022-05-06 07:54:24,985 INFO [train.py:715] (7/8) Epoch 8, batch 29700, loss[loss=0.1298, simple_loss=0.1981, pruned_loss=0.0308, over 4912.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2158, pruned_loss=0.03598, over 972454.73 frames.], batch size: 22, lr: 2.56e-04 +2022-05-06 07:55:03,541 INFO [train.py:715] (7/8) Epoch 8, batch 29750, loss[loss=0.1655, simple_loss=0.2348, pruned_loss=0.04812, over 4895.00 frames.], tot_loss[loss=0.144, simple_loss=0.2161, pruned_loss=0.03597, over 972729.23 frames.], batch size: 19, lr: 2.56e-04 +2022-05-06 07:55:42,378 INFO [train.py:715] (7/8) Epoch 8, batch 29800, loss[loss=0.146, simple_loss=0.2239, pruned_loss=0.03405, over 4799.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2167, pruned_loss=0.03603, over 972747.89 frames.], batch size: 25, lr: 2.55e-04 +2022-05-06 07:56:21,284 INFO [train.py:715] (7/8) Epoch 8, batch 29850, loss[loss=0.1835, simple_loss=0.2495, pruned_loss=0.05879, over 4800.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2164, pruned_loss=0.03591, over 972015.19 frames.], batch size: 24, lr: 2.55e-04 +2022-05-06 07:57:00,654 INFO [train.py:715] (7/8) Epoch 8, batch 29900, loss[loss=0.1623, simple_loss=0.2347, pruned_loss=0.04493, over 4959.00 frames.], tot_loss[loss=0.1453, simple_loss=0.217, pruned_loss=0.0368, over 971982.79 frames.], batch size: 35, lr: 2.55e-04 +2022-05-06 07:57:39,546 INFO [train.py:715] (7/8) Epoch 8, batch 29950, loss[loss=0.1508, simple_loss=0.2194, pruned_loss=0.04107, over 4785.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2164, pruned_loss=0.03653, over 971673.91 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 07:58:18,656 INFO [train.py:715] (7/8) Epoch 8, batch 30000, loss[loss=0.161, simple_loss=0.2336, pruned_loss=0.04416, over 4887.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2169, pruned_loss=0.03673, over 971295.32 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 07:58:18,656 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 07:58:28,240 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1073, simple_loss=0.1918, pruned_loss=0.01141, over 914524.00 frames. 
+2022-05-06 07:59:07,026 INFO [train.py:715] (7/8) Epoch 8, batch 30050, loss[loss=0.1655, simple_loss=0.2395, pruned_loss=0.04574, over 4933.00 frames.], tot_loss[loss=0.145, simple_loss=0.2169, pruned_loss=0.03654, over 971561.97 frames.], batch size: 39, lr: 2.55e-04 +2022-05-06 07:59:46,357 INFO [train.py:715] (7/8) Epoch 8, batch 30100, loss[loss=0.1429, simple_loss=0.2166, pruned_loss=0.03462, over 4765.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2161, pruned_loss=0.03647, over 972259.66 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:00:25,658 INFO [train.py:715] (7/8) Epoch 8, batch 30150, loss[loss=0.1362, simple_loss=0.1909, pruned_loss=0.04071, over 4832.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2165, pruned_loss=0.03653, over 972246.57 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:01:04,252 INFO [train.py:715] (7/8) Epoch 8, batch 30200, loss[loss=0.1666, simple_loss=0.2423, pruned_loss=0.04543, over 4905.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2161, pruned_loss=0.03615, over 972333.49 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:01:43,186 INFO [train.py:715] (7/8) Epoch 8, batch 30250, loss[loss=0.1418, simple_loss=0.22, pruned_loss=0.03181, over 4789.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.03632, over 971834.28 frames.], batch size: 24, lr: 2.55e-04 +2022-05-06 08:02:22,870 INFO [train.py:715] (7/8) Epoch 8, batch 30300, loss[loss=0.1369, simple_loss=0.2074, pruned_loss=0.03317, over 4968.00 frames.], tot_loss[loss=0.1441, simple_loss=0.2157, pruned_loss=0.03621, over 972251.01 frames.], batch size: 35, lr: 2.55e-04 +2022-05-06 08:03:01,870 INFO [train.py:715] (7/8) Epoch 8, batch 30350, loss[loss=0.1321, simple_loss=0.1947, pruned_loss=0.03471, over 4845.00 frames.], tot_loss[loss=0.1448, simple_loss=0.2162, pruned_loss=0.03673, over 972075.43 frames.], batch size: 13, lr: 2.55e-04 +2022-05-06 08:03:40,564 INFO [train.py:715] (7/8) Epoch 8, batch 30400, loss[loss=0.141, simple_loss=0.218, pruned_loss=0.03197, over 4845.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2162, pruned_loss=0.03633, over 972635.86 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:04:19,870 INFO [train.py:715] (7/8) Epoch 8, batch 30450, loss[loss=0.1404, simple_loss=0.2167, pruned_loss=0.03208, over 4911.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, pruned_loss=0.03587, over 972895.58 frames.], batch size: 18, lr: 2.55e-04 +2022-05-06 08:04:58,852 INFO [train.py:715] (7/8) Epoch 8, batch 30500, loss[loss=0.1294, simple_loss=0.2068, pruned_loss=0.02597, over 4986.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2159, pruned_loss=0.03545, over 973155.37 frames.], batch size: 25, lr: 2.55e-04 +2022-05-06 08:05:37,499 INFO [train.py:715] (7/8) Epoch 8, batch 30550, loss[loss=0.1434, simple_loss=0.2102, pruned_loss=0.03833, over 4800.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2159, pruned_loss=0.03529, over 972631.10 frames.], batch size: 21, lr: 2.55e-04 +2022-05-06 08:06:16,537 INFO [train.py:715] (7/8) Epoch 8, batch 30600, loss[loss=0.1415, simple_loss=0.2164, pruned_loss=0.03329, over 4918.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.0353, over 972508.33 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:06:56,253 INFO [train.py:715] (7/8) Epoch 8, batch 30650, loss[loss=0.1568, simple_loss=0.2269, pruned_loss=0.04333, over 4776.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.0349, over 972133.86 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:07:35,436 
INFO [train.py:715] (7/8) Epoch 8, batch 30700, loss[loss=0.1356, simple_loss=0.2118, pruned_loss=0.02968, over 4798.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03454, over 971516.24 frames.], batch size: 24, lr: 2.55e-04 +2022-05-06 08:08:15,308 INFO [train.py:715] (7/8) Epoch 8, batch 30750, loss[loss=0.1602, simple_loss=0.2353, pruned_loss=0.04259, over 4896.00 frames.], tot_loss[loss=0.142, simple_loss=0.2148, pruned_loss=0.03455, over 972307.46 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 08:08:55,424 INFO [train.py:715] (7/8) Epoch 8, batch 30800, loss[loss=0.1184, simple_loss=0.1981, pruned_loss=0.01937, over 4977.00 frames.], tot_loss[loss=0.143, simple_loss=0.2159, pruned_loss=0.035, over 972195.69 frames.], batch size: 25, lr: 2.55e-04 +2022-05-06 08:09:33,883 INFO [train.py:715] (7/8) Epoch 8, batch 30850, loss[loss=0.1795, simple_loss=0.2524, pruned_loss=0.05333, over 4828.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2167, pruned_loss=0.03582, over 972339.22 frames.], batch size: 26, lr: 2.55e-04 +2022-05-06 08:10:12,784 INFO [train.py:715] (7/8) Epoch 8, batch 30900, loss[loss=0.1442, simple_loss=0.2044, pruned_loss=0.042, over 4885.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03531, over 973191.82 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 08:10:52,541 INFO [train.py:715] (7/8) Epoch 8, batch 30950, loss[loss=0.1385, simple_loss=0.2076, pruned_loss=0.03464, over 4961.00 frames.], tot_loss[loss=0.142, simple_loss=0.2145, pruned_loss=0.0347, over 973809.56 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:11:32,560 INFO [train.py:715] (7/8) Epoch 8, batch 31000, loss[loss=0.1893, simple_loss=0.2598, pruned_loss=0.05942, over 4980.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2156, pruned_loss=0.03499, over 973699.52 frames.], batch size: 39, lr: 2.55e-04 +2022-05-06 08:12:11,802 INFO [train.py:715] (7/8) Epoch 8, batch 31050, loss[loss=0.1256, simple_loss=0.1902, pruned_loss=0.0305, over 4859.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.035, over 973430.92 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 08:12:51,406 INFO [train.py:715] (7/8) Epoch 8, batch 31100, loss[loss=0.1216, simple_loss=0.1982, pruned_loss=0.02251, over 4955.00 frames.], tot_loss[loss=0.1423, simple_loss=0.215, pruned_loss=0.03483, over 973305.39 frames.], batch size: 23, lr: 2.55e-04 +2022-05-06 08:13:30,938 INFO [train.py:715] (7/8) Epoch 8, batch 31150, loss[loss=0.1588, simple_loss=0.2235, pruned_loss=0.047, over 4735.00 frames.], tot_loss[loss=0.142, simple_loss=0.2148, pruned_loss=0.03466, over 973209.65 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 08:14:09,973 INFO [train.py:715] (7/8) Epoch 8, batch 31200, loss[loss=0.1269, simple_loss=0.2029, pruned_loss=0.02546, over 4921.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.0345, over 972474.28 frames.], batch size: 23, lr: 2.55e-04 +2022-05-06 08:14:48,713 INFO [train.py:715] (7/8) Epoch 8, batch 31250, loss[loss=0.1734, simple_loss=0.242, pruned_loss=0.05241, over 4795.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2142, pruned_loss=0.03475, over 973115.00 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:15:28,186 INFO [train.py:715] (7/8) Epoch 8, batch 31300, loss[loss=0.1337, simple_loss=0.2122, pruned_loss=0.02762, over 4819.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03508, over 973107.96 frames.], batch size: 13, lr: 2.55e-04 +2022-05-06 08:16:07,665 INFO [train.py:715] (7/8) Epoch 8, 
batch 31350, loss[loss=0.1475, simple_loss=0.2177, pruned_loss=0.03862, over 4947.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03513, over 973203.63 frames.], batch size: 35, lr: 2.55e-04 +2022-05-06 08:16:46,295 INFO [train.py:715] (7/8) Epoch 8, batch 31400, loss[loss=0.1346, simple_loss=0.1982, pruned_loss=0.0355, over 4813.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.03511, over 973686.27 frames.], batch size: 12, lr: 2.55e-04 +2022-05-06 08:17:25,751 INFO [train.py:715] (7/8) Epoch 8, batch 31450, loss[loss=0.1316, simple_loss=0.2, pruned_loss=0.0316, over 4776.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2143, pruned_loss=0.03503, over 973483.35 frames.], batch size: 14, lr: 2.55e-04 +2022-05-06 08:18:05,874 INFO [train.py:715] (7/8) Epoch 8, batch 31500, loss[loss=0.1373, simple_loss=0.2076, pruned_loss=0.03344, over 4746.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03498, over 973483.73 frames.], batch size: 16, lr: 2.55e-04 +2022-05-06 08:18:45,119 INFO [train.py:715] (7/8) Epoch 8, batch 31550, loss[loss=0.1578, simple_loss=0.229, pruned_loss=0.04333, over 4876.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03492, over 973994.49 frames.], batch size: 38, lr: 2.55e-04 +2022-05-06 08:19:24,102 INFO [train.py:715] (7/8) Epoch 8, batch 31600, loss[loss=0.1419, simple_loss=0.204, pruned_loss=0.03993, over 4890.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03462, over 973809.23 frames.], batch size: 22, lr: 2.55e-04 +2022-05-06 08:20:03,755 INFO [train.py:715] (7/8) Epoch 8, batch 31650, loss[loss=0.1544, simple_loss=0.2145, pruned_loss=0.04715, over 4966.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03536, over 974437.08 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:20:43,077 INFO [train.py:715] (7/8) Epoch 8, batch 31700, loss[loss=0.1287, simple_loss=0.2016, pruned_loss=0.02793, over 4916.00 frames.], tot_loss[loss=0.142, simple_loss=0.2141, pruned_loss=0.03492, over 974706.35 frames.], batch size: 18, lr: 2.55e-04 +2022-05-06 08:21:22,757 INFO [train.py:715] (7/8) Epoch 8, batch 31750, loss[loss=0.2121, simple_loss=0.2946, pruned_loss=0.0648, over 4928.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03517, over 973771.27 frames.], batch size: 17, lr: 2.55e-04 +2022-05-06 08:22:01,969 INFO [train.py:715] (7/8) Epoch 8, batch 31800, loss[loss=0.1425, simple_loss=0.2195, pruned_loss=0.03272, over 4920.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2142, pruned_loss=0.03528, over 972846.78 frames.], batch size: 29, lr: 2.55e-04 +2022-05-06 08:22:41,013 INFO [train.py:715] (7/8) Epoch 8, batch 31850, loss[loss=0.1291, simple_loss=0.2066, pruned_loss=0.02582, over 4911.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03463, over 972290.06 frames.], batch size: 18, lr: 2.55e-04 +2022-05-06 08:23:19,922 INFO [train.py:715] (7/8) Epoch 8, batch 31900, loss[loss=0.1608, simple_loss=0.2444, pruned_loss=0.03861, over 4835.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2134, pruned_loss=0.03465, over 972746.15 frames.], batch size: 26, lr: 2.55e-04 +2022-05-06 08:23:58,319 INFO [train.py:715] (7/8) Epoch 8, batch 31950, loss[loss=0.1462, simple_loss=0.2178, pruned_loss=0.03728, over 4940.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2137, pruned_loss=0.03485, over 971362.44 frames.], batch size: 29, lr: 2.55e-04 +2022-05-06 08:24:37,607 INFO [train.py:715] (7/8) Epoch 8, batch 32000, 
loss[loss=0.1364, simple_loss=0.2105, pruned_loss=0.03115, over 4823.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2148, pruned_loss=0.03532, over 971909.51 frames.], batch size: 21, lr: 2.55e-04 +2022-05-06 08:25:17,171 INFO [train.py:715] (7/8) Epoch 8, batch 32050, loss[loss=0.164, simple_loss=0.2173, pruned_loss=0.05538, over 4976.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2151, pruned_loss=0.03563, over 972279.94 frames.], batch size: 35, lr: 2.55e-04 +2022-05-06 08:25:55,737 INFO [train.py:715] (7/8) Epoch 8, batch 32100, loss[loss=0.1486, simple_loss=0.2257, pruned_loss=0.03578, over 4836.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2157, pruned_loss=0.03581, over 972503.70 frames.], batch size: 15, lr: 2.55e-04 +2022-05-06 08:26:34,469 INFO [train.py:715] (7/8) Epoch 8, batch 32150, loss[loss=0.1485, simple_loss=0.2176, pruned_loss=0.03965, over 4745.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2156, pruned_loss=0.03573, over 972888.60 frames.], batch size: 19, lr: 2.55e-04 +2022-05-06 08:27:14,043 INFO [train.py:715] (7/8) Epoch 8, batch 32200, loss[loss=0.1415, simple_loss=0.2156, pruned_loss=0.03375, over 4734.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2159, pruned_loss=0.0358, over 972204.54 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:27:52,862 INFO [train.py:715] (7/8) Epoch 8, batch 32250, loss[loss=0.1539, simple_loss=0.2303, pruned_loss=0.03879, over 4819.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2158, pruned_loss=0.03597, over 971832.04 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:28:32,332 INFO [train.py:715] (7/8) Epoch 8, batch 32300, loss[loss=0.1411, simple_loss=0.2035, pruned_loss=0.03935, over 4899.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2145, pruned_loss=0.03564, over 972724.04 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:29:11,540 INFO [train.py:715] (7/8) Epoch 8, batch 32350, loss[loss=0.1336, simple_loss=0.2049, pruned_loss=0.0311, over 4906.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2156, pruned_loss=0.03635, over 972559.05 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:29:51,457 INFO [train.py:715] (7/8) Epoch 8, batch 32400, loss[loss=0.167, simple_loss=0.2317, pruned_loss=0.05114, over 4833.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2161, pruned_loss=0.03638, over 972213.79 frames.], batch size: 30, lr: 2.54e-04 +2022-05-06 08:30:30,384 INFO [train.py:715] (7/8) Epoch 8, batch 32450, loss[loss=0.1191, simple_loss=0.1927, pruned_loss=0.0228, over 4900.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2162, pruned_loss=0.03644, over 971596.73 frames.], batch size: 22, lr: 2.54e-04 +2022-05-06 08:31:09,405 INFO [train.py:715] (7/8) Epoch 8, batch 32500, loss[loss=0.1759, simple_loss=0.2604, pruned_loss=0.04571, over 4820.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03535, over 971094.52 frames.], batch size: 27, lr: 2.54e-04 +2022-05-06 08:31:48,953 INFO [train.py:715] (7/8) Epoch 8, batch 32550, loss[loss=0.1091, simple_loss=0.1829, pruned_loss=0.01771, over 4897.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03519, over 970469.94 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:32:27,500 INFO [train.py:715] (7/8) Epoch 8, batch 32600, loss[loss=0.1666, simple_loss=0.2491, pruned_loss=0.042, over 4863.00 frames.], tot_loss[loss=0.1436, simple_loss=0.216, pruned_loss=0.03559, over 970905.92 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:33:06,728 INFO [train.py:715] (7/8) Epoch 8, batch 32650, loss[loss=0.1532, 
simple_loss=0.224, pruned_loss=0.04118, over 4746.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2162, pruned_loss=0.03606, over 970673.10 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:33:45,983 INFO [train.py:715] (7/8) Epoch 8, batch 32700, loss[loss=0.1288, simple_loss=0.1989, pruned_loss=0.02936, over 4941.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03515, over 971034.49 frames.], batch size: 29, lr: 2.54e-04 +2022-05-06 08:34:26,179 INFO [train.py:715] (7/8) Epoch 8, batch 32750, loss[loss=0.1425, simple_loss=0.2054, pruned_loss=0.03985, over 4989.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03459, over 972083.76 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:35:04,665 INFO [train.py:715] (7/8) Epoch 8, batch 32800, loss[loss=0.1313, simple_loss=0.2151, pruned_loss=0.02373, over 4854.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2154, pruned_loss=0.03509, over 972254.21 frames.], batch size: 13, lr: 2.54e-04 +2022-05-06 08:35:43,308 INFO [train.py:715] (7/8) Epoch 8, batch 32850, loss[loss=0.1192, simple_loss=0.1991, pruned_loss=0.01967, over 4740.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03515, over 971589.07 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:36:22,466 INFO [train.py:715] (7/8) Epoch 8, batch 32900, loss[loss=0.1568, simple_loss=0.2298, pruned_loss=0.04186, over 4785.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03511, over 971444.01 frames.], batch size: 14, lr: 2.54e-04 +2022-05-06 08:37:00,746 INFO [train.py:715] (7/8) Epoch 8, batch 32950, loss[loss=0.1387, simple_loss=0.2199, pruned_loss=0.02872, over 4968.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.03491, over 972022.81 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:37:39,628 INFO [train.py:715] (7/8) Epoch 8, batch 33000, loss[loss=0.1446, simple_loss=0.2163, pruned_loss=0.03642, over 4802.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03519, over 971878.75 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:37:39,629 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 08:37:52,640 INFO [train.py:742] (7/8) Epoch 8, validation: loss=0.1071, simple_loss=0.1917, pruned_loss=0.01126, over 914524.00 frames. 
+2022-05-06 08:38:31,973 INFO [train.py:715] (7/8) Epoch 8, batch 33050, loss[loss=0.1323, simple_loss=0.2092, pruned_loss=0.02771, over 4874.00 frames.], tot_loss[loss=0.143, simple_loss=0.2151, pruned_loss=0.03544, over 972005.90 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:39:10,831 INFO [train.py:715] (7/8) Epoch 8, batch 33100, loss[loss=0.1464, simple_loss=0.2213, pruned_loss=0.03573, over 4797.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2152, pruned_loss=0.03523, over 972272.13 frames.], batch size: 12, lr: 2.54e-04 +2022-05-06 08:39:50,123 INFO [train.py:715] (7/8) Epoch 8, batch 33150, loss[loss=0.1297, simple_loss=0.2032, pruned_loss=0.02809, over 4780.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2161, pruned_loss=0.03572, over 972269.18 frames.], batch size: 12, lr: 2.54e-04 +2022-05-06 08:40:28,830 INFO [train.py:715] (7/8) Epoch 8, batch 33200, loss[loss=0.1181, simple_loss=0.1947, pruned_loss=0.02071, over 4977.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.03545, over 972123.06 frames.], batch size: 28, lr: 2.54e-04 +2022-05-06 08:41:08,506 INFO [train.py:715] (7/8) Epoch 8, batch 33250, loss[loss=0.1526, simple_loss=0.225, pruned_loss=0.04012, over 4915.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2153, pruned_loss=0.03587, over 972526.44 frames.], batch size: 39, lr: 2.54e-04 +2022-05-06 08:41:48,102 INFO [train.py:715] (7/8) Epoch 8, batch 33300, loss[loss=0.1372, simple_loss=0.2167, pruned_loss=0.02885, over 4954.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2164, pruned_loss=0.03665, over 972020.46 frames.], batch size: 35, lr: 2.54e-04 +2022-05-06 08:42:26,899 INFO [train.py:715] (7/8) Epoch 8, batch 33350, loss[loss=0.1472, simple_loss=0.2178, pruned_loss=0.03833, over 4914.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2158, pruned_loss=0.03649, over 972445.61 frames.], batch size: 18, lr: 2.54e-04 +2022-05-06 08:43:06,263 INFO [train.py:715] (7/8) Epoch 8, batch 33400, loss[loss=0.1471, simple_loss=0.2167, pruned_loss=0.03868, over 4985.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2161, pruned_loss=0.03653, over 972782.40 frames.], batch size: 31, lr: 2.54e-04 +2022-05-06 08:43:45,175 INFO [train.py:715] (7/8) Epoch 8, batch 33450, loss[loss=0.1902, simple_loss=0.252, pruned_loss=0.0642, over 4688.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2167, pruned_loss=0.0366, over 972645.38 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:44:24,008 INFO [train.py:715] (7/8) Epoch 8, batch 33500, loss[loss=0.1424, simple_loss=0.2208, pruned_loss=0.03203, over 4973.00 frames.], tot_loss[loss=0.1446, simple_loss=0.2167, pruned_loss=0.03627, over 972847.28 frames.], batch size: 24, lr: 2.54e-04 +2022-05-06 08:45:05,011 INFO [train.py:715] (7/8) Epoch 8, batch 33550, loss[loss=0.1291, simple_loss=0.2099, pruned_loss=0.0241, over 4834.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2157, pruned_loss=0.03598, over 972758.22 frames.], batch size: 12, lr: 2.54e-04 +2022-05-06 08:45:44,466 INFO [train.py:715] (7/8) Epoch 8, batch 33600, loss[loss=0.1592, simple_loss=0.2238, pruned_loss=0.04728, over 4774.00 frames.], tot_loss[loss=0.1449, simple_loss=0.2163, pruned_loss=0.03672, over 972507.86 frames.], batch size: 12, lr: 2.54e-04 +2022-05-06 08:46:23,910 INFO [train.py:715] (7/8) Epoch 8, batch 33650, loss[loss=0.2089, simple_loss=0.2723, pruned_loss=0.07272, over 4761.00 frames.], tot_loss[loss=0.1445, simple_loss=0.2161, pruned_loss=0.03646, over 972584.68 frames.], batch size: 12, lr: 2.54e-04 +2022-05-06 08:47:02,975 
INFO [train.py:715] (7/8) Epoch 8, batch 33700, loss[loss=0.1363, simple_loss=0.2178, pruned_loss=0.02745, over 4804.00 frames.], tot_loss[loss=0.1452, simple_loss=0.2168, pruned_loss=0.03684, over 973025.39 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:47:41,964 INFO [train.py:715] (7/8) Epoch 8, batch 33750, loss[loss=0.1898, simple_loss=0.2439, pruned_loss=0.06787, over 4826.00 frames.], tot_loss[loss=0.1443, simple_loss=0.216, pruned_loss=0.03635, over 973555.89 frames.], batch size: 30, lr: 2.54e-04 +2022-05-06 08:48:20,686 INFO [train.py:715] (7/8) Epoch 8, batch 33800, loss[loss=0.1393, simple_loss=0.2019, pruned_loss=0.03838, over 4743.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2158, pruned_loss=0.03653, over 972903.46 frames.], batch size: 16, lr: 2.54e-04 +2022-05-06 08:48:59,307 INFO [train.py:715] (7/8) Epoch 8, batch 33850, loss[loss=0.1241, simple_loss=0.1979, pruned_loss=0.02516, over 4846.00 frames.], tot_loss[loss=0.1442, simple_loss=0.2155, pruned_loss=0.03643, over 972545.19 frames.], batch size: 13, lr: 2.54e-04 +2022-05-06 08:49:38,118 INFO [train.py:715] (7/8) Epoch 8, batch 33900, loss[loss=0.1563, simple_loss=0.2318, pruned_loss=0.04039, over 4890.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2147, pruned_loss=0.03593, over 972828.71 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:50:17,037 INFO [train.py:715] (7/8) Epoch 8, batch 33950, loss[loss=0.1375, simple_loss=0.2172, pruned_loss=0.02886, over 4806.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2149, pruned_loss=0.03578, over 972736.89 frames.], batch size: 25, lr: 2.54e-04 +2022-05-06 08:50:56,634 INFO [train.py:715] (7/8) Epoch 8, batch 34000, loss[loss=0.1307, simple_loss=0.2065, pruned_loss=0.02745, over 4814.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2145, pruned_loss=0.03569, over 972703.73 frames.], batch size: 25, lr: 2.54e-04 +2022-05-06 08:51:35,549 INFO [train.py:715] (7/8) Epoch 8, batch 34050, loss[loss=0.1498, simple_loss=0.2189, pruned_loss=0.04034, over 4767.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2148, pruned_loss=0.03553, over 972693.65 frames.], batch size: 14, lr: 2.54e-04 +2022-05-06 08:52:14,818 INFO [train.py:715] (7/8) Epoch 8, batch 34100, loss[loss=0.1438, simple_loss=0.2112, pruned_loss=0.0382, over 4930.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2152, pruned_loss=0.03562, over 972583.40 frames.], batch size: 29, lr: 2.54e-04 +2022-05-06 08:52:53,782 INFO [train.py:715] (7/8) Epoch 8, batch 34150, loss[loss=0.1617, simple_loss=0.228, pruned_loss=0.04763, over 4893.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2152, pruned_loss=0.03584, over 972029.36 frames.], batch size: 17, lr: 2.54e-04 +2022-05-06 08:53:32,398 INFO [train.py:715] (7/8) Epoch 8, batch 34200, loss[loss=0.1444, simple_loss=0.2201, pruned_loss=0.03438, over 4816.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2145, pruned_loss=0.03563, over 971931.16 frames.], batch size: 25, lr: 2.54e-04 +2022-05-06 08:54:11,304 INFO [train.py:715] (7/8) Epoch 8, batch 34250, loss[loss=0.175, simple_loss=0.2331, pruned_loss=0.05845, over 4752.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2154, pruned_loss=0.03604, over 972869.39 frames.], batch size: 19, lr: 2.54e-04 +2022-05-06 08:54:50,275 INFO [train.py:715] (7/8) Epoch 8, batch 34300, loss[loss=0.1868, simple_loss=0.253, pruned_loss=0.06025, over 4874.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03556, over 972716.38 frames.], batch size: 32, lr: 2.54e-04 +2022-05-06 08:55:29,027 INFO [train.py:715] 
(7/8) Epoch 8, batch 34350, loss[loss=0.1612, simple_loss=0.2326, pruned_loss=0.04488, over 4808.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.03538, over 972727.19 frames.], batch size: 21, lr: 2.54e-04 +2022-05-06 08:56:07,456 INFO [train.py:715] (7/8) Epoch 8, batch 34400, loss[loss=0.1316, simple_loss=0.2134, pruned_loss=0.02487, over 4960.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.0354, over 972215.59 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:56:46,677 INFO [train.py:715] (7/8) Epoch 8, batch 34450, loss[loss=0.1719, simple_loss=0.2447, pruned_loss=0.04954, over 4836.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2155, pruned_loss=0.0357, over 972029.06 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:57:26,050 INFO [train.py:715] (7/8) Epoch 8, batch 34500, loss[loss=0.1273, simple_loss=0.206, pruned_loss=0.02435, over 4949.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2158, pruned_loss=0.03538, over 972085.32 frames.], batch size: 24, lr: 2.54e-04 +2022-05-06 08:58:04,294 INFO [train.py:715] (7/8) Epoch 8, batch 34550, loss[loss=0.1568, simple_loss=0.2291, pruned_loss=0.04226, over 4971.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.0352, over 972404.17 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:58:42,926 INFO [train.py:715] (7/8) Epoch 8, batch 34600, loss[loss=0.146, simple_loss=0.229, pruned_loss=0.03153, over 4699.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2149, pruned_loss=0.03514, over 972406.23 frames.], batch size: 15, lr: 2.54e-04 +2022-05-06 08:59:21,848 INFO [train.py:715] (7/8) Epoch 8, batch 34650, loss[loss=0.1796, simple_loss=0.2467, pruned_loss=0.05629, over 4776.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.03544, over 972118.09 frames.], batch size: 17, lr: 2.53e-04 +2022-05-06 09:00:01,505 INFO [train.py:715] (7/8) Epoch 8, batch 34700, loss[loss=0.1596, simple_loss=0.2279, pruned_loss=0.04566, over 4900.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.03533, over 971789.05 frames.], batch size: 22, lr: 2.53e-04 +2022-05-06 09:00:38,663 INFO [train.py:715] (7/8) Epoch 8, batch 34750, loss[loss=0.1499, simple_loss=0.2135, pruned_loss=0.04313, over 4767.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03529, over 971766.64 frames.], batch size: 12, lr: 2.53e-04 +2022-05-06 09:01:15,263 INFO [train.py:715] (7/8) Epoch 8, batch 34800, loss[loss=0.1419, simple_loss=0.1981, pruned_loss=0.04291, over 4800.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2139, pruned_loss=0.0352, over 970817.92 frames.], batch size: 12, lr: 2.53e-04 +2022-05-06 09:02:04,639 INFO [train.py:715] (7/8) Epoch 9, batch 0, loss[loss=0.1447, simple_loss=0.2158, pruned_loss=0.03681, over 4747.00 frames.], tot_loss[loss=0.1447, simple_loss=0.2158, pruned_loss=0.03681, over 4747.00 frames.], batch size: 16, lr: 2.42e-04 +2022-05-06 09:02:43,975 INFO [train.py:715] (7/8) Epoch 9, batch 50, loss[loss=0.1267, simple_loss=0.1997, pruned_loss=0.02684, over 4877.00 frames.], tot_loss[loss=0.1488, simple_loss=0.2191, pruned_loss=0.03927, over 220185.25 frames.], batch size: 22, lr: 2.41e-04 +2022-05-06 09:03:23,613 INFO [train.py:715] (7/8) Epoch 9, batch 100, loss[loss=0.1408, simple_loss=0.2215, pruned_loss=0.03002, over 4806.00 frames.], tot_loss[loss=0.1444, simple_loss=0.2161, pruned_loss=0.03635, over 386943.30 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:04:02,104 INFO [train.py:715] (7/8) Epoch 9, batch 150, 
loss[loss=0.1268, simple_loss=0.2036, pruned_loss=0.02497, over 4821.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2164, pruned_loss=0.03557, over 517202.25 frames.], batch size: 26, lr: 2.41e-04 +2022-05-06 09:04:42,542 INFO [train.py:715] (7/8) Epoch 9, batch 200, loss[loss=0.165, simple_loss=0.2507, pruned_loss=0.03959, over 4807.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.0352, over 617950.76 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:05:21,807 INFO [train.py:715] (7/8) Epoch 9, batch 250, loss[loss=0.1389, simple_loss=0.206, pruned_loss=0.03592, over 4950.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03563, over 695729.44 frames.], batch size: 23, lr: 2.41e-04 +2022-05-06 09:06:01,097 INFO [train.py:715] (7/8) Epoch 9, batch 300, loss[loss=0.1503, simple_loss=0.2182, pruned_loss=0.04118, over 4819.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.0354, over 756326.93 frames.], batch size: 27, lr: 2.41e-04 +2022-05-06 09:06:40,656 INFO [train.py:715] (7/8) Epoch 9, batch 350, loss[loss=0.1365, simple_loss=0.2149, pruned_loss=0.02907, over 4899.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03529, over 803410.07 frames.], batch size: 23, lr: 2.41e-04 +2022-05-06 09:07:20,405 INFO [train.py:715] (7/8) Epoch 9, batch 400, loss[loss=0.1577, simple_loss=0.2357, pruned_loss=0.03988, over 4983.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.03457, over 840723.86 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:07:59,737 INFO [train.py:715] (7/8) Epoch 9, batch 450, loss[loss=0.1449, simple_loss=0.215, pruned_loss=0.03745, over 4748.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.035, over 870144.52 frames.], batch size: 16, lr: 2.41e-04 +2022-05-06 09:08:38,888 INFO [train.py:715] (7/8) Epoch 9, batch 500, loss[loss=0.1367, simple_loss=0.2077, pruned_loss=0.03283, over 4806.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03474, over 892726.64 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:09:19,207 INFO [train.py:715] (7/8) Epoch 9, batch 550, loss[loss=0.1425, simple_loss=0.2085, pruned_loss=0.03828, over 4970.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2136, pruned_loss=0.03475, over 910881.05 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:09:58,808 INFO [train.py:715] (7/8) Epoch 9, batch 600, loss[loss=0.1255, simple_loss=0.1996, pruned_loss=0.02572, over 4816.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03498, over 925424.27 frames.], batch size: 24, lr: 2.41e-04 +2022-05-06 09:10:37,827 INFO [train.py:715] (7/8) Epoch 9, batch 650, loss[loss=0.1814, simple_loss=0.2518, pruned_loss=0.05552, over 4835.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.03543, over 935961.87 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:11:16,919 INFO [train.py:715] (7/8) Epoch 9, batch 700, loss[loss=0.1467, simple_loss=0.2148, pruned_loss=0.03928, over 4829.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03487, over 944057.05 frames.], batch size: 27, lr: 2.41e-04 +2022-05-06 09:11:56,399 INFO [train.py:715] (7/8) Epoch 9, batch 750, loss[loss=0.1588, simple_loss=0.2167, pruned_loss=0.05046, over 4772.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03529, over 950024.05 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:12:35,542 INFO [train.py:715] (7/8) Epoch 9, batch 800, loss[loss=0.1187, simple_loss=0.1952, 
pruned_loss=0.02113, over 4817.00 frames.], tot_loss[loss=0.142, simple_loss=0.2142, pruned_loss=0.03483, over 954439.54 frames.], batch size: 27, lr: 2.41e-04 +2022-05-06 09:13:14,322 INFO [train.py:715] (7/8) Epoch 9, batch 850, loss[loss=0.1256, simple_loss=0.2, pruned_loss=0.02564, over 4929.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2135, pruned_loss=0.03455, over 958280.30 frames.], batch size: 29, lr: 2.41e-04 +2022-05-06 09:13:53,321 INFO [train.py:715] (7/8) Epoch 9, batch 900, loss[loss=0.1291, simple_loss=0.1999, pruned_loss=0.02911, over 4926.00 frames.], tot_loss[loss=0.141, simple_loss=0.213, pruned_loss=0.03446, over 961260.63 frames.], batch size: 29, lr: 2.41e-04 +2022-05-06 09:14:32,597 INFO [train.py:715] (7/8) Epoch 9, batch 950, loss[loss=0.1757, simple_loss=0.253, pruned_loss=0.04913, over 4897.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2127, pruned_loss=0.03448, over 964246.93 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:15:12,211 INFO [train.py:715] (7/8) Epoch 9, batch 1000, loss[loss=0.1375, simple_loss=0.2198, pruned_loss=0.02756, over 4946.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2131, pruned_loss=0.03471, over 966191.59 frames.], batch size: 29, lr: 2.41e-04 +2022-05-06 09:15:50,372 INFO [train.py:715] (7/8) Epoch 9, batch 1050, loss[loss=0.1613, simple_loss=0.2276, pruned_loss=0.04753, over 4886.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2129, pruned_loss=0.0347, over 966960.67 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:16:30,512 INFO [train.py:715] (7/8) Epoch 9, batch 1100, loss[loss=0.115, simple_loss=0.1838, pruned_loss=0.02312, over 4815.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2132, pruned_loss=0.03457, over 968023.79 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:17:10,346 INFO [train.py:715] (7/8) Epoch 9, batch 1150, loss[loss=0.1264, simple_loss=0.1978, pruned_loss=0.02754, over 4829.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2139, pruned_loss=0.03472, over 969404.95 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:17:49,484 INFO [train.py:715] (7/8) Epoch 9, batch 1200, loss[loss=0.1121, simple_loss=0.1915, pruned_loss=0.01632, over 4969.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03452, over 970269.50 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:18:28,821 INFO [train.py:715] (7/8) Epoch 9, batch 1250, loss[loss=0.1404, simple_loss=0.2136, pruned_loss=0.03363, over 4843.00 frames.], tot_loss[loss=0.1412, simple_loss=0.214, pruned_loss=0.03424, over 971298.49 frames.], batch size: 34, lr: 2.41e-04 +2022-05-06 09:19:08,561 INFO [train.py:715] (7/8) Epoch 9, batch 1300, loss[loss=0.1335, simple_loss=0.2085, pruned_loss=0.02926, over 4982.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03414, over 972303.88 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:19:48,100 INFO [train.py:715] (7/8) Epoch 9, batch 1350, loss[loss=0.1411, simple_loss=0.2184, pruned_loss=0.03192, over 4893.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.03376, over 972466.62 frames.], batch size: 22, lr: 2.41e-04 +2022-05-06 09:20:26,897 INFO [train.py:715] (7/8) Epoch 9, batch 1400, loss[loss=0.1576, simple_loss=0.2192, pruned_loss=0.04803, over 4897.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03398, over 972079.35 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:21:06,501 INFO [train.py:715] (7/8) Epoch 9, batch 1450, loss[loss=0.1259, simple_loss=0.1903, pruned_loss=0.03077, over 4914.00 frames.], 
tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03406, over 973243.61 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:21:45,312 INFO [train.py:715] (7/8) Epoch 9, batch 1500, loss[loss=0.1273, simple_loss=0.2032, pruned_loss=0.02576, over 4813.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03468, over 973348.70 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:22:24,145 INFO [train.py:715] (7/8) Epoch 9, batch 1550, loss[loss=0.1491, simple_loss=0.2178, pruned_loss=0.04025, over 4977.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03464, over 972772.89 frames.], batch size: 33, lr: 2.41e-04 +2022-05-06 09:23:03,179 INFO [train.py:715] (7/8) Epoch 9, batch 1600, loss[loss=0.1602, simple_loss=0.2456, pruned_loss=0.0374, over 4955.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2144, pruned_loss=0.03444, over 973049.75 frames.], batch size: 24, lr: 2.41e-04 +2022-05-06 09:23:42,083 INFO [train.py:715] (7/8) Epoch 9, batch 1650, loss[loss=0.1107, simple_loss=0.1898, pruned_loss=0.0158, over 4758.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2153, pruned_loss=0.03484, over 972492.68 frames.], batch size: 19, lr: 2.41e-04 +2022-05-06 09:24:21,076 INFO [train.py:715] (7/8) Epoch 9, batch 1700, loss[loss=0.15, simple_loss=0.2259, pruned_loss=0.03705, over 4813.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2149, pruned_loss=0.0348, over 972609.14 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:25:00,146 INFO [train.py:715] (7/8) Epoch 9, batch 1750, loss[loss=0.1591, simple_loss=0.2282, pruned_loss=0.04502, over 4854.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.03474, over 973107.11 frames.], batch size: 20, lr: 2.41e-04 +2022-05-06 09:25:39,673 INFO [train.py:715] (7/8) Epoch 9, batch 1800, loss[loss=0.1577, simple_loss=0.2261, pruned_loss=0.04464, over 4832.00 frames.], tot_loss[loss=0.1433, simple_loss=0.216, pruned_loss=0.03534, over 972874.96 frames.], batch size: 25, lr: 2.41e-04 +2022-05-06 09:26:18,855 INFO [train.py:715] (7/8) Epoch 9, batch 1850, loss[loss=0.1328, simple_loss=0.2006, pruned_loss=0.03247, over 4934.00 frames.], tot_loss[loss=0.1443, simple_loss=0.2167, pruned_loss=0.03597, over 972171.39 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:26:57,986 INFO [train.py:715] (7/8) Epoch 9, batch 1900, loss[loss=0.1601, simple_loss=0.2285, pruned_loss=0.04582, over 4926.00 frames.], tot_loss[loss=0.1437, simple_loss=0.216, pruned_loss=0.03573, over 972069.72 frames.], batch size: 23, lr: 2.41e-04 +2022-05-06 09:27:37,987 INFO [train.py:715] (7/8) Epoch 9, batch 1950, loss[loss=0.1592, simple_loss=0.2299, pruned_loss=0.04419, over 4824.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2157, pruned_loss=0.03536, over 971594.00 frames.], batch size: 26, lr: 2.41e-04 +2022-05-06 09:28:17,646 INFO [train.py:715] (7/8) Epoch 9, batch 2000, loss[loss=0.1343, simple_loss=0.2035, pruned_loss=0.03257, over 4989.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03501, over 972352.57 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:28:56,804 INFO [train.py:715] (7/8) Epoch 9, batch 2050, loss[loss=0.139, simple_loss=0.2071, pruned_loss=0.03549, over 4830.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.0348, over 972048.70 frames.], batch size: 27, lr: 2.41e-04 +2022-05-06 09:29:35,324 INFO [train.py:715] (7/8) Epoch 9, batch 2100, loss[loss=0.1389, simple_loss=0.2226, pruned_loss=0.02754, over 4927.00 frames.], tot_loss[loss=0.1418, 
simple_loss=0.2147, pruned_loss=0.03449, over 972798.09 frames.], batch size: 23, lr: 2.41e-04 +2022-05-06 09:30:14,647 INFO [train.py:715] (7/8) Epoch 9, batch 2150, loss[loss=0.1177, simple_loss=0.1933, pruned_loss=0.02101, over 4818.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2146, pruned_loss=0.03424, over 973350.85 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:30:53,734 INFO [train.py:715] (7/8) Epoch 9, batch 2200, loss[loss=0.1214, simple_loss=0.1936, pruned_loss=0.02459, over 4894.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2152, pruned_loss=0.03466, over 972731.18 frames.], batch size: 17, lr: 2.41e-04 +2022-05-06 09:31:32,492 INFO [train.py:715] (7/8) Epoch 9, batch 2250, loss[loss=0.1228, simple_loss=0.2011, pruned_loss=0.02222, over 4810.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2152, pruned_loss=0.03455, over 972706.65 frames.], batch size: 27, lr: 2.41e-04 +2022-05-06 09:32:11,658 INFO [train.py:715] (7/8) Epoch 9, batch 2300, loss[loss=0.1468, simple_loss=0.2176, pruned_loss=0.03798, over 4915.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03437, over 972992.56 frames.], batch size: 39, lr: 2.41e-04 +2022-05-06 09:32:50,736 INFO [train.py:715] (7/8) Epoch 9, batch 2350, loss[loss=0.1572, simple_loss=0.2236, pruned_loss=0.04542, over 4829.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2139, pruned_loss=0.03485, over 972870.78 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:33:30,103 INFO [train.py:715] (7/8) Epoch 9, batch 2400, loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02846, over 4825.00 frames.], tot_loss[loss=0.1421, simple_loss=0.214, pruned_loss=0.03506, over 973181.61 frames.], batch size: 15, lr: 2.41e-04 +2022-05-06 09:34:08,887 INFO [train.py:715] (7/8) Epoch 9, batch 2450, loss[loss=0.1449, simple_loss=0.2101, pruned_loss=0.03987, over 4842.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2137, pruned_loss=0.03504, over 973192.53 frames.], batch size: 30, lr: 2.41e-04 +2022-05-06 09:34:48,500 INFO [train.py:715] (7/8) Epoch 9, batch 2500, loss[loss=0.1457, simple_loss=0.2185, pruned_loss=0.03642, over 4889.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2139, pruned_loss=0.03492, over 972754.34 frames.], batch size: 16, lr: 2.41e-04 +2022-05-06 09:35:27,022 INFO [train.py:715] (7/8) Epoch 9, batch 2550, loss[loss=0.1281, simple_loss=0.2031, pruned_loss=0.02658, over 4861.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.03454, over 973131.07 frames.], batch size: 20, lr: 2.41e-04 +2022-05-06 09:36:06,039 INFO [train.py:715] (7/8) Epoch 9, batch 2600, loss[loss=0.1243, simple_loss=0.2033, pruned_loss=0.02267, over 4817.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03456, over 972536.16 frames.], batch size: 14, lr: 2.41e-04 +2022-05-06 09:36:45,109 INFO [train.py:715] (7/8) Epoch 9, batch 2650, loss[loss=0.1443, simple_loss=0.2167, pruned_loss=0.03592, over 4859.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2125, pruned_loss=0.03424, over 972452.01 frames.], batch size: 30, lr: 2.41e-04 +2022-05-06 09:37:24,475 INFO [train.py:715] (7/8) Epoch 9, batch 2700, loss[loss=0.1485, simple_loss=0.2238, pruned_loss=0.03663, over 4821.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2123, pruned_loss=0.03396, over 972806.50 frames.], batch size: 27, lr: 2.40e-04 +2022-05-06 09:38:03,295 INFO [train.py:715] (7/8) Epoch 9, batch 2750, loss[loss=0.1312, simple_loss=0.1977, pruned_loss=0.03239, over 4984.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, 
pruned_loss=0.03396, over 972718.29 frames.], batch size: 31, lr: 2.40e-04 +2022-05-06 09:38:42,266 INFO [train.py:715] (7/8) Epoch 9, batch 2800, loss[loss=0.1392, simple_loss=0.2151, pruned_loss=0.03162, over 4739.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03408, over 972656.91 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:39:21,841 INFO [train.py:715] (7/8) Epoch 9, batch 2850, loss[loss=0.165, simple_loss=0.2418, pruned_loss=0.04406, over 4706.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2137, pruned_loss=0.03453, over 971932.65 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:40:00,910 INFO [train.py:715] (7/8) Epoch 9, batch 2900, loss[loss=0.1307, simple_loss=0.2086, pruned_loss=0.02642, over 4780.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03412, over 971994.79 frames.], batch size: 17, lr: 2.40e-04 +2022-05-06 09:40:39,680 INFO [train.py:715] (7/8) Epoch 9, batch 2950, loss[loss=0.1476, simple_loss=0.2194, pruned_loss=0.03787, over 4773.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03428, over 971973.99 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 09:41:18,904 INFO [train.py:715] (7/8) Epoch 9, batch 3000, loss[loss=0.1237, simple_loss=0.1985, pruned_loss=0.02446, over 4766.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03462, over 971545.69 frames.], batch size: 19, lr: 2.40e-04 +2022-05-06 09:41:18,905 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 09:41:28,535 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1069, simple_loss=0.1915, pruned_loss=0.01118, over 914524.00 frames. +2022-05-06 09:42:08,251 INFO [train.py:715] (7/8) Epoch 9, batch 3050, loss[loss=0.1321, simple_loss=0.209, pruned_loss=0.02756, over 4802.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03466, over 971195.51 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 09:42:47,737 INFO [train.py:715] (7/8) Epoch 9, batch 3100, loss[loss=0.1432, simple_loss=0.2097, pruned_loss=0.03833, over 4948.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2136, pruned_loss=0.03474, over 970989.34 frames.], batch size: 21, lr: 2.40e-04 +2022-05-06 09:43:27,214 INFO [train.py:715] (7/8) Epoch 9, batch 3150, loss[loss=0.132, simple_loss=0.2103, pruned_loss=0.02685, over 4830.00 frames.], tot_loss[loss=0.1431, simple_loss=0.215, pruned_loss=0.0356, over 971667.58 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:44:06,426 INFO [train.py:715] (7/8) Epoch 9, batch 3200, loss[loss=0.1555, simple_loss=0.2344, pruned_loss=0.03829, over 4865.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2142, pruned_loss=0.03515, over 971523.79 frames.], batch size: 30, lr: 2.40e-04 +2022-05-06 09:44:45,581 INFO [train.py:715] (7/8) Epoch 9, batch 3250, loss[loss=0.1524, simple_loss=0.227, pruned_loss=0.03893, over 4899.00 frames.], tot_loss[loss=0.1423, simple_loss=0.214, pruned_loss=0.03531, over 972618.05 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 09:45:24,839 INFO [train.py:715] (7/8) Epoch 9, batch 3300, loss[loss=0.111, simple_loss=0.1868, pruned_loss=0.01753, over 4700.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2137, pruned_loss=0.03482, over 972393.15 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:46:03,661 INFO [train.py:715] (7/8) Epoch 9, batch 3350, loss[loss=0.1571, simple_loss=0.2367, pruned_loss=0.03879, over 4783.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.03451, over 972805.98 frames.], batch size: 17, lr: 2.40e-04 
+2022-05-06 09:46:42,963 INFO [train.py:715] (7/8) Epoch 9, batch 3400, loss[loss=0.1407, simple_loss=0.215, pruned_loss=0.03326, over 4986.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03398, over 973636.66 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 09:47:22,071 INFO [train.py:715] (7/8) Epoch 9, batch 3450, loss[loss=0.1375, simple_loss=0.2163, pruned_loss=0.02938, over 4855.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03488, over 973876.48 frames.], batch size: 20, lr: 2.40e-04 +2022-05-06 09:48:00,723 INFO [train.py:715] (7/8) Epoch 9, batch 3500, loss[loss=0.1775, simple_loss=0.2385, pruned_loss=0.05821, over 4803.00 frames.], tot_loss[loss=0.142, simple_loss=0.2145, pruned_loss=0.03475, over 973861.06 frames.], batch size: 21, lr: 2.40e-04 +2022-05-06 09:48:40,287 INFO [train.py:715] (7/8) Epoch 9, batch 3550, loss[loss=0.1575, simple_loss=0.227, pruned_loss=0.04403, over 4902.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2156, pruned_loss=0.03504, over 973854.21 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 09:49:19,726 INFO [train.py:715] (7/8) Epoch 9, batch 3600, loss[loss=0.1509, simple_loss=0.2299, pruned_loss=0.03592, over 4951.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03521, over 973956.25 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 09:49:59,015 INFO [train.py:715] (7/8) Epoch 9, batch 3650, loss[loss=0.1395, simple_loss=0.2087, pruned_loss=0.03517, over 4772.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03472, over 973630.06 frames.], batch size: 19, lr: 2.40e-04 +2022-05-06 09:50:37,660 INFO [train.py:715] (7/8) Epoch 9, batch 3700, loss[loss=0.1393, simple_loss=0.2103, pruned_loss=0.03416, over 4884.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2154, pruned_loss=0.03505, over 973967.29 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:51:17,147 INFO [train.py:715] (7/8) Epoch 9, batch 3750, loss[loss=0.137, simple_loss=0.2153, pruned_loss=0.02935, over 4914.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2148, pruned_loss=0.03479, over 974651.32 frames.], batch size: 23, lr: 2.40e-04 +2022-05-06 09:51:56,919 INFO [train.py:715] (7/8) Epoch 9, batch 3800, loss[loss=0.1261, simple_loss=0.2051, pruned_loss=0.02357, over 4834.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03515, over 974160.82 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 09:52:35,339 INFO [train.py:715] (7/8) Epoch 9, batch 3850, loss[loss=0.1909, simple_loss=0.2539, pruned_loss=0.06399, over 4886.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2148, pruned_loss=0.03483, over 973987.93 frames.], batch size: 32, lr: 2.40e-04 +2022-05-06 09:53:14,341 INFO [train.py:715] (7/8) Epoch 9, batch 3900, loss[loss=0.1553, simple_loss=0.2329, pruned_loss=0.03889, over 4795.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03471, over 973738.42 frames.], batch size: 12, lr: 2.40e-04 +2022-05-06 09:53:53,825 INFO [train.py:715] (7/8) Epoch 9, batch 3950, loss[loss=0.1636, simple_loss=0.2356, pruned_loss=0.0458, over 4897.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2153, pruned_loss=0.03499, over 973603.92 frames.], batch size: 22, lr: 2.40e-04 +2022-05-06 09:54:33,401 INFO [train.py:715] (7/8) Epoch 9, batch 4000, loss[loss=0.1794, simple_loss=0.2422, pruned_loss=0.05827, over 4980.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03496, over 973248.86 frames.], batch size: 40, lr: 2.40e-04 +2022-05-06 09:55:12,128 INFO 
[train.py:715] (7/8) Epoch 9, batch 4050, loss[loss=0.1447, simple_loss=0.2127, pruned_loss=0.03839, over 4943.00 frames.], tot_loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.03539, over 972552.25 frames.], batch size: 21, lr: 2.40e-04 +2022-05-06 09:55:52,107 INFO [train.py:715] (7/8) Epoch 9, batch 4100, loss[loss=0.1147, simple_loss=0.182, pruned_loss=0.02367, over 4862.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03457, over 972417.91 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 09:56:30,807 INFO [train.py:715] (7/8) Epoch 9, batch 4150, loss[loss=0.1717, simple_loss=0.2384, pruned_loss=0.05249, over 4916.00 frames.], tot_loss[loss=0.142, simple_loss=0.2148, pruned_loss=0.0346, over 972048.57 frames.], batch size: 39, lr: 2.40e-04 +2022-05-06 09:57:10,159 INFO [train.py:715] (7/8) Epoch 9, batch 4200, loss[loss=0.1531, simple_loss=0.2163, pruned_loss=0.04495, over 4904.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03466, over 971944.34 frames.], batch size: 17, lr: 2.40e-04 +2022-05-06 09:57:49,723 INFO [train.py:715] (7/8) Epoch 9, batch 4250, loss[loss=0.1623, simple_loss=0.2279, pruned_loss=0.04831, over 4917.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03514, over 971860.36 frames.], batch size: 39, lr: 2.40e-04 +2022-05-06 09:58:29,618 INFO [train.py:715] (7/8) Epoch 9, batch 4300, loss[loss=0.1471, simple_loss=0.2205, pruned_loss=0.03691, over 4920.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03527, over 971784.47 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 09:59:09,599 INFO [train.py:715] (7/8) Epoch 9, batch 4350, loss[loss=0.1287, simple_loss=0.197, pruned_loss=0.0302, over 4842.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03539, over 971890.52 frames.], batch size: 13, lr: 2.40e-04 +2022-05-06 09:59:48,193 INFO [train.py:715] (7/8) Epoch 9, batch 4400, loss[loss=0.1557, simple_loss=0.236, pruned_loss=0.03766, over 4755.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.0351, over 972270.62 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 10:00:27,691 INFO [train.py:715] (7/8) Epoch 9, batch 4450, loss[loss=0.1858, simple_loss=0.2564, pruned_loss=0.05758, over 4899.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03504, over 972543.82 frames.], batch size: 19, lr: 2.40e-04 +2022-05-06 10:01:06,483 INFO [train.py:715] (7/8) Epoch 9, batch 4500, loss[loss=0.1439, simple_loss=0.2188, pruned_loss=0.03451, over 4678.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03461, over 971969.48 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 10:01:45,449 INFO [train.py:715] (7/8) Epoch 9, batch 4550, loss[loss=0.1545, simple_loss=0.2319, pruned_loss=0.03861, over 4794.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2156, pruned_loss=0.03488, over 972775.24 frames.], batch size: 24, lr: 2.40e-04 +2022-05-06 10:02:24,725 INFO [train.py:715] (7/8) Epoch 9, batch 4600, loss[loss=0.1327, simple_loss=0.2079, pruned_loss=0.02872, over 4876.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2153, pruned_loss=0.03486, over 972231.44 frames.], batch size: 22, lr: 2.40e-04 +2022-05-06 10:03:04,293 INFO [train.py:715] (7/8) Epoch 9, batch 4650, loss[loss=0.1632, simple_loss=0.2174, pruned_loss=0.05447, over 4747.00 frames.], tot_loss[loss=0.1433, simple_loss=0.216, pruned_loss=0.03528, over 972075.35 frames.], batch size: 16, lr: 2.40e-04 +2022-05-06 10:03:43,902 INFO [train.py:715] (7/8) Epoch 9, batch 4700, 
loss[loss=0.1468, simple_loss=0.2165, pruned_loss=0.03852, over 4979.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2151, pruned_loss=0.03463, over 972263.56 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 10:04:22,850 INFO [train.py:715] (7/8) Epoch 9, batch 4750, loss[loss=0.1407, simple_loss=0.2198, pruned_loss=0.03079, over 4823.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03503, over 971708.78 frames.], batch size: 26, lr: 2.40e-04 +2022-05-06 10:05:02,426 INFO [train.py:715] (7/8) Epoch 9, batch 4800, loss[loss=0.1537, simple_loss=0.2204, pruned_loss=0.04348, over 4695.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.0347, over 972287.15 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 10:05:41,422 INFO [train.py:715] (7/8) Epoch 9, batch 4850, loss[loss=0.1279, simple_loss=0.1951, pruned_loss=0.03037, over 4777.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03406, over 972705.50 frames.], batch size: 12, lr: 2.40e-04 +2022-05-06 10:06:20,854 INFO [train.py:715] (7/8) Epoch 9, batch 4900, loss[loss=0.1628, simple_loss=0.2402, pruned_loss=0.04274, over 4767.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03437, over 972653.52 frames.], batch size: 14, lr: 2.40e-04 +2022-05-06 10:06:59,739 INFO [train.py:715] (7/8) Epoch 9, batch 4950, loss[loss=0.1263, simple_loss=0.1995, pruned_loss=0.02656, over 4925.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03404, over 972573.69 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 10:07:39,116 INFO [train.py:715] (7/8) Epoch 9, batch 5000, loss[loss=0.1444, simple_loss=0.2136, pruned_loss=0.03763, over 4923.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.03416, over 972005.37 frames.], batch size: 39, lr: 2.40e-04 +2022-05-06 10:08:18,417 INFO [train.py:715] (7/8) Epoch 9, batch 5050, loss[loss=0.1512, simple_loss=0.2226, pruned_loss=0.03987, over 4925.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2137, pruned_loss=0.03408, over 971899.98 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 10:08:57,174 INFO [train.py:715] (7/8) Epoch 9, batch 5100, loss[loss=0.1394, simple_loss=0.2058, pruned_loss=0.03653, over 4909.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03438, over 971512.54 frames.], batch size: 18, lr: 2.40e-04 +2022-05-06 10:09:36,561 INFO [train.py:715] (7/8) Epoch 9, batch 5150, loss[loss=0.1682, simple_loss=0.2429, pruned_loss=0.04672, over 4983.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03415, over 971476.95 frames.], batch size: 39, lr: 2.40e-04 +2022-05-06 10:10:15,466 INFO [train.py:715] (7/8) Epoch 9, batch 5200, loss[loss=0.1537, simple_loss=0.2352, pruned_loss=0.03614, over 4950.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2141, pruned_loss=0.03405, over 971447.64 frames.], batch size: 21, lr: 2.40e-04 +2022-05-06 10:10:54,750 INFO [train.py:715] (7/8) Epoch 9, batch 5250, loss[loss=0.1524, simple_loss=0.2212, pruned_loss=0.04182, over 4813.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03442, over 971638.73 frames.], batch size: 15, lr: 2.40e-04 +2022-05-06 10:11:33,955 INFO [train.py:715] (7/8) Epoch 9, batch 5300, loss[loss=0.1438, simple_loss=0.2274, pruned_loss=0.03015, over 4807.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03397, over 971840.57 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:12:13,444 INFO [train.py:715] (7/8) Epoch 9, batch 5350, loss[loss=0.1612, 
simple_loss=0.2322, pruned_loss=0.04508, over 4820.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03473, over 972592.29 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:12:52,105 INFO [train.py:715] (7/8) Epoch 9, batch 5400, loss[loss=0.1503, simple_loss=0.2219, pruned_loss=0.03936, over 4950.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03454, over 971986.97 frames.], batch size: 35, lr: 2.39e-04 +2022-05-06 10:13:30,899 INFO [train.py:715] (7/8) Epoch 9, batch 5450, loss[loss=0.1477, simple_loss=0.2101, pruned_loss=0.0426, over 4698.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.03433, over 972719.36 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:14:10,211 INFO [train.py:715] (7/8) Epoch 9, batch 5500, loss[loss=0.1451, simple_loss=0.2281, pruned_loss=0.03101, over 4858.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.0345, over 972251.45 frames.], batch size: 20, lr: 2.39e-04 +2022-05-06 10:14:49,300 INFO [train.py:715] (7/8) Epoch 9, batch 5550, loss[loss=0.1445, simple_loss=0.2165, pruned_loss=0.03629, over 4969.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.035, over 971946.54 frames.], batch size: 25, lr: 2.39e-04 +2022-05-06 10:15:28,468 INFO [train.py:715] (7/8) Epoch 9, batch 5600, loss[loss=0.1632, simple_loss=0.2234, pruned_loss=0.05157, over 4827.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03495, over 972730.26 frames.], batch size: 26, lr: 2.39e-04 +2022-05-06 10:16:07,456 INFO [train.py:715] (7/8) Epoch 9, batch 5650, loss[loss=0.1511, simple_loss=0.218, pruned_loss=0.04212, over 4789.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2165, pruned_loss=0.03527, over 972402.36 frames.], batch size: 17, lr: 2.39e-04 +2022-05-06 10:16:47,098 INFO [train.py:715] (7/8) Epoch 9, batch 5700, loss[loss=0.1259, simple_loss=0.2069, pruned_loss=0.02249, over 4953.00 frames.], tot_loss[loss=0.143, simple_loss=0.216, pruned_loss=0.03503, over 972872.92 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:17:26,141 INFO [train.py:715] (7/8) Epoch 9, batch 5750, loss[loss=0.1575, simple_loss=0.2392, pruned_loss=0.03789, over 4881.00 frames.], tot_loss[loss=0.143, simple_loss=0.2159, pruned_loss=0.03504, over 972838.75 frames.], batch size: 16, lr: 2.39e-04 +2022-05-06 10:18:04,787 INFO [train.py:715] (7/8) Epoch 9, batch 5800, loss[loss=0.1293, simple_loss=0.2035, pruned_loss=0.02759, over 4786.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03528, over 972512.80 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:18:44,315 INFO [train.py:715] (7/8) Epoch 9, batch 5850, loss[loss=0.1636, simple_loss=0.2361, pruned_loss=0.0455, over 4935.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03492, over 972445.95 frames.], batch size: 39, lr: 2.39e-04 +2022-05-06 10:19:23,132 INFO [train.py:715] (7/8) Epoch 9, batch 5900, loss[loss=0.1271, simple_loss=0.1952, pruned_loss=0.02946, over 4856.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2144, pruned_loss=0.03511, over 971532.66 frames.], batch size: 30, lr: 2.39e-04 +2022-05-06 10:20:02,778 INFO [train.py:715] (7/8) Epoch 9, batch 5950, loss[loss=0.1399, simple_loss=0.2107, pruned_loss=0.03453, over 4785.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2154, pruned_loss=0.03575, over 973121.45 frames.], batch size: 23, lr: 2.39e-04 +2022-05-06 10:20:41,535 INFO [train.py:715] (7/8) Epoch 9, batch 6000, loss[loss=0.1507, simple_loss=0.2285, pruned_loss=0.03643, 
over 4910.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03549, over 973507.40 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:20:41,536 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 10:20:51,194 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.107, simple_loss=0.1914, pruned_loss=0.0113, over 914524.00 frames. +2022-05-06 10:21:30,885 INFO [train.py:715] (7/8) Epoch 9, batch 6050, loss[loss=0.1543, simple_loss=0.2192, pruned_loss=0.04465, over 4775.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03521, over 973089.76 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:22:10,755 INFO [train.py:715] (7/8) Epoch 9, batch 6100, loss[loss=0.1321, simple_loss=0.2101, pruned_loss=0.02702, over 4983.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03509, over 972771.76 frames.], batch size: 24, lr: 2.39e-04 +2022-05-06 10:22:49,974 INFO [train.py:715] (7/8) Epoch 9, batch 6150, loss[loss=0.1399, simple_loss=0.2239, pruned_loss=0.02794, over 4967.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2147, pruned_loss=0.0352, over 972983.62 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:23:28,785 INFO [train.py:715] (7/8) Epoch 9, batch 6200, loss[loss=0.1508, simple_loss=0.2248, pruned_loss=0.03842, over 4752.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03504, over 972985.29 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:24:08,420 INFO [train.py:715] (7/8) Epoch 9, batch 6250, loss[loss=0.1413, simple_loss=0.2244, pruned_loss=0.02907, over 4983.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03548, over 972551.95 frames.], batch size: 25, lr: 2.39e-04 +2022-05-06 10:24:47,202 INFO [train.py:715] (7/8) Epoch 9, batch 6300, loss[loss=0.1417, simple_loss=0.2277, pruned_loss=0.02785, over 4986.00 frames.], tot_loss[loss=0.143, simple_loss=0.2155, pruned_loss=0.03522, over 973275.81 frames.], batch size: 39, lr: 2.39e-04 +2022-05-06 10:25:26,321 INFO [train.py:715] (7/8) Epoch 9, batch 6350, loss[loss=0.1376, simple_loss=0.2237, pruned_loss=0.02574, over 4785.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2154, pruned_loss=0.03505, over 973923.63 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:26:05,955 INFO [train.py:715] (7/8) Epoch 9, batch 6400, loss[loss=0.1496, simple_loss=0.2237, pruned_loss=0.03771, over 4775.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.0356, over 973660.16 frames.], batch size: 17, lr: 2.39e-04 +2022-05-06 10:26:46,100 INFO [train.py:715] (7/8) Epoch 9, batch 6450, loss[loss=0.1624, simple_loss=0.2368, pruned_loss=0.04403, over 4880.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2158, pruned_loss=0.03558, over 973538.79 frames.], batch size: 39, lr: 2.39e-04 +2022-05-06 10:27:25,425 INFO [train.py:715] (7/8) Epoch 9, batch 6500, loss[loss=0.1527, simple_loss=0.2214, pruned_loss=0.04199, over 4768.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2159, pruned_loss=0.03561, over 973043.74 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:28:04,261 INFO [train.py:715] (7/8) Epoch 9, batch 6550, loss[loss=0.1154, simple_loss=0.1951, pruned_loss=0.01781, over 4878.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03471, over 973585.37 frames.], batch size: 22, lr: 2.39e-04 +2022-05-06 10:28:44,038 INFO [train.py:715] (7/8) Epoch 9, batch 6600, loss[loss=0.167, simple_loss=0.2438, pruned_loss=0.04513, over 4785.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2147, 
pruned_loss=0.03476, over 972878.00 frames.], batch size: 17, lr: 2.39e-04 +2022-05-06 10:29:23,595 INFO [train.py:715] (7/8) Epoch 9, batch 6650, loss[loss=0.1371, simple_loss=0.2081, pruned_loss=0.03307, over 4817.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03473, over 972244.75 frames.], batch size: 26, lr: 2.39e-04 +2022-05-06 10:30:02,749 INFO [train.py:715] (7/8) Epoch 9, batch 6700, loss[loss=0.1594, simple_loss=0.2299, pruned_loss=0.04444, over 4975.00 frames.], tot_loss[loss=0.143, simple_loss=0.216, pruned_loss=0.03502, over 972772.95 frames.], batch size: 39, lr: 2.39e-04 +2022-05-06 10:30:44,173 INFO [train.py:715] (7/8) Epoch 9, batch 6750, loss[loss=0.1576, simple_loss=0.2183, pruned_loss=0.0484, over 4900.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2155, pruned_loss=0.03509, over 972346.03 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:31:23,604 INFO [train.py:715] (7/8) Epoch 9, batch 6800, loss[loss=0.1287, simple_loss=0.2026, pruned_loss=0.02737, over 4776.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03494, over 972281.85 frames.], batch size: 14, lr: 2.39e-04 +2022-05-06 10:32:02,559 INFO [train.py:715] (7/8) Epoch 9, batch 6850, loss[loss=0.1458, simple_loss=0.2252, pruned_loss=0.03317, over 4857.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03446, over 972519.23 frames.], batch size: 20, lr: 2.39e-04 +2022-05-06 10:32:40,754 INFO [train.py:715] (7/8) Epoch 9, batch 6900, loss[loss=0.1374, simple_loss=0.2068, pruned_loss=0.03399, over 4941.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2152, pruned_loss=0.03454, over 971705.88 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:33:20,060 INFO [train.py:715] (7/8) Epoch 9, batch 6950, loss[loss=0.135, simple_loss=0.2125, pruned_loss=0.02879, over 4827.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03405, over 971188.35 frames.], batch size: 30, lr: 2.39e-04 +2022-05-06 10:33:59,866 INFO [train.py:715] (7/8) Epoch 9, batch 7000, loss[loss=0.1338, simple_loss=0.2039, pruned_loss=0.0319, over 4963.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2136, pruned_loss=0.03362, over 970732.15 frames.], batch size: 35, lr: 2.39e-04 +2022-05-06 10:34:38,726 INFO [train.py:715] (7/8) Epoch 9, batch 7050, loss[loss=0.1454, simple_loss=0.2142, pruned_loss=0.03833, over 4901.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03391, over 970727.59 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:35:17,349 INFO [train.py:715] (7/8) Epoch 9, batch 7100, loss[loss=0.1277, simple_loss=0.2043, pruned_loss=0.02555, over 4923.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03448, over 971787.22 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:35:56,811 INFO [train.py:715] (7/8) Epoch 9, batch 7150, loss[loss=0.1556, simple_loss=0.2229, pruned_loss=0.04419, over 4930.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03423, over 971179.51 frames.], batch size: 29, lr: 2.39e-04 +2022-05-06 10:36:35,508 INFO [train.py:715] (7/8) Epoch 9, batch 7200, loss[loss=0.1785, simple_loss=0.2428, pruned_loss=0.05716, over 4959.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03463, over 971667.83 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:37:14,252 INFO [train.py:715] (7/8) Epoch 9, batch 7250, loss[loss=0.1503, simple_loss=0.2246, pruned_loss=0.03801, over 4925.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.03396, over 971757.57 
frames.], batch size: 29, lr: 2.39e-04 +2022-05-06 10:37:53,498 INFO [train.py:715] (7/8) Epoch 9, batch 7300, loss[loss=0.1605, simple_loss=0.2331, pruned_loss=0.04393, over 4962.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2146, pruned_loss=0.03436, over 972509.53 frames.], batch size: 25, lr: 2.39e-04 +2022-05-06 10:38:32,803 INFO [train.py:715] (7/8) Epoch 9, batch 7350, loss[loss=0.1262, simple_loss=0.1944, pruned_loss=0.02904, over 4934.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2144, pruned_loss=0.03435, over 971749.41 frames.], batch size: 29, lr: 2.39e-04 +2022-05-06 10:39:11,303 INFO [train.py:715] (7/8) Epoch 9, batch 7400, loss[loss=0.1674, simple_loss=0.2379, pruned_loss=0.04849, over 4646.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03405, over 971380.60 frames.], batch size: 13, lr: 2.39e-04 +2022-05-06 10:39:50,259 INFO [train.py:715] (7/8) Epoch 9, batch 7450, loss[loss=0.1307, simple_loss=0.2035, pruned_loss=0.02896, over 4895.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03384, over 971412.09 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:40:30,204 INFO [train.py:715] (7/8) Epoch 9, batch 7500, loss[loss=0.1244, simple_loss=0.2014, pruned_loss=0.02368, over 4955.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2138, pruned_loss=0.03428, over 972551.41 frames.], batch size: 21, lr: 2.39e-04 +2022-05-06 10:41:09,247 INFO [train.py:715] (7/8) Epoch 9, batch 7550, loss[loss=0.1608, simple_loss=0.2396, pruned_loss=0.04101, over 4751.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2141, pruned_loss=0.03462, over 972044.46 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:41:48,089 INFO [train.py:715] (7/8) Epoch 9, batch 7600, loss[loss=0.1246, simple_loss=0.1975, pruned_loss=0.02582, over 4763.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03404, over 972362.12 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:42:27,544 INFO [train.py:715] (7/8) Epoch 9, batch 7650, loss[loss=0.135, simple_loss=0.2036, pruned_loss=0.03321, over 4876.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2147, pruned_loss=0.0344, over 972633.86 frames.], batch size: 22, lr: 2.39e-04 +2022-05-06 10:43:06,742 INFO [train.py:715] (7/8) Epoch 9, batch 7700, loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03166, over 4943.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03439, over 972455.68 frames.], batch size: 29, lr: 2.39e-04 +2022-05-06 10:43:45,565 INFO [train.py:715] (7/8) Epoch 9, batch 7750, loss[loss=0.1921, simple_loss=0.2555, pruned_loss=0.06435, over 4790.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2157, pruned_loss=0.03548, over 971843.49 frames.], batch size: 13, lr: 2.39e-04 +2022-05-06 10:44:24,379 INFO [train.py:715] (7/8) Epoch 9, batch 7800, loss[loss=0.1532, simple_loss=0.2278, pruned_loss=0.0393, over 4891.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2163, pruned_loss=0.03553, over 971949.90 frames.], batch size: 19, lr: 2.39e-04 +2022-05-06 10:45:04,417 INFO [train.py:715] (7/8) Epoch 9, batch 7850, loss[loss=0.1694, simple_loss=0.2438, pruned_loss=0.04756, over 4692.00 frames.], tot_loss[loss=0.143, simple_loss=0.216, pruned_loss=0.03502, over 972162.80 frames.], batch size: 15, lr: 2.39e-04 +2022-05-06 10:45:43,398 INFO [train.py:715] (7/8) Epoch 9, batch 7900, loss[loss=0.1409, simple_loss=0.2226, pruned_loss=0.02956, over 4697.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2156, pruned_loss=0.03482, over 971287.94 frames.], batch size: 15, lr: 2.39e-04 
+2022-05-06 10:46:21,526 INFO [train.py:715] (7/8) Epoch 9, batch 7950, loss[loss=0.1454, simple_loss=0.2142, pruned_loss=0.03823, over 4765.00 frames.], tot_loss[loss=0.142, simple_loss=0.215, pruned_loss=0.03449, over 971765.92 frames.], batch size: 18, lr: 2.39e-04 +2022-05-06 10:47:00,915 INFO [train.py:715] (7/8) Epoch 9, batch 8000, loss[loss=0.1397, simple_loss=0.2187, pruned_loss=0.03031, over 4795.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2147, pruned_loss=0.0343, over 971971.30 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 10:47:39,934 INFO [train.py:715] (7/8) Epoch 9, batch 8050, loss[loss=0.1336, simple_loss=0.2125, pruned_loss=0.0274, over 4785.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2156, pruned_loss=0.03448, over 972154.03 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 10:48:18,561 INFO [train.py:715] (7/8) Epoch 9, batch 8100, loss[loss=0.1704, simple_loss=0.2374, pruned_loss=0.05169, over 4690.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2159, pruned_loss=0.03466, over 972645.62 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 10:48:57,108 INFO [train.py:715] (7/8) Epoch 9, batch 8150, loss[loss=0.1478, simple_loss=0.2247, pruned_loss=0.03548, over 4819.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.03473, over 973268.97 frames.], batch size: 25, lr: 2.38e-04 +2022-05-06 10:49:36,460 INFO [train.py:715] (7/8) Epoch 9, batch 8200, loss[loss=0.162, simple_loss=0.2299, pruned_loss=0.04705, over 4803.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.03513, over 973018.01 frames.], batch size: 24, lr: 2.38e-04 +2022-05-06 10:50:15,127 INFO [train.py:715] (7/8) Epoch 9, batch 8250, loss[loss=0.1266, simple_loss=0.1986, pruned_loss=0.02736, over 4820.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03491, over 973845.24 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 10:50:53,697 INFO [train.py:715] (7/8) Epoch 9, batch 8300, loss[loss=0.1338, simple_loss=0.2183, pruned_loss=0.02464, over 4869.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03421, over 972543.97 frames.], batch size: 20, lr: 2.38e-04 +2022-05-06 10:51:32,741 INFO [train.py:715] (7/8) Epoch 9, batch 8350, loss[loss=0.136, simple_loss=0.2087, pruned_loss=0.03163, over 4809.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03397, over 972418.47 frames.], batch size: 21, lr: 2.38e-04 +2022-05-06 10:52:12,414 INFO [train.py:715] (7/8) Epoch 9, batch 8400, loss[loss=0.1364, simple_loss=0.2161, pruned_loss=0.02839, over 4783.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03369, over 972050.25 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 10:52:50,772 INFO [train.py:715] (7/8) Epoch 9, batch 8450, loss[loss=0.1605, simple_loss=0.2301, pruned_loss=0.04542, over 4792.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03352, over 971537.49 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 10:53:29,414 INFO [train.py:715] (7/8) Epoch 9, batch 8500, loss[loss=0.1572, simple_loss=0.2271, pruned_loss=0.0437, over 4784.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03426, over 971752.31 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 10:54:08,962 INFO [train.py:715] (7/8) Epoch 9, batch 8550, loss[loss=0.1679, simple_loss=0.2408, pruned_loss=0.04752, over 4777.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03418, over 972130.77 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 10:54:48,130 INFO 
[train.py:715] (7/8) Epoch 9, batch 8600, loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02832, over 4834.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2147, pruned_loss=0.03488, over 971460.85 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 10:55:26,985 INFO [train.py:715] (7/8) Epoch 9, batch 8650, loss[loss=0.141, simple_loss=0.2035, pruned_loss=0.03928, over 4855.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.03464, over 972584.59 frames.], batch size: 13, lr: 2.38e-04 +2022-05-06 10:56:06,799 INFO [train.py:715] (7/8) Epoch 9, batch 8700, loss[loss=0.1496, simple_loss=0.2203, pruned_loss=0.03945, over 4903.00 frames.], tot_loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.03478, over 972933.22 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 10:56:46,702 INFO [train.py:715] (7/8) Epoch 9, batch 8750, loss[loss=0.1196, simple_loss=0.1905, pruned_loss=0.02432, over 4762.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2128, pruned_loss=0.03402, over 972284.36 frames.], batch size: 12, lr: 2.38e-04 +2022-05-06 10:57:25,013 INFO [train.py:715] (7/8) Epoch 9, batch 8800, loss[loss=0.1445, simple_loss=0.2177, pruned_loss=0.03567, over 4986.00 frames.], tot_loss[loss=0.141, simple_loss=0.2133, pruned_loss=0.03434, over 971853.26 frames.], batch size: 20, lr: 2.38e-04 +2022-05-06 10:58:04,392 INFO [train.py:715] (7/8) Epoch 9, batch 8850, loss[loss=0.15, simple_loss=0.2208, pruned_loss=0.03956, over 4763.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03422, over 970922.60 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 10:58:43,839 INFO [train.py:715] (7/8) Epoch 9, batch 8900, loss[loss=0.16, simple_loss=0.2381, pruned_loss=0.04096, over 4908.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03408, over 971343.50 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 10:59:22,968 INFO [train.py:715] (7/8) Epoch 9, batch 8950, loss[loss=0.1474, simple_loss=0.2195, pruned_loss=0.03765, over 4986.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.0346, over 971839.99 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 11:00:01,619 INFO [train.py:715] (7/8) Epoch 9, batch 9000, loss[loss=0.1975, simple_loss=0.2518, pruned_loss=0.07161, over 4694.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2134, pruned_loss=0.03454, over 972036.56 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:00:01,620 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 11:00:11,234 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.107, simple_loss=0.1914, pruned_loss=0.0113, over 914524.00 frames. 
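To track these numbers without reading entries by hand, a small parser over lines of this shape is enough. The sketch below is illustrative post-processing, not part of train.py; the pattern and function name are assumptions chosen only to match the format visible in this log ("Epoch N, batch M, ..., tot_loss[loss=...]").

import re

# Matches per-batch entries like:
#   ... Epoch 9, batch 9000, loss[...], tot_loss[loss=0.1412, ...], batch size: 15, lr: 2.38e-04
PER_BATCH = re.compile(r"Epoch (\d+), batch (\d+), .*?tot_loss\[loss=([\d.]+)")

def parse_line(line):
    """Return (epoch, batch, running tot_loss) for a per-batch line, or None for other lines."""
    m = PER_BATCH.search(line)
    if m is None:
        return None  # e.g. validation results or "Computing validation loss" lines
    return int(m.group(1)), int(m.group(2)), float(m.group(3))

# Example on a fragment of an entry above:
print(parse_line("Epoch 9, batch 9000, loss[loss=0.1975], tot_loss[loss=0.1412, simple_loss=0.2134]"))
# -> (9, 9000, 0.1412)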
+2022-05-06 11:00:49,918 INFO [train.py:715] (7/8) Epoch 9, batch 9050, loss[loss=0.1582, simple_loss=0.2292, pruned_loss=0.0436, over 4948.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03462, over 971831.51 frames.], batch size: 39, lr: 2.38e-04 +2022-05-06 11:01:30,084 INFO [train.py:715] (7/8) Epoch 9, batch 9100, loss[loss=0.1333, simple_loss=0.2112, pruned_loss=0.02774, over 4922.00 frames.], tot_loss[loss=0.1428, simple_loss=0.215, pruned_loss=0.03532, over 971885.71 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:02:09,670 INFO [train.py:715] (7/8) Epoch 9, batch 9150, loss[loss=0.1331, simple_loss=0.1995, pruned_loss=0.03334, over 4776.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2144, pruned_loss=0.03504, over 972230.72 frames.], batch size: 12, lr: 2.38e-04 +2022-05-06 11:02:48,633 INFO [train.py:715] (7/8) Epoch 9, batch 9200, loss[loss=0.169, simple_loss=0.2309, pruned_loss=0.05356, over 4792.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2152, pruned_loss=0.03529, over 972787.40 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 11:03:28,188 INFO [train.py:715] (7/8) Epoch 9, batch 9250, loss[loss=0.1269, simple_loss=0.2085, pruned_loss=0.02271, over 4856.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03454, over 973245.03 frames.], batch size: 20, lr: 2.38e-04 +2022-05-06 11:04:07,601 INFO [train.py:715] (7/8) Epoch 9, batch 9300, loss[loss=0.1667, simple_loss=0.2294, pruned_loss=0.05202, over 4833.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.03453, over 972712.32 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:04:46,770 INFO [train.py:715] (7/8) Epoch 9, batch 9350, loss[loss=0.1503, simple_loss=0.2218, pruned_loss=0.03944, over 4785.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2138, pruned_loss=0.03437, over 972064.98 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 11:05:25,233 INFO [train.py:715] (7/8) Epoch 9, batch 9400, loss[loss=0.1647, simple_loss=0.2391, pruned_loss=0.04509, over 4986.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.0341, over 971895.43 frames.], batch size: 28, lr: 2.38e-04 +2022-05-06 11:06:05,138 INFO [train.py:715] (7/8) Epoch 9, batch 9450, loss[loss=0.1323, simple_loss=0.2009, pruned_loss=0.03185, over 4825.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.0338, over 972792.89 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:06:44,281 INFO [train.py:715] (7/8) Epoch 9, batch 9500, loss[loss=0.1307, simple_loss=0.2061, pruned_loss=0.02767, over 4951.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03359, over 972342.72 frames.], batch size: 21, lr: 2.38e-04 +2022-05-06 11:07:22,933 INFO [train.py:715] (7/8) Epoch 9, batch 9550, loss[loss=0.1722, simple_loss=0.2519, pruned_loss=0.0463, over 4691.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2121, pruned_loss=0.03359, over 972644.96 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:08:02,132 INFO [train.py:715] (7/8) Epoch 9, batch 9600, loss[loss=0.1491, simple_loss=0.2299, pruned_loss=0.03415, over 4961.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03427, over 973221.27 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:08:41,399 INFO [train.py:715] (7/8) Epoch 9, batch 9650, loss[loss=0.1397, simple_loss=0.2143, pruned_loss=0.03257, over 4916.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2127, pruned_loss=0.034, over 972958.47 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:09:20,428 INFO 
[train.py:715] (7/8) Epoch 9, batch 9700, loss[loss=0.1423, simple_loss=0.2193, pruned_loss=0.03265, over 4870.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2131, pruned_loss=0.03414, over 972379.20 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 11:09:58,456 INFO [train.py:715] (7/8) Epoch 9, batch 9750, loss[loss=0.1548, simple_loss=0.2353, pruned_loss=0.03712, over 4752.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2134, pruned_loss=0.03409, over 972105.60 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 11:10:38,592 INFO [train.py:715] (7/8) Epoch 9, batch 9800, loss[loss=0.134, simple_loss=0.2123, pruned_loss=0.02787, over 4902.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.0337, over 971955.34 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 11:11:18,280 INFO [train.py:715] (7/8) Epoch 9, batch 9850, loss[loss=0.1253, simple_loss=0.2019, pruned_loss=0.02435, over 4931.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.03377, over 971942.92 frames.], batch size: 35, lr: 2.38e-04 +2022-05-06 11:11:56,609 INFO [train.py:715] (7/8) Epoch 9, batch 9900, loss[loss=0.1266, simple_loss=0.2035, pruned_loss=0.0249, over 4930.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03348, over 972704.07 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:12:35,818 INFO [train.py:715] (7/8) Epoch 9, batch 9950, loss[loss=0.1627, simple_loss=0.2308, pruned_loss=0.04735, over 4835.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.03372, over 973068.05 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:13:15,756 INFO [train.py:715] (7/8) Epoch 9, batch 10000, loss[loss=0.1357, simple_loss=0.2134, pruned_loss=0.02903, over 4915.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03351, over 972723.96 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:13:55,093 INFO [train.py:715] (7/8) Epoch 9, batch 10050, loss[loss=0.119, simple_loss=0.1964, pruned_loss=0.02082, over 4816.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03343, over 972780.48 frames.], batch size: 25, lr: 2.38e-04 +2022-05-06 11:14:33,376 INFO [train.py:715] (7/8) Epoch 9, batch 10100, loss[loss=0.1307, simple_loss=0.209, pruned_loss=0.02619, over 4811.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03397, over 972782.10 frames.], batch size: 27, lr: 2.38e-04 +2022-05-06 11:15:12,912 INFO [train.py:715] (7/8) Epoch 9, batch 10150, loss[loss=0.1772, simple_loss=0.2467, pruned_loss=0.05387, over 4925.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03392, over 973033.59 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:15:52,571 INFO [train.py:715] (7/8) Epoch 9, batch 10200, loss[loss=0.1502, simple_loss=0.2172, pruned_loss=0.04162, over 4940.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03399, over 972752.96 frames.], batch size: 15, lr: 2.38e-04 +2022-05-06 11:16:31,362 INFO [train.py:715] (7/8) Epoch 9, batch 10250, loss[loss=0.1593, simple_loss=0.2431, pruned_loss=0.03775, over 4925.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03385, over 972248.83 frames.], batch size: 18, lr: 2.38e-04 +2022-05-06 11:17:10,103 INFO [train.py:715] (7/8) Epoch 9, batch 10300, loss[loss=0.1594, simple_loss=0.2257, pruned_loss=0.04657, over 4907.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03438, over 972167.54 frames.], batch size: 17, lr: 2.38e-04 +2022-05-06 11:17:49,725 INFO [train.py:715] (7/8) Epoch 9, 
batch 10350, loss[loss=0.1575, simple_loss=0.2217, pruned_loss=0.04668, over 4971.00 frames.], tot_loss[loss=0.141, simple_loss=0.2132, pruned_loss=0.03434, over 971813.24 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 11:18:28,420 INFO [train.py:715] (7/8) Epoch 9, batch 10400, loss[loss=0.139, simple_loss=0.2107, pruned_loss=0.03364, over 4753.00 frames.], tot_loss[loss=0.142, simple_loss=0.214, pruned_loss=0.03501, over 972693.56 frames.], batch size: 19, lr: 2.38e-04 +2022-05-06 11:19:06,743 INFO [train.py:715] (7/8) Epoch 9, batch 10450, loss[loss=0.1457, simple_loss=0.2112, pruned_loss=0.04011, over 4921.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03501, over 972780.61 frames.], batch size: 29, lr: 2.38e-04 +2022-05-06 11:19:45,853 INFO [train.py:715] (7/8) Epoch 9, batch 10500, loss[loss=0.1279, simple_loss=0.2074, pruned_loss=0.02421, over 4775.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.0342, over 973452.98 frames.], batch size: 14, lr: 2.38e-04 +2022-05-06 11:20:25,285 INFO [train.py:715] (7/8) Epoch 9, batch 10550, loss[loss=0.1385, simple_loss=0.2137, pruned_loss=0.03171, over 4867.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2148, pruned_loss=0.03441, over 972707.99 frames.], batch size: 16, lr: 2.38e-04 +2022-05-06 11:21:04,103 INFO [train.py:715] (7/8) Epoch 9, batch 10600, loss[loss=0.1349, simple_loss=0.2019, pruned_loss=0.03398, over 4946.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03439, over 973160.64 frames.], batch size: 39, lr: 2.38e-04 +2022-05-06 11:21:42,612 INFO [train.py:715] (7/8) Epoch 9, batch 10650, loss[loss=0.1343, simple_loss=0.2199, pruned_loss=0.02435, over 4801.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03441, over 972203.77 frames.], batch size: 12, lr: 2.38e-04 +2022-05-06 11:22:21,915 INFO [train.py:715] (7/8) Epoch 9, batch 10700, loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.0329, over 4852.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2149, pruned_loss=0.03466, over 972506.61 frames.], batch size: 32, lr: 2.37e-04 +2022-05-06 11:23:01,948 INFO [train.py:715] (7/8) Epoch 9, batch 10750, loss[loss=0.1656, simple_loss=0.2303, pruned_loss=0.0504, over 4836.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2154, pruned_loss=0.03455, over 971681.23 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:23:40,539 INFO [train.py:715] (7/8) Epoch 9, batch 10800, loss[loss=0.1185, simple_loss=0.1977, pruned_loss=0.01972, over 4817.00 frames.], tot_loss[loss=0.141, simple_loss=0.2142, pruned_loss=0.03391, over 972057.34 frames.], batch size: 25, lr: 2.37e-04 +2022-05-06 11:24:20,018 INFO [train.py:715] (7/8) Epoch 9, batch 10850, loss[loss=0.1337, simple_loss=0.2064, pruned_loss=0.03049, over 4858.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03436, over 972103.81 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:24:59,848 INFO [train.py:715] (7/8) Epoch 9, batch 10900, loss[loss=0.1326, simple_loss=0.2058, pruned_loss=0.02976, over 4811.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.03475, over 972026.69 frames.], batch size: 12, lr: 2.37e-04 +2022-05-06 11:25:40,139 INFO [train.py:715] (7/8) Epoch 9, batch 10950, loss[loss=0.1782, simple_loss=0.2489, pruned_loss=0.05377, over 4858.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03396, over 972477.01 frames.], batch size: 32, lr: 2.37e-04 +2022-05-06 11:26:20,016 INFO [train.py:715] (7/8) Epoch 9, batch 11000, 
loss[loss=0.1914, simple_loss=0.2553, pruned_loss=0.0637, over 4816.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.034, over 972240.80 frames.], batch size: 26, lr: 2.37e-04 +2022-05-06 11:27:00,852 INFO [train.py:715] (7/8) Epoch 9, batch 11050, loss[loss=0.1294, simple_loss=0.1972, pruned_loss=0.03078, over 4815.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03408, over 971590.01 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:27:42,117 INFO [train.py:715] (7/8) Epoch 9, batch 11100, loss[loss=0.1148, simple_loss=0.194, pruned_loss=0.01785, over 4895.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03482, over 971635.00 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:28:22,783 INFO [train.py:715] (7/8) Epoch 9, batch 11150, loss[loss=0.1507, simple_loss=0.2172, pruned_loss=0.04207, over 4903.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03454, over 972566.53 frames.], batch size: 19, lr: 2.37e-04 +2022-05-06 11:29:03,603 INFO [train.py:715] (7/8) Epoch 9, batch 11200, loss[loss=0.1266, simple_loss=0.2016, pruned_loss=0.0258, over 4778.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.0345, over 971815.85 frames.], batch size: 14, lr: 2.37e-04 +2022-05-06 11:29:45,097 INFO [train.py:715] (7/8) Epoch 9, batch 11250, loss[loss=0.1388, simple_loss=0.22, pruned_loss=0.0288, over 4845.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03451, over 972621.37 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:30:26,202 INFO [train.py:715] (7/8) Epoch 9, batch 11300, loss[loss=0.1378, simple_loss=0.2169, pruned_loss=0.02933, over 4975.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2147, pruned_loss=0.03504, over 973050.83 frames.], batch size: 28, lr: 2.37e-04 +2022-05-06 11:31:06,651 INFO [train.py:715] (7/8) Epoch 9, batch 11350, loss[loss=0.1289, simple_loss=0.1981, pruned_loss=0.02987, over 4813.00 frames.], tot_loss[loss=0.1419, simple_loss=0.214, pruned_loss=0.03487, over 972224.30 frames.], batch size: 12, lr: 2.37e-04 +2022-05-06 11:31:47,931 INFO [train.py:715] (7/8) Epoch 9, batch 11400, loss[loss=0.134, simple_loss=0.1998, pruned_loss=0.03409, over 4869.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2137, pruned_loss=0.03476, over 972130.52 frames.], batch size: 32, lr: 2.37e-04 +2022-05-06 11:32:29,506 INFO [train.py:715] (7/8) Epoch 9, batch 11450, loss[loss=0.1288, simple_loss=0.2036, pruned_loss=0.02698, over 4853.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2139, pruned_loss=0.03477, over 971625.40 frames.], batch size: 20, lr: 2.37e-04 +2022-05-06 11:33:10,079 INFO [train.py:715] (7/8) Epoch 9, batch 11500, loss[loss=0.1193, simple_loss=0.1944, pruned_loss=0.02213, over 4842.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2135, pruned_loss=0.03462, over 971968.78 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:33:50,771 INFO [train.py:715] (7/8) Epoch 9, batch 11550, loss[loss=0.1439, simple_loss=0.2089, pruned_loss=0.03945, over 4852.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2129, pruned_loss=0.0342, over 972836.26 frames.], batch size: 20, lr: 2.37e-04 +2022-05-06 11:34:32,086 INFO [train.py:715] (7/8) Epoch 9, batch 11600, loss[loss=0.1347, simple_loss=0.2059, pruned_loss=0.03169, over 4809.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2133, pruned_loss=0.03443, over 972338.50 frames.], batch size: 25, lr: 2.37e-04 +2022-05-06 11:35:13,605 INFO [train.py:715] (7/8) Epoch 9, batch 11650, loss[loss=0.1173, 
simple_loss=0.1984, pruned_loss=0.01808, over 4910.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.0343, over 972475.09 frames.], batch size: 29, lr: 2.37e-04 +2022-05-06 11:35:53,528 INFO [train.py:715] (7/8) Epoch 9, batch 11700, loss[loss=0.1517, simple_loss=0.2341, pruned_loss=0.03468, over 4842.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03402, over 971485.26 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:36:34,968 INFO [train.py:715] (7/8) Epoch 9, batch 11750, loss[loss=0.1151, simple_loss=0.1889, pruned_loss=0.02068, over 4771.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2125, pruned_loss=0.03406, over 971690.97 frames.], batch size: 18, lr: 2.37e-04 +2022-05-06 11:37:16,471 INFO [train.py:715] (7/8) Epoch 9, batch 11800, loss[loss=0.1619, simple_loss=0.2335, pruned_loss=0.0452, over 4941.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2125, pruned_loss=0.03392, over 972396.53 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:37:56,814 INFO [train.py:715] (7/8) Epoch 9, batch 11850, loss[loss=0.149, simple_loss=0.2209, pruned_loss=0.03859, over 4841.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2122, pruned_loss=0.03365, over 972458.53 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:38:37,236 INFO [train.py:715] (7/8) Epoch 9, batch 11900, loss[loss=0.1744, simple_loss=0.2426, pruned_loss=0.05305, over 4808.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.03331, over 972532.57 frames.], batch size: 25, lr: 2.37e-04 +2022-05-06 11:39:18,261 INFO [train.py:715] (7/8) Epoch 9, batch 11950, loss[loss=0.1138, simple_loss=0.1941, pruned_loss=0.01672, over 4841.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03416, over 972743.13 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:39:59,371 INFO [train.py:715] (7/8) Epoch 9, batch 12000, loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02791, over 4821.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03462, over 972662.17 frames.], batch size: 26, lr: 2.37e-04 +2022-05-06 11:39:59,372 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 11:40:09,082 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.107, simple_loss=0.1913, pruned_loss=0.01136, over 914524.00 frames. 
+2022-05-06 11:40:50,132 INFO [train.py:715] (7/8) Epoch 9, batch 12050, loss[loss=0.1378, simple_loss=0.201, pruned_loss=0.03727, over 4902.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.0344, over 972465.41 frames.], batch size: 17, lr: 2.37e-04 +2022-05-06 11:41:29,624 INFO [train.py:715] (7/8) Epoch 9, batch 12100, loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03739, over 4817.00 frames.], tot_loss[loss=0.141, simple_loss=0.2136, pruned_loss=0.0342, over 972280.52 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:42:10,010 INFO [train.py:715] (7/8) Epoch 9, batch 12150, loss[loss=0.1325, simple_loss=0.1932, pruned_loss=0.03592, over 4836.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.03456, over 972157.11 frames.], batch size: 13, lr: 2.37e-04 +2022-05-06 11:42:50,009 INFO [train.py:715] (7/8) Epoch 9, batch 12200, loss[loss=0.1232, simple_loss=0.1961, pruned_loss=0.02512, over 4902.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2138, pruned_loss=0.03436, over 972816.68 frames.], batch size: 17, lr: 2.37e-04 +2022-05-06 11:43:29,260 INFO [train.py:715] (7/8) Epoch 9, batch 12250, loss[loss=0.1602, simple_loss=0.2342, pruned_loss=0.04309, over 4881.00 frames.], tot_loss[loss=0.142, simple_loss=0.2145, pruned_loss=0.03471, over 973208.64 frames.], batch size: 32, lr: 2.37e-04 +2022-05-06 11:44:08,224 INFO [train.py:715] (7/8) Epoch 9, batch 12300, loss[loss=0.1261, simple_loss=0.1966, pruned_loss=0.02785, over 4928.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03495, over 973655.20 frames.], batch size: 18, lr: 2.37e-04 +2022-05-06 11:44:47,994 INFO [train.py:715] (7/8) Epoch 9, batch 12350, loss[loss=0.1416, simple_loss=0.2116, pruned_loss=0.03584, over 4782.00 frames.], tot_loss[loss=0.142, simple_loss=0.2148, pruned_loss=0.03457, over 973427.84 frames.], batch size: 18, lr: 2.37e-04 +2022-05-06 11:45:28,033 INFO [train.py:715] (7/8) Epoch 9, batch 12400, loss[loss=0.1249, simple_loss=0.2046, pruned_loss=0.02264, over 4983.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.0343, over 973425.18 frames.], batch size: 25, lr: 2.37e-04 +2022-05-06 11:46:07,546 INFO [train.py:715] (7/8) Epoch 9, batch 12450, loss[loss=0.1416, simple_loss=0.217, pruned_loss=0.03311, over 4796.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03493, over 973050.87 frames.], batch size: 24, lr: 2.37e-04 +2022-05-06 11:46:47,600 INFO [train.py:715] (7/8) Epoch 9, batch 12500, loss[loss=0.1251, simple_loss=0.2051, pruned_loss=0.02257, over 4798.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2161, pruned_loss=0.03544, over 973201.82 frames.], batch size: 24, lr: 2.37e-04 +2022-05-06 11:47:27,729 INFO [train.py:715] (7/8) Epoch 9, batch 12550, loss[loss=0.159, simple_loss=0.2357, pruned_loss=0.04116, over 4966.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2154, pruned_loss=0.03512, over 973174.34 frames.], batch size: 39, lr: 2.37e-04 +2022-05-06 11:48:07,692 INFO [train.py:715] (7/8) Epoch 9, batch 12600, loss[loss=0.1404, simple_loss=0.222, pruned_loss=0.02938, over 4962.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03511, over 973310.85 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:48:46,462 INFO [train.py:715] (7/8) Epoch 9, batch 12650, loss[loss=0.1444, simple_loss=0.2213, pruned_loss=0.03376, over 4872.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2149, pruned_loss=0.03526, over 972706.71 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:49:26,598 INFO 
[train.py:715] (7/8) Epoch 9, batch 12700, loss[loss=0.1204, simple_loss=0.1978, pruned_loss=0.02154, over 4941.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03524, over 971916.94 frames.], batch size: 29, lr: 2.37e-04 +2022-05-06 11:50:06,591 INFO [train.py:715] (7/8) Epoch 9, batch 12750, loss[loss=0.1249, simple_loss=0.1902, pruned_loss=0.02976, over 4703.00 frames.], tot_loss[loss=0.141, simple_loss=0.213, pruned_loss=0.03455, over 972347.27 frames.], batch size: 15, lr: 2.37e-04 +2022-05-06 11:50:45,760 INFO [train.py:715] (7/8) Epoch 9, batch 12800, loss[loss=0.1298, simple_loss=0.2077, pruned_loss=0.02594, over 4839.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2128, pruned_loss=0.03421, over 972569.66 frames.], batch size: 26, lr: 2.37e-04 +2022-05-06 11:51:25,605 INFO [train.py:715] (7/8) Epoch 9, batch 12850, loss[loss=0.1414, simple_loss=0.2231, pruned_loss=0.02989, over 4864.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.03405, over 973055.98 frames.], batch size: 20, lr: 2.37e-04 +2022-05-06 11:52:05,497 INFO [train.py:715] (7/8) Epoch 9, batch 12900, loss[loss=0.1488, simple_loss=0.2202, pruned_loss=0.0387, over 4982.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2133, pruned_loss=0.03429, over 972430.80 frames.], batch size: 25, lr: 2.37e-04 +2022-05-06 11:52:45,475 INFO [train.py:715] (7/8) Epoch 9, batch 12950, loss[loss=0.1247, simple_loss=0.1949, pruned_loss=0.02724, over 4751.00 frames.], tot_loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.0343, over 971307.76 frames.], batch size: 12, lr: 2.37e-04 +2022-05-06 11:53:24,504 INFO [train.py:715] (7/8) Epoch 9, batch 13000, loss[loss=0.1201, simple_loss=0.1989, pruned_loss=0.02059, over 4886.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03398, over 971447.46 frames.], batch size: 22, lr: 2.37e-04 +2022-05-06 11:54:04,857 INFO [train.py:715] (7/8) Epoch 9, batch 13050, loss[loss=0.1208, simple_loss=0.1935, pruned_loss=0.0241, over 4787.00 frames.], tot_loss[loss=0.141, simple_loss=0.2136, pruned_loss=0.03416, over 971490.74 frames.], batch size: 14, lr: 2.37e-04 +2022-05-06 11:54:44,623 INFO [train.py:715] (7/8) Epoch 9, batch 13100, loss[loss=0.1392, simple_loss=0.2215, pruned_loss=0.02847, over 4812.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2133, pruned_loss=0.03445, over 971592.90 frames.], batch size: 27, lr: 2.37e-04 +2022-05-06 11:55:23,868 INFO [train.py:715] (7/8) Epoch 9, batch 13150, loss[loss=0.1641, simple_loss=0.2494, pruned_loss=0.03938, over 4744.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2134, pruned_loss=0.03439, over 972025.21 frames.], batch size: 16, lr: 2.37e-04 +2022-05-06 11:56:03,853 INFO [train.py:715] (7/8) Epoch 9, batch 13200, loss[loss=0.1358, simple_loss=0.1971, pruned_loss=0.03719, over 4848.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2133, pruned_loss=0.03454, over 971703.31 frames.], batch size: 30, lr: 2.37e-04 +2022-05-06 11:56:44,168 INFO [train.py:715] (7/8) Epoch 9, batch 13250, loss[loss=0.1247, simple_loss=0.203, pruned_loss=0.02318, over 4956.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2133, pruned_loss=0.03426, over 972199.28 frames.], batch size: 21, lr: 2.37e-04 +2022-05-06 11:57:23,741 INFO [train.py:715] (7/8) Epoch 9, batch 13300, loss[loss=0.1474, simple_loss=0.221, pruned_loss=0.03692, over 4971.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2136, pruned_loss=0.03474, over 972242.38 frames.], batch size: 39, lr: 2.37e-04 +2022-05-06 11:58:03,448 INFO [train.py:715] (7/8) Epoch 
9, batch 13350, loss[loss=0.2137, simple_loss=0.2781, pruned_loss=0.07465, over 4844.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03523, over 972940.23 frames.], batch size: 32, lr: 2.37e-04 +2022-05-06 11:58:43,524 INFO [train.py:715] (7/8) Epoch 9, batch 13400, loss[loss=0.1434, simple_loss=0.2336, pruned_loss=0.0266, over 4774.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2138, pruned_loss=0.03473, over 971834.52 frames.], batch size: 17, lr: 2.37e-04 +2022-05-06 11:59:23,794 INFO [train.py:715] (7/8) Epoch 9, batch 13450, loss[loss=0.1654, simple_loss=0.2355, pruned_loss=0.0476, over 4827.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2136, pruned_loss=0.03481, over 971869.64 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:00:02,969 INFO [train.py:715] (7/8) Epoch 9, batch 13500, loss[loss=0.1753, simple_loss=0.2344, pruned_loss=0.05805, over 4638.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03461, over 972156.45 frames.], batch size: 13, lr: 2.36e-04 +2022-05-06 12:00:42,984 INFO [train.py:715] (7/8) Epoch 9, batch 13550, loss[loss=0.1521, simple_loss=0.2237, pruned_loss=0.0402, over 4892.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2149, pruned_loss=0.03506, over 972621.41 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:01:22,500 INFO [train.py:715] (7/8) Epoch 9, batch 13600, loss[loss=0.1415, simple_loss=0.2105, pruned_loss=0.03624, over 4949.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2148, pruned_loss=0.0347, over 972916.33 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:02:01,622 INFO [train.py:715] (7/8) Epoch 9, batch 13650, loss[loss=0.1366, simple_loss=0.2078, pruned_loss=0.03269, over 4933.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03452, over 972066.99 frames.], batch size: 23, lr: 2.36e-04 +2022-05-06 12:02:40,855 INFO [train.py:715] (7/8) Epoch 9, batch 13700, loss[loss=0.1438, simple_loss=0.211, pruned_loss=0.0383, over 4773.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03439, over 971627.86 frames.], batch size: 14, lr: 2.36e-04 +2022-05-06 12:03:20,734 INFO [train.py:715] (7/8) Epoch 9, batch 13750, loss[loss=0.1101, simple_loss=0.1835, pruned_loss=0.01834, over 4964.00 frames.], tot_loss[loss=0.1407, simple_loss=0.213, pruned_loss=0.03423, over 971342.75 frames.], batch size: 35, lr: 2.36e-04 +2022-05-06 12:03:59,885 INFO [train.py:715] (7/8) Epoch 9, batch 13800, loss[loss=0.192, simple_loss=0.2392, pruned_loss=0.0724, over 4901.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2142, pruned_loss=0.03484, over 971557.04 frames.], batch size: 17, lr: 2.36e-04 +2022-05-06 12:04:38,385 INFO [train.py:715] (7/8) Epoch 9, batch 13850, loss[loss=0.1323, simple_loss=0.2028, pruned_loss=0.03086, over 4832.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2151, pruned_loss=0.03484, over 972602.11 frames.], batch size: 13, lr: 2.36e-04 +2022-05-06 12:05:17,814 INFO [train.py:715] (7/8) Epoch 9, batch 13900, loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03431, over 4861.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2147, pruned_loss=0.03471, over 972066.49 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:05:57,960 INFO [train.py:715] (7/8) Epoch 9, batch 13950, loss[loss=0.1279, simple_loss=0.2004, pruned_loss=0.02775, over 4815.00 frames.], tot_loss[loss=0.141, simple_loss=0.2136, pruned_loss=0.03418, over 972369.31 frames.], batch size: 26, lr: 2.36e-04 +2022-05-06 12:06:36,919 INFO [train.py:715] (7/8) Epoch 9, batch 14000, 
loss[loss=0.1345, simple_loss=0.2066, pruned_loss=0.03124, over 4867.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03475, over 972500.41 frames.], batch size: 32, lr: 2.36e-04 +2022-05-06 12:07:16,028 INFO [train.py:715] (7/8) Epoch 9, batch 14050, loss[loss=0.1426, simple_loss=0.2239, pruned_loss=0.03061, over 4749.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03442, over 972305.19 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:07:55,565 INFO [train.py:715] (7/8) Epoch 9, batch 14100, loss[loss=0.1556, simple_loss=0.2232, pruned_loss=0.04397, over 4953.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03448, over 971258.69 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:08:35,131 INFO [train.py:715] (7/8) Epoch 9, batch 14150, loss[loss=0.1299, simple_loss=0.184, pruned_loss=0.03794, over 4809.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03415, over 971614.89 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:09:14,479 INFO [train.py:715] (7/8) Epoch 9, batch 14200, loss[loss=0.1574, simple_loss=0.2145, pruned_loss=0.05012, over 4850.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03442, over 971704.29 frames.], batch size: 32, lr: 2.36e-04 +2022-05-06 12:09:53,803 INFO [train.py:715] (7/8) Epoch 9, batch 14250, loss[loss=0.1436, simple_loss=0.2276, pruned_loss=0.02976, over 4787.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2144, pruned_loss=0.0345, over 971575.54 frames.], batch size: 18, lr: 2.36e-04 +2022-05-06 12:10:33,296 INFO [train.py:715] (7/8) Epoch 9, batch 14300, loss[loss=0.141, simple_loss=0.2159, pruned_loss=0.033, over 4689.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2152, pruned_loss=0.03478, over 970836.89 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:11:11,973 INFO [train.py:715] (7/8) Epoch 9, batch 14350, loss[loss=0.1375, simple_loss=0.2083, pruned_loss=0.0334, over 4807.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2154, pruned_loss=0.03491, over 971781.08 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:11:50,597 INFO [train.py:715] (7/8) Epoch 9, batch 14400, loss[loss=0.1124, simple_loss=0.1872, pruned_loss=0.01878, over 4751.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2155, pruned_loss=0.03502, over 971857.24 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:12:30,357 INFO [train.py:715] (7/8) Epoch 9, batch 14450, loss[loss=0.1486, simple_loss=0.2237, pruned_loss=0.03669, over 4822.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2156, pruned_loss=0.03473, over 972857.50 frames.], batch size: 26, lr: 2.36e-04 +2022-05-06 12:13:09,693 INFO [train.py:715] (7/8) Epoch 9, batch 14500, loss[loss=0.1404, simple_loss=0.2106, pruned_loss=0.03509, over 4951.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03477, over 972260.01 frames.], batch size: 14, lr: 2.36e-04 +2022-05-06 12:13:48,634 INFO [train.py:715] (7/8) Epoch 9, batch 14550, loss[loss=0.1654, simple_loss=0.2476, pruned_loss=0.04157, over 4854.00 frames.], tot_loss[loss=0.1429, simple_loss=0.215, pruned_loss=0.03545, over 972947.97 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:14:27,684 INFO [train.py:715] (7/8) Epoch 9, batch 14600, loss[loss=0.1179, simple_loss=0.1927, pruned_loss=0.02157, over 4979.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2155, pruned_loss=0.03544, over 973233.61 frames.], batch size: 35, lr: 2.36e-04 +2022-05-06 12:15:07,386 INFO [train.py:715] (7/8) Epoch 9, batch 14650, loss[loss=0.1501, 
simple_loss=0.2296, pruned_loss=0.03532, over 4784.00 frames.], tot_loss[loss=0.143, simple_loss=0.2149, pruned_loss=0.03557, over 972202.11 frames.], batch size: 14, lr: 2.36e-04 +2022-05-06 12:15:45,919 INFO [train.py:715] (7/8) Epoch 9, batch 14700, loss[loss=0.1126, simple_loss=0.1876, pruned_loss=0.01877, over 4793.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2145, pruned_loss=0.03542, over 973090.07 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:16:27,519 INFO [train.py:715] (7/8) Epoch 9, batch 14750, loss[loss=0.1228, simple_loss=0.1963, pruned_loss=0.02461, over 4875.00 frames.], tot_loss[loss=0.1425, simple_loss=0.214, pruned_loss=0.03553, over 972937.50 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:17:06,572 INFO [train.py:715] (7/8) Epoch 9, batch 14800, loss[loss=0.1397, simple_loss=0.2154, pruned_loss=0.032, over 4838.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2138, pruned_loss=0.0348, over 973104.77 frames.], batch size: 27, lr: 2.36e-04 +2022-05-06 12:17:45,497 INFO [train.py:715] (7/8) Epoch 9, batch 14850, loss[loss=0.1535, simple_loss=0.2205, pruned_loss=0.04327, over 4833.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2135, pruned_loss=0.03486, over 971954.57 frames.], batch size: 30, lr: 2.36e-04 +2022-05-06 12:18:24,547 INFO [train.py:715] (7/8) Epoch 9, batch 14900, loss[loss=0.1529, simple_loss=0.2347, pruned_loss=0.0355, over 4894.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2147, pruned_loss=0.03522, over 972049.91 frames.], batch size: 17, lr: 2.36e-04 +2022-05-06 12:19:03,081 INFO [train.py:715] (7/8) Epoch 9, batch 14950, loss[loss=0.1745, simple_loss=0.2441, pruned_loss=0.05245, over 4790.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2151, pruned_loss=0.0353, over 972073.16 frames.], batch size: 21, lr: 2.36e-04 +2022-05-06 12:19:42,680 INFO [train.py:715] (7/8) Epoch 9, batch 15000, loss[loss=0.1936, simple_loss=0.2797, pruned_loss=0.05374, over 4745.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2153, pruned_loss=0.0354, over 970866.20 frames.], batch size: 16, lr: 2.36e-04 +2022-05-06 12:19:42,680 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 12:19:52,343 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1071, simple_loss=0.1915, pruned_loss=0.01139, over 914524.00 frames. 
+2022-05-06 12:20:32,095 INFO [train.py:715] (7/8) Epoch 9, batch 15050, loss[loss=0.1313, simple_loss=0.2043, pruned_loss=0.02911, over 4985.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2145, pruned_loss=0.03528, over 971914.68 frames.], batch size: 35, lr: 2.36e-04 +2022-05-06 12:21:11,100 INFO [train.py:715] (7/8) Epoch 9, batch 15100, loss[loss=0.1435, simple_loss=0.2243, pruned_loss=0.03131, over 4930.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03518, over 972239.33 frames.], batch size: 39, lr: 2.36e-04 +2022-05-06 12:21:50,198 INFO [train.py:715] (7/8) Epoch 9, batch 15150, loss[loss=0.1117, simple_loss=0.1914, pruned_loss=0.01597, over 4983.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2139, pruned_loss=0.03479, over 972738.41 frames.], batch size: 31, lr: 2.36e-04 +2022-05-06 12:22:30,010 INFO [train.py:715] (7/8) Epoch 9, batch 15200, loss[loss=0.1331, simple_loss=0.2061, pruned_loss=0.03005, over 4834.00 frames.], tot_loss[loss=0.142, simple_loss=0.214, pruned_loss=0.03503, over 973084.26 frames.], batch size: 26, lr: 2.36e-04 +2022-05-06 12:23:09,323 INFO [train.py:715] (7/8) Epoch 9, batch 15250, loss[loss=0.14, simple_loss=0.2138, pruned_loss=0.03314, over 4820.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2144, pruned_loss=0.03506, over 972559.95 frames.], batch size: 26, lr: 2.36e-04 +2022-05-06 12:23:48,032 INFO [train.py:715] (7/8) Epoch 9, batch 15300, loss[loss=0.141, simple_loss=0.2207, pruned_loss=0.03061, over 4849.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.03492, over 971687.00 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:24:27,149 INFO [train.py:715] (7/8) Epoch 9, batch 15350, loss[loss=0.1128, simple_loss=0.1914, pruned_loss=0.01709, over 4809.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03478, over 972460.46 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:25:06,186 INFO [train.py:715] (7/8) Epoch 9, batch 15400, loss[loss=0.1251, simple_loss=0.2033, pruned_loss=0.02348, over 4860.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03442, over 972109.32 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:25:44,959 INFO [train.py:715] (7/8) Epoch 9, batch 15450, loss[loss=0.1746, simple_loss=0.2377, pruned_loss=0.05571, over 4848.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03498, over 972551.81 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:26:23,387 INFO [train.py:715] (7/8) Epoch 9, batch 15500, loss[loss=0.1446, simple_loss=0.2152, pruned_loss=0.03701, over 4982.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03494, over 973759.64 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:27:03,116 INFO [train.py:715] (7/8) Epoch 9, batch 15550, loss[loss=0.1427, simple_loss=0.2165, pruned_loss=0.03442, over 4857.00 frames.], tot_loss[loss=0.143, simple_loss=0.2157, pruned_loss=0.03511, over 973791.18 frames.], batch size: 20, lr: 2.36e-04 +2022-05-06 12:27:41,877 INFO [train.py:715] (7/8) Epoch 9, batch 15600, loss[loss=0.1354, simple_loss=0.1999, pruned_loss=0.0354, over 4884.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2158, pruned_loss=0.03503, over 973589.43 frames.], batch size: 16, lr: 2.36e-04 +2022-05-06 12:28:20,224 INFO [train.py:715] (7/8) Epoch 9, batch 15650, loss[loss=0.1372, simple_loss=0.2084, pruned_loss=0.03301, over 4898.00 frames.], tot_loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.03555, over 973182.14 frames.], batch size: 19, lr: 2.36e-04 +2022-05-06 12:28:59,318 
INFO [train.py:715] (7/8) Epoch 9, batch 15700, loss[loss=0.1265, simple_loss=0.202, pruned_loss=0.02546, over 4961.00 frames.], tot_loss[loss=0.1439, simple_loss=0.2163, pruned_loss=0.03574, over 972198.81 frames.], batch size: 24, lr: 2.36e-04 +2022-05-06 12:29:39,072 INFO [train.py:715] (7/8) Epoch 9, batch 15750, loss[loss=0.1497, simple_loss=0.2205, pruned_loss=0.03943, over 4834.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03528, over 972170.15 frames.], batch size: 13, lr: 2.36e-04 +2022-05-06 12:30:17,866 INFO [train.py:715] (7/8) Epoch 9, batch 15800, loss[loss=0.135, simple_loss=0.2066, pruned_loss=0.03169, over 4782.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2157, pruned_loss=0.0353, over 972821.42 frames.], batch size: 18, lr: 2.36e-04 +2022-05-06 12:30:56,796 INFO [train.py:715] (7/8) Epoch 9, batch 15850, loss[loss=0.1496, simple_loss=0.2288, pruned_loss=0.03525, over 4973.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03489, over 972810.46 frames.], batch size: 15, lr: 2.36e-04 +2022-05-06 12:31:36,402 INFO [train.py:715] (7/8) Epoch 9, batch 15900, loss[loss=0.2021, simple_loss=0.2732, pruned_loss=0.06547, over 4903.00 frames.], tot_loss[loss=0.143, simple_loss=0.2153, pruned_loss=0.03532, over 972411.76 frames.], batch size: 17, lr: 2.36e-04 +2022-05-06 12:32:15,976 INFO [train.py:715] (7/8) Epoch 9, batch 15950, loss[loss=0.1771, simple_loss=0.2381, pruned_loss=0.05806, over 4850.00 frames.], tot_loss[loss=0.1427, simple_loss=0.215, pruned_loss=0.03524, over 972482.78 frames.], batch size: 30, lr: 2.36e-04 +2022-05-06 12:32:54,620 INFO [train.py:715] (7/8) Epoch 9, batch 16000, loss[loss=0.1253, simple_loss=0.1927, pruned_loss=0.02901, over 4807.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.03536, over 972159.51 frames.], batch size: 12, lr: 2.36e-04 +2022-05-06 12:33:33,297 INFO [train.py:715] (7/8) Epoch 9, batch 16050, loss[loss=0.1413, simple_loss=0.2115, pruned_loss=0.03554, over 4979.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2155, pruned_loss=0.0348, over 972705.32 frames.], batch size: 28, lr: 2.36e-04 +2022-05-06 12:34:12,508 INFO [train.py:715] (7/8) Epoch 9, batch 16100, loss[loss=0.1061, simple_loss=0.1783, pruned_loss=0.017, over 4865.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2161, pruned_loss=0.03532, over 971619.57 frames.], batch size: 13, lr: 2.36e-04 +2022-05-06 12:34:51,592 INFO [train.py:715] (7/8) Epoch 9, batch 16150, loss[loss=0.1268, simple_loss=0.2015, pruned_loss=0.02601, over 4927.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2158, pruned_loss=0.03496, over 971741.72 frames.], batch size: 18, lr: 2.36e-04 +2022-05-06 12:35:30,768 INFO [train.py:715] (7/8) Epoch 9, batch 16200, loss[loss=0.1218, simple_loss=0.1971, pruned_loss=0.02325, over 4978.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2155, pruned_loss=0.03485, over 972385.43 frames.], batch size: 14, lr: 2.36e-04 +2022-05-06 12:36:10,110 INFO [train.py:715] (7/8) Epoch 9, batch 16250, loss[loss=0.1281, simple_loss=0.2083, pruned_loss=0.02392, over 4936.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2155, pruned_loss=0.03463, over 972847.29 frames.], batch size: 29, lr: 2.35e-04 +2022-05-06 12:36:49,787 INFO [train.py:715] (7/8) Epoch 9, batch 16300, loss[loss=0.1138, simple_loss=0.193, pruned_loss=0.01729, over 4905.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2149, pruned_loss=0.03433, over 972012.79 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:37:27,729 INFO [train.py:715] (7/8) 
Epoch 9, batch 16350, loss[loss=0.15, simple_loss=0.2328, pruned_loss=0.03356, over 4695.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2158, pruned_loss=0.03484, over 972459.36 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:38:07,161 INFO [train.py:715] (7/8) Epoch 9, batch 16400, loss[loss=0.1446, simple_loss=0.2146, pruned_loss=0.03726, over 4957.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2163, pruned_loss=0.03565, over 972251.62 frames.], batch size: 35, lr: 2.35e-04 +2022-05-06 12:38:47,054 INFO [train.py:715] (7/8) Epoch 9, batch 16450, loss[loss=0.1503, simple_loss=0.2359, pruned_loss=0.03231, over 4815.00 frames.], tot_loss[loss=0.1433, simple_loss=0.216, pruned_loss=0.03531, over 971816.55 frames.], batch size: 25, lr: 2.35e-04 +2022-05-06 12:39:25,804 INFO [train.py:715] (7/8) Epoch 9, batch 16500, loss[loss=0.1397, simple_loss=0.2142, pruned_loss=0.03261, over 4891.00 frames.], tot_loss[loss=0.1434, simple_loss=0.2161, pruned_loss=0.03537, over 972046.94 frames.], batch size: 22, lr: 2.35e-04 +2022-05-06 12:40:04,384 INFO [train.py:715] (7/8) Epoch 9, batch 16550, loss[loss=0.1127, simple_loss=0.1908, pruned_loss=0.01733, over 4859.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2156, pruned_loss=0.03542, over 971959.04 frames.], batch size: 20, lr: 2.35e-04 +2022-05-06 12:40:43,848 INFO [train.py:715] (7/8) Epoch 9, batch 16600, loss[loss=0.1365, simple_loss=0.2067, pruned_loss=0.03313, over 4816.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2148, pruned_loss=0.03508, over 971747.31 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:41:23,436 INFO [train.py:715] (7/8) Epoch 9, batch 16650, loss[loss=0.1314, simple_loss=0.2024, pruned_loss=0.03021, over 4829.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2154, pruned_loss=0.03545, over 972400.62 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:42:02,349 INFO [train.py:715] (7/8) Epoch 9, batch 16700, loss[loss=0.115, simple_loss=0.1855, pruned_loss=0.02226, over 4802.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.0353, over 971498.64 frames.], batch size: 12, lr: 2.35e-04 +2022-05-06 12:42:41,614 INFO [train.py:715] (7/8) Epoch 9, batch 16750, loss[loss=0.1091, simple_loss=0.1881, pruned_loss=0.0151, over 4786.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03499, over 971839.54 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 12:43:21,411 INFO [train.py:715] (7/8) Epoch 9, batch 16800, loss[loss=0.127, simple_loss=0.1997, pruned_loss=0.02709, over 4834.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2148, pruned_loss=0.03481, over 971834.74 frames.], batch size: 30, lr: 2.35e-04 +2022-05-06 12:44:01,038 INFO [train.py:715] (7/8) Epoch 9, batch 16850, loss[loss=0.1223, simple_loss=0.2017, pruned_loss=0.02142, over 4934.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03466, over 971826.54 frames.], batch size: 23, lr: 2.35e-04 +2022-05-06 12:44:40,457 INFO [train.py:715] (7/8) Epoch 9, batch 16900, loss[loss=0.14, simple_loss=0.218, pruned_loss=0.03102, over 4978.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2148, pruned_loss=0.0349, over 972083.06 frames.], batch size: 27, lr: 2.35e-04 +2022-05-06 12:45:20,533 INFO [train.py:715] (7/8) Epoch 9, batch 16950, loss[loss=0.129, simple_loss=0.2054, pruned_loss=0.0263, over 4876.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2138, pruned_loss=0.03431, over 971648.62 frames.], batch size: 20, lr: 2.35e-04 +2022-05-06 12:46:00,235 INFO [train.py:715] (7/8) Epoch 9, batch 17000, 
loss[loss=0.1581, simple_loss=0.2279, pruned_loss=0.04414, over 4838.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03416, over 971742.75 frames.], batch size: 30, lr: 2.35e-04 +2022-05-06 12:46:38,805 INFO [train.py:715] (7/8) Epoch 9, batch 17050, loss[loss=0.1231, simple_loss=0.1881, pruned_loss=0.02904, over 4918.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2145, pruned_loss=0.03434, over 971845.31 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:47:18,387 INFO [train.py:715] (7/8) Epoch 9, batch 17100, loss[loss=0.1252, simple_loss=0.2044, pruned_loss=0.02297, over 4794.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2148, pruned_loss=0.03421, over 972713.51 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 12:47:58,064 INFO [train.py:715] (7/8) Epoch 9, batch 17150, loss[loss=0.1654, simple_loss=0.2404, pruned_loss=0.04515, over 4901.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2146, pruned_loss=0.03402, over 972647.31 frames.], batch size: 16, lr: 2.35e-04 +2022-05-06 12:48:37,319 INFO [train.py:715] (7/8) Epoch 9, batch 17200, loss[loss=0.1327, simple_loss=0.2055, pruned_loss=0.02995, over 4776.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2143, pruned_loss=0.03392, over 972049.65 frames.], batch size: 18, lr: 2.35e-04 +2022-05-06 12:49:15,992 INFO [train.py:715] (7/8) Epoch 9, batch 17250, loss[loss=0.1434, simple_loss=0.222, pruned_loss=0.03242, over 4850.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2139, pruned_loss=0.03397, over 971171.26 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 12:49:54,886 INFO [train.py:715] (7/8) Epoch 9, batch 17300, loss[loss=0.1433, simple_loss=0.2183, pruned_loss=0.03415, over 4752.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2144, pruned_loss=0.03417, over 971953.38 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 12:50:33,972 INFO [train.py:715] (7/8) Epoch 9, batch 17350, loss[loss=0.1717, simple_loss=0.2533, pruned_loss=0.04504, over 4981.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2161, pruned_loss=0.0351, over 971718.00 frames.], batch size: 25, lr: 2.35e-04 +2022-05-06 12:51:13,079 INFO [train.py:715] (7/8) Epoch 9, batch 17400, loss[loss=0.1215, simple_loss=0.1941, pruned_loss=0.02448, over 4988.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2161, pruned_loss=0.0351, over 972246.70 frames.], batch size: 25, lr: 2.35e-04 +2022-05-06 12:51:52,392 INFO [train.py:715] (7/8) Epoch 9, batch 17450, loss[loss=0.1488, simple_loss=0.225, pruned_loss=0.03634, over 4808.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2154, pruned_loss=0.03475, over 971840.77 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 12:52:31,602 INFO [train.py:715] (7/8) Epoch 9, batch 17500, loss[loss=0.1407, simple_loss=0.2189, pruned_loss=0.0312, over 4799.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03506, over 971811.59 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 12:53:10,813 INFO [train.py:715] (7/8) Epoch 9, batch 17550, loss[loss=0.122, simple_loss=0.1945, pruned_loss=0.02469, over 4971.00 frames.], tot_loss[loss=0.1429, simple_loss=0.216, pruned_loss=0.03486, over 972098.17 frames.], batch size: 24, lr: 2.35e-04 +2022-05-06 12:53:49,892 INFO [train.py:715] (7/8) Epoch 9, batch 17600, loss[loss=0.154, simple_loss=0.2217, pruned_loss=0.04312, over 4845.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2158, pruned_loss=0.0349, over 971886.46 frames.], batch size: 30, lr: 2.35e-04 +2022-05-06 12:54:29,587 INFO [train.py:715] (7/8) Epoch 9, batch 17650, loss[loss=0.1389, 
simple_loss=0.216, pruned_loss=0.0309, over 4912.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03495, over 972726.96 frames.], batch size: 35, lr: 2.35e-04 +2022-05-06 12:55:08,477 INFO [train.py:715] (7/8) Epoch 9, batch 17700, loss[loss=0.1383, simple_loss=0.216, pruned_loss=0.03026, over 4916.00 frames.], tot_loss[loss=0.1421, simple_loss=0.215, pruned_loss=0.03458, over 972568.03 frames.], batch size: 29, lr: 2.35e-04 +2022-05-06 12:55:47,742 INFO [train.py:715] (7/8) Epoch 9, batch 17750, loss[loss=0.1497, simple_loss=0.223, pruned_loss=0.03824, over 4785.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2151, pruned_loss=0.03493, over 972958.79 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:56:27,548 INFO [train.py:715] (7/8) Epoch 9, batch 17800, loss[loss=0.1447, simple_loss=0.2171, pruned_loss=0.03614, over 4965.00 frames.], tot_loss[loss=0.1425, simple_loss=0.215, pruned_loss=0.03497, over 973378.40 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 12:57:06,522 INFO [train.py:715] (7/8) Epoch 9, batch 17850, loss[loss=0.1294, simple_loss=0.2002, pruned_loss=0.02926, over 4773.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.0349, over 972966.97 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 12:57:45,748 INFO [train.py:715] (7/8) Epoch 9, batch 17900, loss[loss=0.1458, simple_loss=0.2188, pruned_loss=0.03645, over 4805.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03487, over 972198.46 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 12:58:25,612 INFO [train.py:715] (7/8) Epoch 9, batch 17950, loss[loss=0.1734, simple_loss=0.2453, pruned_loss=0.05075, over 4899.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2146, pruned_loss=0.0351, over 972849.37 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 12:59:04,969 INFO [train.py:715] (7/8) Epoch 9, batch 18000, loss[loss=0.1081, simple_loss=0.192, pruned_loss=0.01213, over 4931.00 frames.], tot_loss[loss=0.1418, simple_loss=0.214, pruned_loss=0.03478, over 972795.26 frames.], batch size: 23, lr: 2.35e-04 +2022-05-06 12:59:04,970 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 12:59:14,501 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1912, pruned_loss=0.01121, over 914524.00 frames. 
+2022-05-06 12:59:53,954 INFO [train.py:715] (7/8) Epoch 9, batch 18050, loss[loss=0.1333, simple_loss=0.2094, pruned_loss=0.02864, over 4893.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03498, over 973396.48 frames.], batch size: 19, lr: 2.35e-04 +2022-05-06 13:00:33,772 INFO [train.py:715] (7/8) Epoch 9, batch 18100, loss[loss=0.1505, simple_loss=0.2443, pruned_loss=0.02831, over 4805.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03509, over 972418.74 frames.], batch size: 21, lr: 2.35e-04 +2022-05-06 13:01:13,063 INFO [train.py:715] (7/8) Epoch 9, batch 18150, loss[loss=0.1183, simple_loss=0.1905, pruned_loss=0.0231, over 4971.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2148, pruned_loss=0.03516, over 972268.48 frames.], batch size: 35, lr: 2.35e-04 +2022-05-06 13:01:52,673 INFO [train.py:715] (7/8) Epoch 9, batch 18200, loss[loss=0.1257, simple_loss=0.2094, pruned_loss=0.02102, over 4816.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03544, over 971972.52 frames.], batch size: 26, lr: 2.35e-04 +2022-05-06 13:02:31,900 INFO [train.py:715] (7/8) Epoch 9, batch 18250, loss[loss=0.143, simple_loss=0.2248, pruned_loss=0.03063, over 4910.00 frames.], tot_loss[loss=0.144, simple_loss=0.2165, pruned_loss=0.03576, over 971910.78 frames.], batch size: 39, lr: 2.35e-04 +2022-05-06 13:03:11,074 INFO [train.py:715] (7/8) Epoch 9, batch 18300, loss[loss=0.1428, simple_loss=0.221, pruned_loss=0.03232, over 4871.00 frames.], tot_loss[loss=0.1438, simple_loss=0.2165, pruned_loss=0.03551, over 971950.66 frames.], batch size: 22, lr: 2.35e-04 +2022-05-06 13:03:50,430 INFO [train.py:715] (7/8) Epoch 9, batch 18350, loss[loss=0.1423, simple_loss=0.2129, pruned_loss=0.03581, over 4970.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.03501, over 971624.55 frames.], batch size: 24, lr: 2.35e-04 +2022-05-06 13:04:29,594 INFO [train.py:715] (7/8) Epoch 9, batch 18400, loss[loss=0.1486, simple_loss=0.2217, pruned_loss=0.0378, over 4778.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03461, over 971495.25 frames.], batch size: 17, lr: 2.35e-04 +2022-05-06 13:05:08,638 INFO [train.py:715] (7/8) Epoch 9, batch 18450, loss[loss=0.1362, simple_loss=0.209, pruned_loss=0.03173, over 4882.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03455, over 971682.68 frames.], batch size: 22, lr: 2.35e-04 +2022-05-06 13:05:47,602 INFO [train.py:715] (7/8) Epoch 9, batch 18500, loss[loss=0.1267, simple_loss=0.2126, pruned_loss=0.0204, over 4755.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.034, over 972020.15 frames.], batch size: 14, lr: 2.35e-04 +2022-05-06 13:06:26,996 INFO [train.py:715] (7/8) Epoch 9, batch 18550, loss[loss=0.113, simple_loss=0.1868, pruned_loss=0.01958, over 4922.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.0339, over 972025.18 frames.], batch size: 23, lr: 2.35e-04 +2022-05-06 13:07:06,067 INFO [train.py:715] (7/8) Epoch 9, batch 18600, loss[loss=0.1463, simple_loss=0.2148, pruned_loss=0.03888, over 4836.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03386, over 972435.56 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 13:07:44,917 INFO [train.py:715] (7/8) Epoch 9, batch 18650, loss[loss=0.1495, simple_loss=0.2295, pruned_loss=0.03472, over 4964.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03381, over 971779.35 frames.], batch size: 15, lr: 2.35e-04 +2022-05-06 13:08:24,473 INFO 
[train.py:715] (7/8) Epoch 9, batch 18700, loss[loss=0.1298, simple_loss=0.2103, pruned_loss=0.02467, over 4805.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03426, over 970865.86 frames.], batch size: 25, lr: 2.35e-04 +2022-05-06 13:09:03,188 INFO [train.py:715] (7/8) Epoch 9, batch 18750, loss[loss=0.1325, simple_loss=0.1915, pruned_loss=0.03676, over 4808.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.03382, over 971325.09 frames.], batch size: 12, lr: 2.35e-04 +2022-05-06 13:09:42,760 INFO [train.py:715] (7/8) Epoch 9, batch 18800, loss[loss=0.1391, simple_loss=0.2076, pruned_loss=0.0353, over 4933.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2148, pruned_loss=0.03445, over 972440.29 frames.], batch size: 29, lr: 2.35e-04 +2022-05-06 13:10:21,586 INFO [train.py:715] (7/8) Epoch 9, batch 18850, loss[loss=0.1328, simple_loss=0.216, pruned_loss=0.02482, over 4956.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03488, over 972532.95 frames.], batch size: 24, lr: 2.35e-04 +2022-05-06 13:11:00,816 INFO [train.py:715] (7/8) Epoch 9, batch 18900, loss[loss=0.1315, simple_loss=0.216, pruned_loss=0.02353, over 4729.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2155, pruned_loss=0.03506, over 972626.61 frames.], batch size: 16, lr: 2.35e-04 +2022-05-06 13:11:40,164 INFO [train.py:715] (7/8) Epoch 9, batch 18950, loss[loss=0.1563, simple_loss=0.2244, pruned_loss=0.04405, over 4875.00 frames.], tot_loss[loss=0.143, simple_loss=0.2156, pruned_loss=0.03524, over 972353.76 frames.], batch size: 22, lr: 2.35e-04 +2022-05-06 13:12:18,870 INFO [train.py:715] (7/8) Epoch 9, batch 19000, loss[loss=0.1345, simple_loss=0.2039, pruned_loss=0.03251, over 4980.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.0346, over 971722.52 frames.], batch size: 28, lr: 2.35e-04 +2022-05-06 13:12:58,961 INFO [train.py:715] (7/8) Epoch 9, batch 19050, loss[loss=0.1352, simple_loss=0.1998, pruned_loss=0.03527, over 4976.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2146, pruned_loss=0.03419, over 972311.43 frames.], batch size: 35, lr: 2.34e-04 +2022-05-06 13:13:38,428 INFO [train.py:715] (7/8) Epoch 9, batch 19100, loss[loss=0.1251, simple_loss=0.2153, pruned_loss=0.01747, over 4920.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2143, pruned_loss=0.03417, over 972215.25 frames.], batch size: 29, lr: 2.34e-04 +2022-05-06 13:14:17,259 INFO [train.py:715] (7/8) Epoch 9, batch 19150, loss[loss=0.1639, simple_loss=0.2278, pruned_loss=0.05004, over 4990.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03372, over 972391.14 frames.], batch size: 26, lr: 2.34e-04 +2022-05-06 13:14:57,088 INFO [train.py:715] (7/8) Epoch 9, batch 19200, loss[loss=0.1436, simple_loss=0.217, pruned_loss=0.03512, over 4780.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03399, over 972705.17 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:15:36,591 INFO [train.py:715] (7/8) Epoch 9, batch 19250, loss[loss=0.1474, simple_loss=0.2376, pruned_loss=0.02864, over 4809.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03331, over 972622.42 frames.], batch size: 25, lr: 2.34e-04 +2022-05-06 13:16:15,486 INFO [train.py:715] (7/8) Epoch 9, batch 19300, loss[loss=0.1485, simple_loss=0.2202, pruned_loss=0.03839, over 4899.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03368, over 972048.90 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:16:54,062 INFO [train.py:715] (7/8) Epoch 
9, batch 19350, loss[loss=0.1585, simple_loss=0.2259, pruned_loss=0.04557, over 4933.00 frames.], tot_loss[loss=0.1396, simple_loss=0.212, pruned_loss=0.03361, over 971968.18 frames.], batch size: 35, lr: 2.34e-04 +2022-05-06 13:17:34,091 INFO [train.py:715] (7/8) Epoch 9, batch 19400, loss[loss=0.1279, simple_loss=0.195, pruned_loss=0.03045, over 4921.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03356, over 971365.19 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:18:13,123 INFO [train.py:715] (7/8) Epoch 9, batch 19450, loss[loss=0.1639, simple_loss=0.2447, pruned_loss=0.0416, over 4920.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.0336, over 972502.20 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:18:51,813 INFO [train.py:715] (7/8) Epoch 9, batch 19500, loss[loss=0.1378, simple_loss=0.2107, pruned_loss=0.03249, over 4812.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03338, over 972089.72 frames.], batch size: 12, lr: 2.34e-04 +2022-05-06 13:19:30,944 INFO [train.py:715] (7/8) Epoch 9, batch 19550, loss[loss=0.1208, simple_loss=0.1978, pruned_loss=0.02184, over 4939.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2121, pruned_loss=0.03377, over 972183.82 frames.], batch size: 29, lr: 2.34e-04 +2022-05-06 13:20:10,205 INFO [train.py:715] (7/8) Epoch 9, batch 19600, loss[loss=0.149, simple_loss=0.2222, pruned_loss=0.03789, over 4893.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03432, over 972101.98 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:20:48,781 INFO [train.py:715] (7/8) Epoch 9, batch 19650, loss[loss=0.1513, simple_loss=0.215, pruned_loss=0.04383, over 4915.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03452, over 972632.39 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:21:27,270 INFO [train.py:715] (7/8) Epoch 9, batch 19700, loss[loss=0.1568, simple_loss=0.2345, pruned_loss=0.0396, over 4964.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2147, pruned_loss=0.03471, over 972295.30 frames.], batch size: 15, lr: 2.34e-04 +2022-05-06 13:22:07,183 INFO [train.py:715] (7/8) Epoch 9, batch 19750, loss[loss=0.1409, simple_loss=0.2154, pruned_loss=0.03319, over 4919.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03488, over 972727.72 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:22:46,855 INFO [train.py:715] (7/8) Epoch 9, batch 19800, loss[loss=0.1295, simple_loss=0.2097, pruned_loss=0.02466, over 4756.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03495, over 972298.21 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:23:26,649 INFO [train.py:715] (7/8) Epoch 9, batch 19850, loss[loss=0.1408, simple_loss=0.2111, pruned_loss=0.03525, over 4747.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2155, pruned_loss=0.0347, over 971966.73 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:24:06,293 INFO [train.py:715] (7/8) Epoch 9, batch 19900, loss[loss=0.1321, simple_loss=0.2095, pruned_loss=0.02737, over 4984.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03531, over 971411.64 frames.], batch size: 27, lr: 2.34e-04 +2022-05-06 13:24:45,456 INFO [train.py:715] (7/8) Epoch 9, batch 19950, loss[loss=0.1481, simple_loss=0.2243, pruned_loss=0.03601, over 4951.00 frames.], tot_loss[loss=0.1433, simple_loss=0.216, pruned_loss=0.03528, over 971414.94 frames.], batch size: 35, lr: 2.34e-04 +2022-05-06 13:25:24,507 INFO [train.py:715] (7/8) Epoch 9, batch 20000, 
loss[loss=0.1201, simple_loss=0.1942, pruned_loss=0.02303, over 4932.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2156, pruned_loss=0.03532, over 971874.98 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:26:02,952 INFO [train.py:715] (7/8) Epoch 9, batch 20050, loss[loss=0.1552, simple_loss=0.2289, pruned_loss=0.04074, over 4928.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03498, over 972970.54 frames.], batch size: 39, lr: 2.34e-04 +2022-05-06 13:26:42,422 INFO [train.py:715] (7/8) Epoch 9, batch 20100, loss[loss=0.1234, simple_loss=0.1959, pruned_loss=0.02547, over 4921.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2145, pruned_loss=0.03481, over 973214.59 frames.], batch size: 23, lr: 2.34e-04 +2022-05-06 13:27:21,487 INFO [train.py:715] (7/8) Epoch 9, batch 20150, loss[loss=0.1201, simple_loss=0.2016, pruned_loss=0.01928, over 4768.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03463, over 972600.09 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:27:59,971 INFO [train.py:715] (7/8) Epoch 9, batch 20200, loss[loss=0.1611, simple_loss=0.2305, pruned_loss=0.0458, over 4969.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03461, over 971711.18 frames.], batch size: 28, lr: 2.34e-04 +2022-05-06 13:28:39,477 INFO [train.py:715] (7/8) Epoch 9, batch 20250, loss[loss=0.1349, simple_loss=0.2047, pruned_loss=0.03255, over 4786.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2146, pruned_loss=0.03476, over 972927.89 frames.], batch size: 14, lr: 2.34e-04 +2022-05-06 13:29:18,330 INFO [train.py:715] (7/8) Epoch 9, batch 20300, loss[loss=0.1462, simple_loss=0.2306, pruned_loss=0.03086, over 4793.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2153, pruned_loss=0.03482, over 972871.19 frames.], batch size: 14, lr: 2.34e-04 +2022-05-06 13:29:57,722 INFO [train.py:715] (7/8) Epoch 9, batch 20350, loss[loss=0.116, simple_loss=0.1932, pruned_loss=0.01938, over 4799.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2149, pruned_loss=0.03446, over 971699.68 frames.], batch size: 21, lr: 2.34e-04 +2022-05-06 13:30:37,203 INFO [train.py:715] (7/8) Epoch 9, batch 20400, loss[loss=0.1788, simple_loss=0.2521, pruned_loss=0.05273, over 4904.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03438, over 971164.04 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:31:17,091 INFO [train.py:715] (7/8) Epoch 9, batch 20450, loss[loss=0.1789, simple_loss=0.242, pruned_loss=0.05789, over 4935.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03506, over 971862.44 frames.], batch size: 29, lr: 2.34e-04 +2022-05-06 13:31:56,598 INFO [train.py:715] (7/8) Epoch 9, batch 20500, loss[loss=0.1349, simple_loss=0.2135, pruned_loss=0.02813, over 4904.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03464, over 971078.85 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:32:35,671 INFO [train.py:715] (7/8) Epoch 9, batch 20550, loss[loss=0.1169, simple_loss=0.1908, pruned_loss=0.02152, over 4881.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.03413, over 972011.45 frames.], batch size: 22, lr: 2.34e-04 +2022-05-06 13:33:14,862 INFO [train.py:715] (7/8) Epoch 9, batch 20600, loss[loss=0.166, simple_loss=0.2412, pruned_loss=0.04535, over 4901.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2144, pruned_loss=0.03403, over 972644.07 frames.], batch size: 17, lr: 2.34e-04 +2022-05-06 13:33:53,312 INFO [train.py:715] (7/8) Epoch 9, batch 20650, loss[loss=0.1365, 
simple_loss=0.2044, pruned_loss=0.03427, over 4906.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2145, pruned_loss=0.0339, over 972136.05 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:34:32,414 INFO [train.py:715] (7/8) Epoch 9, batch 20700, loss[loss=0.1242, simple_loss=0.1951, pruned_loss=0.02671, over 4973.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2139, pruned_loss=0.03363, over 972548.19 frames.], batch size: 27, lr: 2.34e-04 +2022-05-06 13:35:11,247 INFO [train.py:715] (7/8) Epoch 9, batch 20750, loss[loss=0.1414, simple_loss=0.227, pruned_loss=0.02787, over 4980.00 frames.], tot_loss[loss=0.141, simple_loss=0.2143, pruned_loss=0.03381, over 973142.23 frames.], batch size: 28, lr: 2.34e-04 +2022-05-06 13:35:50,840 INFO [train.py:715] (7/8) Epoch 9, batch 20800, loss[loss=0.1231, simple_loss=0.1932, pruned_loss=0.02651, over 4892.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2145, pruned_loss=0.03425, over 972899.33 frames.], batch size: 22, lr: 2.34e-04 +2022-05-06 13:36:30,209 INFO [train.py:715] (7/8) Epoch 9, batch 20850, loss[loss=0.1264, simple_loss=0.2062, pruned_loss=0.02328, over 4924.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2147, pruned_loss=0.03412, over 973523.09 frames.], batch size: 23, lr: 2.34e-04 +2022-05-06 13:37:09,649 INFO [train.py:715] (7/8) Epoch 9, batch 20900, loss[loss=0.1372, simple_loss=0.2062, pruned_loss=0.03409, over 4971.00 frames.], tot_loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.03382, over 973487.70 frames.], batch size: 15, lr: 2.34e-04 +2022-05-06 13:37:49,144 INFO [train.py:715] (7/8) Epoch 9, batch 20950, loss[loss=0.1406, simple_loss=0.2097, pruned_loss=0.03577, over 4786.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2144, pruned_loss=0.03408, over 974386.38 frames.], batch size: 21, lr: 2.34e-04 +2022-05-06 13:38:28,447 INFO [train.py:715] (7/8) Epoch 9, batch 21000, loss[loss=0.13, simple_loss=0.1946, pruned_loss=0.03266, over 4947.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2145, pruned_loss=0.03431, over 974395.71 frames.], batch size: 35, lr: 2.34e-04 +2022-05-06 13:38:28,448 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 13:38:38,083 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1069, simple_loss=0.1912, pruned_loss=0.01129, over 914524.00 frames. 
+2022-05-06 13:39:17,240 INFO [train.py:715] (7/8) Epoch 9, batch 21050, loss[loss=0.1358, simple_loss=0.2091, pruned_loss=0.03119, over 4870.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2142, pruned_loss=0.03418, over 974976.45 frames.], batch size: 20, lr: 2.34e-04 +2022-05-06 13:39:56,158 INFO [train.py:715] (7/8) Epoch 9, batch 21100, loss[loss=0.1081, simple_loss=0.1869, pruned_loss=0.01464, over 4808.00 frames.], tot_loss[loss=0.142, simple_loss=0.2151, pruned_loss=0.03449, over 974601.25 frames.], batch size: 25, lr: 2.34e-04 +2022-05-06 13:40:35,521 INFO [train.py:715] (7/8) Epoch 9, batch 21150, loss[loss=0.1334, simple_loss=0.1945, pruned_loss=0.03612, over 4715.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2149, pruned_loss=0.0343, over 974172.69 frames.], batch size: 12, lr: 2.34e-04 +2022-05-06 13:41:14,531 INFO [train.py:715] (7/8) Epoch 9, batch 21200, loss[loss=0.1352, simple_loss=0.2001, pruned_loss=0.03516, over 4796.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2155, pruned_loss=0.0348, over 974642.00 frames.], batch size: 24, lr: 2.34e-04 +2022-05-06 13:41:54,099 INFO [train.py:715] (7/8) Epoch 9, batch 21250, loss[loss=0.1442, simple_loss=0.2165, pruned_loss=0.0359, over 4944.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2152, pruned_loss=0.03457, over 973567.10 frames.], batch size: 35, lr: 2.34e-04 +2022-05-06 13:42:32,488 INFO [train.py:715] (7/8) Epoch 9, batch 21300, loss[loss=0.1503, simple_loss=0.2156, pruned_loss=0.0425, over 4761.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2148, pruned_loss=0.03443, over 972273.78 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:43:11,102 INFO [train.py:715] (7/8) Epoch 9, batch 21350, loss[loss=0.1443, simple_loss=0.2303, pruned_loss=0.02917, over 4784.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03385, over 972302.94 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:43:50,029 INFO [train.py:715] (7/8) Epoch 9, batch 21400, loss[loss=0.1256, simple_loss=0.2, pruned_loss=0.02564, over 4735.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.0334, over 972247.58 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:44:28,773 INFO [train.py:715] (7/8) Epoch 9, batch 21450, loss[loss=0.1591, simple_loss=0.2188, pruned_loss=0.04973, over 4867.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03451, over 972335.56 frames.], batch size: 16, lr: 2.34e-04 +2022-05-06 13:45:07,168 INFO [train.py:715] (7/8) Epoch 9, batch 21500, loss[loss=0.1721, simple_loss=0.2381, pruned_loss=0.05303, over 4767.00 frames.], tot_loss[loss=0.1422, simple_loss=0.215, pruned_loss=0.03469, over 972001.01 frames.], batch size: 18, lr: 2.34e-04 +2022-05-06 13:45:46,286 INFO [train.py:715] (7/8) Epoch 9, batch 21550, loss[loss=0.1293, simple_loss=0.1983, pruned_loss=0.03015, over 4802.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03446, over 971561.41 frames.], batch size: 25, lr: 2.34e-04 +2022-05-06 13:46:25,002 INFO [train.py:715] (7/8) Epoch 9, batch 21600, loss[loss=0.1607, simple_loss=0.2414, pruned_loss=0.04001, over 4967.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.03459, over 972662.63 frames.], batch size: 35, lr: 2.34e-04 +2022-05-06 13:47:04,091 INFO [train.py:715] (7/8) Epoch 9, batch 21650, loss[loss=0.1652, simple_loss=0.2497, pruned_loss=0.04035, over 4765.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03452, over 971156.83 frames.], batch size: 19, lr: 2.34e-04 +2022-05-06 13:47:43,365 INFO 
[train.py:715] (7/8) Epoch 9, batch 21700, loss[loss=0.1559, simple_loss=0.241, pruned_loss=0.03535, over 4923.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.0342, over 970714.99 frames.], batch size: 23, lr: 2.34e-04 +2022-05-06 13:48:22,455 INFO [train.py:715] (7/8) Epoch 9, batch 21750, loss[loss=0.1398, simple_loss=0.2132, pruned_loss=0.03322, over 4862.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2144, pruned_loss=0.03448, over 970138.08 frames.], batch size: 13, lr: 2.34e-04 +2022-05-06 13:49:01,563 INFO [train.py:715] (7/8) Epoch 9, batch 21800, loss[loss=0.1299, simple_loss=0.2071, pruned_loss=0.02638, over 4795.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2144, pruned_loss=0.03445, over 970284.71 frames.], batch size: 25, lr: 2.34e-04 +2022-05-06 13:49:41,091 INFO [train.py:715] (7/8) Epoch 9, batch 21850, loss[loss=0.1515, simple_loss=0.224, pruned_loss=0.03957, over 4885.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03409, over 971235.61 frames.], batch size: 39, lr: 2.34e-04 +2022-05-06 13:50:20,440 INFO [train.py:715] (7/8) Epoch 9, batch 21900, loss[loss=0.1304, simple_loss=0.2068, pruned_loss=0.02698, over 4849.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03424, over 971676.33 frames.], batch size: 34, lr: 2.33e-04 +2022-05-06 13:50:59,011 INFO [train.py:715] (7/8) Epoch 9, batch 21950, loss[loss=0.1356, simple_loss=0.202, pruned_loss=0.03458, over 4848.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.0338, over 972376.93 frames.], batch size: 32, lr: 2.33e-04 +2022-05-06 13:51:37,912 INFO [train.py:715] (7/8) Epoch 9, batch 22000, loss[loss=0.137, simple_loss=0.2025, pruned_loss=0.03577, over 4945.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03347, over 972014.68 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 13:52:16,813 INFO [train.py:715] (7/8) Epoch 9, batch 22050, loss[loss=0.1277, simple_loss=0.1932, pruned_loss=0.03116, over 4842.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03353, over 972225.81 frames.], batch size: 30, lr: 2.33e-04 +2022-05-06 13:52:56,516 INFO [train.py:715] (7/8) Epoch 9, batch 22100, loss[loss=0.1826, simple_loss=0.2519, pruned_loss=0.05662, over 4828.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03437, over 972008.41 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 13:53:35,800 INFO [train.py:715] (7/8) Epoch 9, batch 22150, loss[loss=0.1786, simple_loss=0.2375, pruned_loss=0.05982, over 4974.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2143, pruned_loss=0.03495, over 972392.94 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 13:54:14,970 INFO [train.py:715] (7/8) Epoch 9, batch 22200, loss[loss=0.1373, simple_loss=0.2037, pruned_loss=0.03548, over 4882.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2145, pruned_loss=0.03505, over 972651.30 frames.], batch size: 22, lr: 2.33e-04 +2022-05-06 13:54:54,448 INFO [train.py:715] (7/8) Epoch 9, batch 22250, loss[loss=0.1369, simple_loss=0.2124, pruned_loss=0.03072, over 4772.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2148, pruned_loss=0.0348, over 972836.48 frames.], batch size: 17, lr: 2.33e-04 +2022-05-06 13:55:33,233 INFO [train.py:715] (7/8) Epoch 9, batch 22300, loss[loss=0.1393, simple_loss=0.2053, pruned_loss=0.03662, over 4844.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03487, over 972608.53 frames.], batch size: 30, lr: 2.33e-04 +2022-05-06 13:56:11,831 INFO [train.py:715] (7/8) 
Epoch 9, batch 22350, loss[loss=0.1575, simple_loss=0.2265, pruned_loss=0.04423, over 4814.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03515, over 972727.15 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 13:56:50,721 INFO [train.py:715] (7/8) Epoch 9, batch 22400, loss[loss=0.1628, simple_loss=0.2386, pruned_loss=0.04352, over 4955.00 frames.], tot_loss[loss=0.1433, simple_loss=0.2156, pruned_loss=0.03545, over 972116.84 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 13:57:29,428 INFO [train.py:715] (7/8) Epoch 9, batch 22450, loss[loss=0.1511, simple_loss=0.2199, pruned_loss=0.04115, over 4886.00 frames.], tot_loss[loss=0.1432, simple_loss=0.2159, pruned_loss=0.0352, over 972892.93 frames.], batch size: 17, lr: 2.33e-04 +2022-05-06 13:58:08,127 INFO [train.py:715] (7/8) Epoch 9, batch 22500, loss[loss=0.122, simple_loss=0.1984, pruned_loss=0.02285, over 4796.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2154, pruned_loss=0.03487, over 972968.90 frames.], batch size: 13, lr: 2.33e-04 +2022-05-06 13:58:47,017 INFO [train.py:715] (7/8) Epoch 9, batch 22550, loss[loss=0.1354, simple_loss=0.2044, pruned_loss=0.03321, over 4942.00 frames.], tot_loss[loss=0.1436, simple_loss=0.2162, pruned_loss=0.03551, over 973604.84 frames.], batch size: 23, lr: 2.33e-04 +2022-05-06 13:59:26,036 INFO [train.py:715] (7/8) Epoch 9, batch 22600, loss[loss=0.1436, simple_loss=0.2277, pruned_loss=0.02971, over 4927.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2155, pruned_loss=0.03505, over 973061.48 frames.], batch size: 23, lr: 2.33e-04 +2022-05-06 14:00:05,204 INFO [train.py:715] (7/8) Epoch 9, batch 22650, loss[loss=0.1592, simple_loss=0.2407, pruned_loss=0.0388, over 4830.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2161, pruned_loss=0.03543, over 972018.90 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 14:00:44,243 INFO [train.py:715] (7/8) Epoch 9, batch 22700, loss[loss=0.1436, simple_loss=0.2228, pruned_loss=0.03223, over 4969.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2154, pruned_loss=0.03505, over 972715.83 frames.], batch size: 24, lr: 2.33e-04 +2022-05-06 14:01:26,078 INFO [train.py:715] (7/8) Epoch 9, batch 22750, loss[loss=0.1488, simple_loss=0.2322, pruned_loss=0.03269, over 4945.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03516, over 973120.95 frames.], batch size: 29, lr: 2.33e-04 +2022-05-06 14:02:04,855 INFO [train.py:715] (7/8) Epoch 9, batch 22800, loss[loss=0.1399, simple_loss=0.2204, pruned_loss=0.02968, over 4948.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03493, over 973084.19 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:02:44,153 INFO [train.py:715] (7/8) Epoch 9, batch 22850, loss[loss=0.1663, simple_loss=0.2341, pruned_loss=0.04924, over 4865.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2145, pruned_loss=0.03519, over 973039.28 frames.], batch size: 20, lr: 2.33e-04 +2022-05-06 14:03:22,723 INFO [train.py:715] (7/8) Epoch 9, batch 22900, loss[loss=0.1402, simple_loss=0.2051, pruned_loss=0.03765, over 4777.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2144, pruned_loss=0.03519, over 973582.06 frames.], batch size: 17, lr: 2.33e-04 +2022-05-06 14:04:01,804 INFO [train.py:715] (7/8) Epoch 9, batch 22950, loss[loss=0.126, simple_loss=0.2021, pruned_loss=0.02497, over 4939.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2135, pruned_loss=0.03492, over 973475.54 frames.], batch size: 18, lr: 2.33e-04 +2022-05-06 14:04:40,857 INFO [train.py:715] (7/8) Epoch 9, batch 23000, 
loss[loss=0.152, simple_loss=0.2213, pruned_loss=0.0414, over 4829.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2141, pruned_loss=0.03532, over 972621.93 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:05:20,249 INFO [train.py:715] (7/8) Epoch 9, batch 23050, loss[loss=0.1396, simple_loss=0.2108, pruned_loss=0.03416, over 4985.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2148, pruned_loss=0.03525, over 972747.60 frames.], batch size: 28, lr: 2.33e-04 +2022-05-06 14:05:59,523 INFO [train.py:715] (7/8) Epoch 9, batch 23100, loss[loss=0.1712, simple_loss=0.2413, pruned_loss=0.05052, over 4829.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2149, pruned_loss=0.03497, over 972474.17 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 14:06:38,546 INFO [train.py:715] (7/8) Epoch 9, batch 23150, loss[loss=0.1408, simple_loss=0.2142, pruned_loss=0.03374, over 4860.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2152, pruned_loss=0.03547, over 972662.12 frames.], batch size: 20, lr: 2.33e-04 +2022-05-06 14:07:18,160 INFO [train.py:715] (7/8) Epoch 9, batch 23200, loss[loss=0.1563, simple_loss=0.2307, pruned_loss=0.04092, over 4779.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03475, over 972142.39 frames.], batch size: 14, lr: 2.33e-04 +2022-05-06 14:07:57,915 INFO [train.py:715] (7/8) Epoch 9, batch 23250, loss[loss=0.1676, simple_loss=0.2288, pruned_loss=0.05322, over 4959.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.03492, over 972290.70 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:08:37,685 INFO [train.py:715] (7/8) Epoch 9, batch 23300, loss[loss=0.1718, simple_loss=0.243, pruned_loss=0.05028, over 4760.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03497, over 972850.20 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:09:17,440 INFO [train.py:715] (7/8) Epoch 9, batch 23350, loss[loss=0.1608, simple_loss=0.2231, pruned_loss=0.0492, over 4848.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03461, over 972543.92 frames.], batch size: 20, lr: 2.33e-04 +2022-05-06 14:09:56,738 INFO [train.py:715] (7/8) Epoch 9, batch 23400, loss[loss=0.1225, simple_loss=0.196, pruned_loss=0.02451, over 4969.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.034, over 972457.92 frames.], batch size: 24, lr: 2.33e-04 +2022-05-06 14:10:35,594 INFO [train.py:715] (7/8) Epoch 9, batch 23450, loss[loss=0.1172, simple_loss=0.1936, pruned_loss=0.02037, over 4936.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03389, over 972898.12 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:11:14,356 INFO [train.py:715] (7/8) Epoch 9, batch 23500, loss[loss=0.1634, simple_loss=0.2325, pruned_loss=0.04716, over 4694.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03396, over 972377.15 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:11:52,882 INFO [train.py:715] (7/8) Epoch 9, batch 23550, loss[loss=0.1327, simple_loss=0.217, pruned_loss=0.02419, over 4880.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03401, over 971715.85 frames.], batch size: 22, lr: 2.33e-04 +2022-05-06 14:12:32,348 INFO [train.py:715] (7/8) Epoch 9, batch 23600, loss[loss=0.1574, simple_loss=0.2262, pruned_loss=0.04432, over 4958.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.03424, over 971984.08 frames.], batch size: 39, lr: 2.33e-04 +2022-05-06 14:13:11,500 INFO [train.py:715] (7/8) Epoch 9, batch 23650, loss[loss=0.1436, 
simple_loss=0.2148, pruned_loss=0.03617, over 4939.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03435, over 971685.16 frames.], batch size: 23, lr: 2.33e-04 +2022-05-06 14:13:50,878 INFO [train.py:715] (7/8) Epoch 9, batch 23700, loss[loss=0.1444, simple_loss=0.2166, pruned_loss=0.03616, over 4985.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03446, over 971524.25 frames.], batch size: 14, lr: 2.33e-04 +2022-05-06 14:14:30,049 INFO [train.py:715] (7/8) Epoch 9, batch 23750, loss[loss=0.165, simple_loss=0.2296, pruned_loss=0.0502, over 4812.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2141, pruned_loss=0.03443, over 972112.80 frames.], batch size: 13, lr: 2.33e-04 +2022-05-06 14:15:09,286 INFO [train.py:715] (7/8) Epoch 9, batch 23800, loss[loss=0.1269, simple_loss=0.2092, pruned_loss=0.02228, over 4954.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03417, over 971934.93 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:15:48,394 INFO [train.py:715] (7/8) Epoch 9, batch 23850, loss[loss=0.1518, simple_loss=0.2164, pruned_loss=0.04363, over 4887.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2135, pruned_loss=0.03409, over 972015.44 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:16:27,645 INFO [train.py:715] (7/8) Epoch 9, batch 23900, loss[loss=0.1142, simple_loss=0.1813, pruned_loss=0.02358, over 4881.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2127, pruned_loss=0.03393, over 971356.47 frames.], batch size: 16, lr: 2.33e-04 +2022-05-06 14:17:06,536 INFO [train.py:715] (7/8) Epoch 9, batch 23950, loss[loss=0.1397, simple_loss=0.2133, pruned_loss=0.03309, over 4969.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2121, pruned_loss=0.03383, over 971620.95 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:17:45,503 INFO [train.py:715] (7/8) Epoch 9, batch 24000, loss[loss=0.1174, simple_loss=0.1988, pruned_loss=0.01795, over 4826.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03414, over 972177.87 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 14:17:45,504 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 14:17:55,356 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1069, simple_loss=0.1913, pruned_loss=0.01128, over 914524.00 frames. 
+2022-05-06 14:18:34,691 INFO [train.py:715] (7/8) Epoch 9, batch 24050, loss[loss=0.1618, simple_loss=0.2338, pruned_loss=0.04491, over 4797.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2129, pruned_loss=0.03388, over 972379.39 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:19:14,963 INFO [train.py:715] (7/8) Epoch 9, batch 24100, loss[loss=0.1512, simple_loss=0.2179, pruned_loss=0.04223, over 4963.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03328, over 972444.26 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 14:19:54,474 INFO [train.py:715] (7/8) Epoch 9, batch 24150, loss[loss=0.1756, simple_loss=0.2435, pruned_loss=0.05384, over 4929.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03356, over 972862.34 frames.], batch size: 35, lr: 2.33e-04 +2022-05-06 14:20:33,559 INFO [train.py:715] (7/8) Epoch 9, batch 24200, loss[loss=0.1825, simple_loss=0.2504, pruned_loss=0.05731, over 4763.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03372, over 973436.60 frames.], batch size: 19, lr: 2.33e-04 +2022-05-06 14:21:12,484 INFO [train.py:715] (7/8) Epoch 9, batch 24250, loss[loss=0.121, simple_loss=0.1895, pruned_loss=0.02624, over 4859.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2124, pruned_loss=0.03368, over 974037.63 frames.], batch size: 20, lr: 2.33e-04 +2022-05-06 14:21:52,134 INFO [train.py:715] (7/8) Epoch 9, batch 24300, loss[loss=0.1501, simple_loss=0.2258, pruned_loss=0.03716, over 4982.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.03372, over 973791.43 frames.], batch size: 28, lr: 2.33e-04 +2022-05-06 14:22:31,314 INFO [train.py:715] (7/8) Epoch 9, batch 24350, loss[loss=0.14, simple_loss=0.2183, pruned_loss=0.03083, over 4949.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.0342, over 973366.03 frames.], batch size: 23, lr: 2.33e-04 +2022-05-06 14:23:10,727 INFO [train.py:715] (7/8) Epoch 9, batch 24400, loss[loss=0.158, simple_loss=0.2275, pruned_loss=0.04427, over 4689.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2136, pruned_loss=0.03445, over 972626.01 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:23:50,613 INFO [train.py:715] (7/8) Epoch 9, batch 24450, loss[loss=0.1386, simple_loss=0.2036, pruned_loss=0.03679, over 4839.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03419, over 972849.20 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:24:30,640 INFO [train.py:715] (7/8) Epoch 9, batch 24500, loss[loss=0.1286, simple_loss=0.1994, pruned_loss=0.02892, over 4989.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03422, over 973411.22 frames.], batch size: 27, lr: 2.33e-04 +2022-05-06 14:25:10,999 INFO [train.py:715] (7/8) Epoch 9, batch 24550, loss[loss=0.116, simple_loss=0.1994, pruned_loss=0.01635, over 4797.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2128, pruned_loss=0.03396, over 972780.74 frames.], batch size: 12, lr: 2.33e-04 +2022-05-06 14:25:50,745 INFO [train.py:715] (7/8) Epoch 9, batch 24600, loss[loss=0.1628, simple_loss=0.2366, pruned_loss=0.04452, over 4954.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2134, pruned_loss=0.0344, over 973307.51 frames.], batch size: 21, lr: 2.33e-04 +2022-05-06 14:26:30,714 INFO [train.py:715] (7/8) Epoch 9, batch 24650, loss[loss=0.1268, simple_loss=0.2085, pruned_loss=0.02256, over 4817.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03422, over 973101.90 frames.], batch size: 26, lr: 2.33e-04 +2022-05-06 14:27:09,793 
INFO [train.py:715] (7/8) Epoch 9, batch 24700, loss[loss=0.1553, simple_loss=0.2308, pruned_loss=0.03992, over 4818.00 frames.], tot_loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03412, over 972063.79 frames.], batch size: 15, lr: 2.33e-04 +2022-05-06 14:27:48,507 INFO [train.py:715] (7/8) Epoch 9, batch 24750, loss[loss=0.1439, simple_loss=0.2279, pruned_loss=0.02993, over 4971.00 frames.], tot_loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03366, over 971799.48 frames.], batch size: 24, lr: 2.33e-04 +2022-05-06 14:28:28,025 INFO [train.py:715] (7/8) Epoch 9, batch 24800, loss[loss=0.11, simple_loss=0.1879, pruned_loss=0.01606, over 4785.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2121, pruned_loss=0.03353, over 972092.61 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:29:07,571 INFO [train.py:715] (7/8) Epoch 9, batch 24850, loss[loss=0.1219, simple_loss=0.1917, pruned_loss=0.02604, over 4841.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03361, over 972170.80 frames.], batch size: 30, lr: 2.32e-04 +2022-05-06 14:29:46,972 INFO [train.py:715] (7/8) Epoch 9, batch 24900, loss[loss=0.1184, simple_loss=0.1926, pruned_loss=0.02208, over 4758.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03364, over 972317.61 frames.], batch size: 19, lr: 2.32e-04 +2022-05-06 14:30:26,388 INFO [train.py:715] (7/8) Epoch 9, batch 24950, loss[loss=0.137, simple_loss=0.2145, pruned_loss=0.02975, over 4987.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2136, pruned_loss=0.03438, over 972431.65 frames.], batch size: 26, lr: 2.32e-04 +2022-05-06 14:31:06,084 INFO [train.py:715] (7/8) Epoch 9, batch 25000, loss[loss=0.1187, simple_loss=0.1922, pruned_loss=0.02259, over 4821.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03417, over 972095.81 frames.], batch size: 12, lr: 2.32e-04 +2022-05-06 14:31:44,920 INFO [train.py:715] (7/8) Epoch 9, batch 25050, loss[loss=0.1396, simple_loss=0.2194, pruned_loss=0.02994, over 4972.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03451, over 972073.15 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:32:24,417 INFO [train.py:715] (7/8) Epoch 9, batch 25100, loss[loss=0.1255, simple_loss=0.1995, pruned_loss=0.02571, over 4912.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2148, pruned_loss=0.03437, over 972523.11 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:33:03,521 INFO [train.py:715] (7/8) Epoch 9, batch 25150, loss[loss=0.1442, simple_loss=0.2157, pruned_loss=0.03637, over 4919.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2145, pruned_loss=0.03422, over 972127.86 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 14:33:42,582 INFO [train.py:715] (7/8) Epoch 9, batch 25200, loss[loss=0.1471, simple_loss=0.2119, pruned_loss=0.04119, over 4861.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.03497, over 971966.15 frames.], batch size: 30, lr: 2.32e-04 +2022-05-06 14:34:21,841 INFO [train.py:715] (7/8) Epoch 9, batch 25250, loss[loss=0.1404, simple_loss=0.2014, pruned_loss=0.03966, over 4804.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2148, pruned_loss=0.03476, over 971578.74 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 14:35:00,584 INFO [train.py:715] (7/8) Epoch 9, batch 25300, loss[loss=0.175, simple_loss=0.2479, pruned_loss=0.05105, over 4820.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2148, pruned_loss=0.03508, over 971719.05 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:35:40,270 INFO [train.py:715] (7/8) 
Epoch 9, batch 25350, loss[loss=0.1272, simple_loss=0.2024, pruned_loss=0.026, over 4831.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03463, over 971386.55 frames.], batch size: 26, lr: 2.32e-04 +2022-05-06 14:36:20,107 INFO [train.py:715] (7/8) Epoch 9, batch 25400, loss[loss=0.1331, simple_loss=0.2139, pruned_loss=0.02618, over 4782.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.0342, over 971668.18 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:37:00,345 INFO [train.py:715] (7/8) Epoch 9, batch 25450, loss[loss=0.1384, simple_loss=0.2191, pruned_loss=0.02887, over 4853.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03387, over 971453.02 frames.], batch size: 30, lr: 2.32e-04 +2022-05-06 14:37:38,913 INFO [train.py:715] (7/8) Epoch 9, batch 25500, loss[loss=0.1498, simple_loss=0.2276, pruned_loss=0.03601, over 4976.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2131, pruned_loss=0.03414, over 972218.57 frames.], batch size: 35, lr: 2.32e-04 +2022-05-06 14:38:18,073 INFO [train.py:715] (7/8) Epoch 9, batch 25550, loss[loss=0.1518, simple_loss=0.2147, pruned_loss=0.04444, over 4986.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2136, pruned_loss=0.03438, over 971609.42 frames.], batch size: 35, lr: 2.32e-04 +2022-05-06 14:38:57,226 INFO [train.py:715] (7/8) Epoch 9, batch 25600, loss[loss=0.1274, simple_loss=0.202, pruned_loss=0.02634, over 4911.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.03454, over 972703.22 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 14:39:36,164 INFO [train.py:715] (7/8) Epoch 9, batch 25650, loss[loss=0.1597, simple_loss=0.2325, pruned_loss=0.04344, over 4908.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2127, pruned_loss=0.03409, over 971841.27 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:40:15,297 INFO [train.py:715] (7/8) Epoch 9, batch 25700, loss[loss=0.143, simple_loss=0.2188, pruned_loss=0.03363, over 4808.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2125, pruned_loss=0.03421, over 971501.41 frames.], batch size: 25, lr: 2.32e-04 +2022-05-06 14:40:54,417 INFO [train.py:715] (7/8) Epoch 9, batch 25750, loss[loss=0.1435, simple_loss=0.2162, pruned_loss=0.03545, over 4959.00 frames.], tot_loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.03423, over 971869.64 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:41:33,411 INFO [train.py:715] (7/8) Epoch 9, batch 25800, loss[loss=0.1174, simple_loss=0.194, pruned_loss=0.02042, over 4792.00 frames.], tot_loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.03426, over 972588.96 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:42:13,628 INFO [train.py:715] (7/8) Epoch 9, batch 25850, loss[loss=0.1659, simple_loss=0.249, pruned_loss=0.0414, over 4952.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03392, over 972180.60 frames.], batch size: 21, lr: 2.32e-04 +2022-05-06 14:42:53,073 INFO [train.py:715] (7/8) Epoch 9, batch 25900, loss[loss=0.1266, simple_loss=0.2035, pruned_loss=0.0248, over 4977.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.03431, over 972083.16 frames.], batch size: 25, lr: 2.32e-04 +2022-05-06 14:43:32,747 INFO [train.py:715] (7/8) Epoch 9, batch 25950, loss[loss=0.1581, simple_loss=0.2255, pruned_loss=0.04539, over 4913.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03416, over 972740.65 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:44:11,978 INFO [train.py:715] (7/8) Epoch 9, batch 26000, 
loss[loss=0.1418, simple_loss=0.2137, pruned_loss=0.03493, over 4930.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03439, over 972233.24 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:44:51,308 INFO [train.py:715] (7/8) Epoch 9, batch 26050, loss[loss=0.1463, simple_loss=0.2124, pruned_loss=0.04003, over 4792.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.0345, over 971535.64 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:45:30,101 INFO [train.py:715] (7/8) Epoch 9, batch 26100, loss[loss=0.1357, simple_loss=0.2065, pruned_loss=0.03244, over 4790.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2136, pruned_loss=0.03452, over 970702.98 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 14:46:09,806 INFO [train.py:715] (7/8) Epoch 9, batch 26150, loss[loss=0.1422, simple_loss=0.2128, pruned_loss=0.0358, over 4931.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03427, over 971140.68 frames.], batch size: 29, lr: 2.32e-04 +2022-05-06 14:46:50,053 INFO [train.py:715] (7/8) Epoch 9, batch 26200, loss[loss=0.1463, simple_loss=0.2116, pruned_loss=0.04047, over 4987.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03425, over 971067.65 frames.], batch size: 28, lr: 2.32e-04 +2022-05-06 14:47:29,917 INFO [train.py:715] (7/8) Epoch 9, batch 26250, loss[loss=0.1249, simple_loss=0.2072, pruned_loss=0.02124, over 4989.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03367, over 971198.54 frames.], batch size: 28, lr: 2.32e-04 +2022-05-06 14:48:09,838 INFO [train.py:715] (7/8) Epoch 9, batch 26300, loss[loss=0.1368, simple_loss=0.2136, pruned_loss=0.03002, over 4771.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03364, over 971712.67 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:48:49,367 INFO [train.py:715] (7/8) Epoch 9, batch 26350, loss[loss=0.1256, simple_loss=0.2088, pruned_loss=0.02113, over 4885.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2131, pruned_loss=0.03369, over 972477.53 frames.], batch size: 22, lr: 2.32e-04 +2022-05-06 14:49:28,724 INFO [train.py:715] (7/8) Epoch 9, batch 26400, loss[loss=0.1727, simple_loss=0.2317, pruned_loss=0.05691, over 4822.00 frames.], tot_loss[loss=0.14, simple_loss=0.2125, pruned_loss=0.03369, over 971793.72 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:50:07,641 INFO [train.py:715] (7/8) Epoch 9, batch 26450, loss[loss=0.1244, simple_loss=0.1954, pruned_loss=0.0267, over 4788.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03362, over 971830.65 frames.], batch size: 24, lr: 2.32e-04 +2022-05-06 14:50:46,957 INFO [train.py:715] (7/8) Epoch 9, batch 26500, loss[loss=0.1457, simple_loss=0.2081, pruned_loss=0.04162, over 4960.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2139, pruned_loss=0.03397, over 972102.93 frames.], batch size: 35, lr: 2.32e-04 +2022-05-06 14:51:26,799 INFO [train.py:715] (7/8) Epoch 9, batch 26550, loss[loss=0.1281, simple_loss=0.2047, pruned_loss=0.02573, over 4990.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03403, over 971871.21 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:52:06,135 INFO [train.py:715] (7/8) Epoch 9, batch 26600, loss[loss=0.1411, simple_loss=0.2071, pruned_loss=0.03757, over 4916.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03376, over 972345.36 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:52:46,089 INFO [train.py:715] (7/8) Epoch 9, batch 26650, loss[loss=0.1275, 
simple_loss=0.2045, pruned_loss=0.02522, over 4773.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2141, pruned_loss=0.03418, over 972992.74 frames.], batch size: 19, lr: 2.32e-04 +2022-05-06 14:53:25,380 INFO [train.py:715] (7/8) Epoch 9, batch 26700, loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02939, over 4990.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.0346, over 973353.86 frames.], batch size: 14, lr: 2.32e-04 +2022-05-06 14:54:04,745 INFO [train.py:715] (7/8) Epoch 9, batch 26750, loss[loss=0.1477, simple_loss=0.2205, pruned_loss=0.03745, over 4988.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2153, pruned_loss=0.03469, over 973439.39 frames.], batch size: 25, lr: 2.32e-04 +2022-05-06 14:54:43,925 INFO [train.py:715] (7/8) Epoch 9, batch 26800, loss[loss=0.1607, simple_loss=0.2344, pruned_loss=0.0435, over 4915.00 frames.], tot_loss[loss=0.143, simple_loss=0.2158, pruned_loss=0.03508, over 972929.94 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:55:22,921 INFO [train.py:715] (7/8) Epoch 9, batch 26850, loss[loss=0.1177, simple_loss=0.1879, pruned_loss=0.02369, over 4887.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2152, pruned_loss=0.03512, over 972575.20 frames.], batch size: 22, lr: 2.32e-04 +2022-05-06 14:56:02,402 INFO [train.py:715] (7/8) Epoch 9, batch 26900, loss[loss=0.1513, simple_loss=0.217, pruned_loss=0.04277, over 4822.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2145, pruned_loss=0.03464, over 971464.20 frames.], batch size: 15, lr: 2.32e-04 +2022-05-06 14:56:42,228 INFO [train.py:715] (7/8) Epoch 9, batch 26950, loss[loss=0.1398, simple_loss=0.2153, pruned_loss=0.03216, over 4889.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2147, pruned_loss=0.03445, over 971043.82 frames.], batch size: 19, lr: 2.32e-04 +2022-05-06 14:57:21,396 INFO [train.py:715] (7/8) Epoch 9, batch 27000, loss[loss=0.1257, simple_loss=0.2011, pruned_loss=0.02518, over 4910.00 frames.], tot_loss[loss=0.1421, simple_loss=0.215, pruned_loss=0.03459, over 970661.89 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 14:57:21,397 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 14:57:30,964 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1912, pruned_loss=0.01121, over 914524.00 frames. 
+2022-05-06 14:58:10,507 INFO [train.py:715] (7/8) Epoch 9, batch 27050, loss[loss=0.168, simple_loss=0.2351, pruned_loss=0.05043, over 4966.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2151, pruned_loss=0.03472, over 971793.47 frames.], batch size: 35, lr: 2.32e-04 +2022-05-06 14:58:50,068 INFO [train.py:715] (7/8) Epoch 9, batch 27100, loss[loss=0.132, simple_loss=0.2075, pruned_loss=0.0282, over 4784.00 frames.], tot_loss[loss=0.142, simple_loss=0.215, pruned_loss=0.03456, over 970917.33 frames.], batch size: 18, lr: 2.32e-04 +2022-05-06 14:59:30,122 INFO [train.py:715] (7/8) Epoch 9, batch 27150, loss[loss=0.1485, simple_loss=0.2242, pruned_loss=0.03638, over 4792.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03457, over 970503.97 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 15:00:09,248 INFO [train.py:715] (7/8) Epoch 9, batch 27200, loss[loss=0.149, simple_loss=0.2256, pruned_loss=0.03622, over 4767.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.0346, over 970676.47 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 15:00:48,164 INFO [train.py:715] (7/8) Epoch 9, batch 27250, loss[loss=0.1637, simple_loss=0.2337, pruned_loss=0.0468, over 4952.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2134, pruned_loss=0.03415, over 970623.50 frames.], batch size: 39, lr: 2.32e-04 +2022-05-06 15:01:27,382 INFO [train.py:715] (7/8) Epoch 9, batch 27300, loss[loss=0.1899, simple_loss=0.2479, pruned_loss=0.06591, over 4784.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2146, pruned_loss=0.03486, over 971457.37 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 15:02:06,271 INFO [train.py:715] (7/8) Epoch 9, batch 27350, loss[loss=0.1585, simple_loss=0.2266, pruned_loss=0.04515, over 4856.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03489, over 971752.49 frames.], batch size: 20, lr: 2.32e-04 +2022-05-06 15:02:45,303 INFO [train.py:715] (7/8) Epoch 9, batch 27400, loss[loss=0.125, simple_loss=0.1975, pruned_loss=0.0263, over 4856.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2146, pruned_loss=0.03515, over 972444.45 frames.], batch size: 20, lr: 2.32e-04 +2022-05-06 15:03:24,468 INFO [train.py:715] (7/8) Epoch 9, batch 27450, loss[loss=0.1343, simple_loss=0.2057, pruned_loss=0.03147, over 4901.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2147, pruned_loss=0.03494, over 972329.32 frames.], batch size: 22, lr: 2.32e-04 +2022-05-06 15:04:03,433 INFO [train.py:715] (7/8) Epoch 9, batch 27500, loss[loss=0.1343, simple_loss=0.2117, pruned_loss=0.02841, over 4775.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.0354, over 971797.13 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 15:04:42,464 INFO [train.py:715] (7/8) Epoch 9, batch 27550, loss[loss=0.2136, simple_loss=0.2795, pruned_loss=0.07388, over 4913.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2149, pruned_loss=0.03546, over 972213.06 frames.], batch size: 17, lr: 2.32e-04 +2022-05-06 15:05:21,388 INFO [train.py:715] (7/8) Epoch 9, batch 27600, loss[loss=0.1429, simple_loss=0.2196, pruned_loss=0.0331, over 4827.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2141, pruned_loss=0.03454, over 971648.37 frames.], batch size: 27, lr: 2.32e-04 +2022-05-06 15:06:00,162 INFO [train.py:715] (7/8) Epoch 9, batch 27650, loss[loss=0.1343, simple_loss=0.2136, pruned_loss=0.02747, over 4774.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03461, over 971312.66 frames.], batch size: 16, lr: 2.32e-04 +2022-05-06 15:06:39,018 INFO 
[train.py:715] (7/8) Epoch 9, batch 27700, loss[loss=0.118, simple_loss=0.1834, pruned_loss=0.02634, over 4646.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.034, over 971102.18 frames.], batch size: 13, lr: 2.32e-04 +2022-05-06 15:07:18,264 INFO [train.py:715] (7/8) Epoch 9, batch 27750, loss[loss=0.1139, simple_loss=0.1926, pruned_loss=0.01759, over 4802.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03338, over 970967.31 frames.], batch size: 25, lr: 2.31e-04 +2022-05-06 15:07:57,613 INFO [train.py:715] (7/8) Epoch 9, batch 27800, loss[loss=0.1324, simple_loss=0.2002, pruned_loss=0.03228, over 4821.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03351, over 970907.09 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:08:36,547 INFO [train.py:715] (7/8) Epoch 9, batch 27850, loss[loss=0.1259, simple_loss=0.2025, pruned_loss=0.02461, over 4830.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2126, pruned_loss=0.03377, over 971323.87 frames.], batch size: 26, lr: 2.31e-04 +2022-05-06 15:09:16,413 INFO [train.py:715] (7/8) Epoch 9, batch 27900, loss[loss=0.1508, simple_loss=0.234, pruned_loss=0.03377, over 4949.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2118, pruned_loss=0.03322, over 971265.04 frames.], batch size: 29, lr: 2.31e-04 +2022-05-06 15:09:54,912 INFO [train.py:715] (7/8) Epoch 9, batch 27950, loss[loss=0.1479, simple_loss=0.2155, pruned_loss=0.04016, over 4867.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03346, over 972174.48 frames.], batch size: 32, lr: 2.31e-04 +2022-05-06 15:10:34,269 INFO [train.py:715] (7/8) Epoch 9, batch 28000, loss[loss=0.1446, simple_loss=0.2191, pruned_loss=0.03504, over 4977.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03391, over 971694.93 frames.], batch size: 24, lr: 2.31e-04 +2022-05-06 15:11:13,574 INFO [train.py:715] (7/8) Epoch 9, batch 28050, loss[loss=0.1331, simple_loss=0.2005, pruned_loss=0.03286, over 4924.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2131, pruned_loss=0.03425, over 972113.02 frames.], batch size: 17, lr: 2.31e-04 +2022-05-06 15:11:52,643 INFO [train.py:715] (7/8) Epoch 9, batch 28100, loss[loss=0.1475, simple_loss=0.2201, pruned_loss=0.03752, over 4822.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03467, over 971874.43 frames.], batch size: 25, lr: 2.31e-04 +2022-05-06 15:12:31,910 INFO [train.py:715] (7/8) Epoch 9, batch 28150, loss[loss=0.124, simple_loss=0.2048, pruned_loss=0.02164, over 4921.00 frames.], tot_loss[loss=0.141, simple_loss=0.2133, pruned_loss=0.0343, over 972409.79 frames.], batch size: 29, lr: 2.31e-04 +2022-05-06 15:13:10,821 INFO [train.py:715] (7/8) Epoch 9, batch 28200, loss[loss=0.141, simple_loss=0.2171, pruned_loss=0.03242, over 4747.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03436, over 973308.37 frames.], batch size: 19, lr: 2.31e-04 +2022-05-06 15:13:50,250 INFO [train.py:715] (7/8) Epoch 9, batch 28250, loss[loss=0.1511, simple_loss=0.2279, pruned_loss=0.03718, over 4762.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2148, pruned_loss=0.03499, over 973014.97 frames.], batch size: 19, lr: 2.31e-04 +2022-05-06 15:14:28,530 INFO [train.py:715] (7/8) Epoch 9, batch 28300, loss[loss=0.1521, simple_loss=0.2114, pruned_loss=0.04643, over 4856.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03495, over 973791.61 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:15:07,478 INFO [train.py:715] (7/8) Epoch 
9, batch 28350, loss[loss=0.1404, simple_loss=0.2044, pruned_loss=0.03823, over 4937.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2156, pruned_loss=0.03482, over 973706.60 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:15:46,876 INFO [train.py:715] (7/8) Epoch 9, batch 28400, loss[loss=0.1552, simple_loss=0.2221, pruned_loss=0.0441, over 4828.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2155, pruned_loss=0.03484, over 973906.73 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:16:25,954 INFO [train.py:715] (7/8) Epoch 9, batch 28450, loss[loss=0.1429, simple_loss=0.2076, pruned_loss=0.03909, over 4772.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03448, over 973469.50 frames.], batch size: 17, lr: 2.31e-04 +2022-05-06 15:17:04,389 INFO [train.py:715] (7/8) Epoch 9, batch 28500, loss[loss=0.1534, simple_loss=0.2129, pruned_loss=0.04696, over 4950.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2142, pruned_loss=0.03464, over 973712.23 frames.], batch size: 39, lr: 2.31e-04 +2022-05-06 15:17:43,526 INFO [train.py:715] (7/8) Epoch 9, batch 28550, loss[loss=0.156, simple_loss=0.2292, pruned_loss=0.04143, over 4983.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.0352, over 973584.57 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:18:22,915 INFO [train.py:715] (7/8) Epoch 9, batch 28600, loss[loss=0.1334, simple_loss=0.2117, pruned_loss=0.02759, over 4857.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2155, pruned_loss=0.03531, over 973600.60 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:19:01,333 INFO [train.py:715] (7/8) Epoch 9, batch 28650, loss[loss=0.1271, simple_loss=0.2018, pruned_loss=0.0262, over 4867.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.0346, over 974196.00 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:19:40,173 INFO [train.py:715] (7/8) Epoch 9, batch 28700, loss[loss=0.1542, simple_loss=0.2272, pruned_loss=0.04062, over 4914.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2147, pruned_loss=0.03434, over 973730.11 frames.], batch size: 17, lr: 2.31e-04 +2022-05-06 15:20:19,625 INFO [train.py:715] (7/8) Epoch 9, batch 28750, loss[loss=0.1497, simple_loss=0.2187, pruned_loss=0.04033, over 4839.00 frames.], tot_loss[loss=0.141, simple_loss=0.2144, pruned_loss=0.03376, over 974165.93 frames.], batch size: 30, lr: 2.31e-04 +2022-05-06 15:20:58,326 INFO [train.py:715] (7/8) Epoch 9, batch 28800, loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03153, over 4832.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03364, over 973474.46 frames.], batch size: 26, lr: 2.31e-04 +2022-05-06 15:21:36,725 INFO [train.py:715] (7/8) Epoch 9, batch 28850, loss[loss=0.1577, simple_loss=0.2314, pruned_loss=0.04194, over 4859.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2143, pruned_loss=0.03401, over 972871.38 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:22:16,104 INFO [train.py:715] (7/8) Epoch 9, batch 28900, loss[loss=0.1416, simple_loss=0.2223, pruned_loss=0.03046, over 4888.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03457, over 972800.31 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:22:55,368 INFO [train.py:715] (7/8) Epoch 9, batch 28950, loss[loss=0.1412, simple_loss=0.212, pruned_loss=0.03526, over 4821.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03426, over 973099.96 frames.], batch size: 25, lr: 2.31e-04 +2022-05-06 15:23:33,683 INFO [train.py:715] (7/8) Epoch 9, batch 29000, 
loss[loss=0.1251, simple_loss=0.1897, pruned_loss=0.03022, over 4844.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03385, over 973043.31 frames.], batch size: 34, lr: 2.31e-04 +2022-05-06 15:24:12,159 INFO [train.py:715] (7/8) Epoch 9, batch 29050, loss[loss=0.1507, simple_loss=0.2284, pruned_loss=0.03646, over 4856.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03382, over 973032.70 frames.], batch size: 34, lr: 2.31e-04 +2022-05-06 15:24:51,100 INFO [train.py:715] (7/8) Epoch 9, batch 29100, loss[loss=0.1297, simple_loss=0.1956, pruned_loss=0.03195, over 4928.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03438, over 971788.07 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:25:30,247 INFO [train.py:715] (7/8) Epoch 9, batch 29150, loss[loss=0.1487, simple_loss=0.2181, pruned_loss=0.03962, over 4766.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2148, pruned_loss=0.03486, over 971972.66 frames.], batch size: 19, lr: 2.31e-04 +2022-05-06 15:26:09,098 INFO [train.py:715] (7/8) Epoch 9, batch 29200, loss[loss=0.1179, simple_loss=0.1976, pruned_loss=0.01911, over 4987.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2152, pruned_loss=0.03491, over 972378.04 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:26:48,462 INFO [train.py:715] (7/8) Epoch 9, batch 29250, loss[loss=0.1461, simple_loss=0.2126, pruned_loss=0.03974, over 4776.00 frames.], tot_loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03529, over 973090.00 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:27:27,200 INFO [train.py:715] (7/8) Epoch 9, batch 29300, loss[loss=0.1186, simple_loss=0.1999, pruned_loss=0.0187, over 4944.00 frames.], tot_loss[loss=0.142, simple_loss=0.2146, pruned_loss=0.03473, over 973233.37 frames.], batch size: 23, lr: 2.31e-04 +2022-05-06 15:28:06,269 INFO [train.py:715] (7/8) Epoch 9, batch 29350, loss[loss=0.1257, simple_loss=0.1935, pruned_loss=0.02892, over 4825.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03433, over 972274.05 frames.], batch size: 13, lr: 2.31e-04 +2022-05-06 15:28:45,212 INFO [train.py:715] (7/8) Epoch 9, batch 29400, loss[loss=0.1427, simple_loss=0.2211, pruned_loss=0.03222, over 4783.00 frames.], tot_loss[loss=0.1415, simple_loss=0.214, pruned_loss=0.03449, over 972139.01 frames.], batch size: 18, lr: 2.31e-04 +2022-05-06 15:29:23,948 INFO [train.py:715] (7/8) Epoch 9, batch 29450, loss[loss=0.1309, simple_loss=0.2079, pruned_loss=0.02694, over 4914.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2146, pruned_loss=0.03499, over 972438.23 frames.], batch size: 29, lr: 2.31e-04 +2022-05-06 15:30:02,407 INFO [train.py:715] (7/8) Epoch 9, batch 29500, loss[loss=0.1509, simple_loss=0.225, pruned_loss=0.03845, over 4798.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.035, over 973318.65 frames.], batch size: 21, lr: 2.31e-04 +2022-05-06 15:30:41,338 INFO [train.py:715] (7/8) Epoch 9, batch 29550, loss[loss=0.1679, simple_loss=0.2455, pruned_loss=0.04519, over 4760.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2147, pruned_loss=0.03458, over 973230.06 frames.], batch size: 16, lr: 2.31e-04 +2022-05-06 15:31:20,278 INFO [train.py:715] (7/8) Epoch 9, batch 29600, loss[loss=0.1536, simple_loss=0.2306, pruned_loss=0.03828, over 4756.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03445, over 973298.81 frames.], batch size: 19, lr: 2.31e-04 +2022-05-06 15:31:59,542 INFO [train.py:715] (7/8) Epoch 9, batch 29650, loss[loss=0.1463, 
simple_loss=0.2127, pruned_loss=0.03993, over 4961.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.03443, over 972221.01 frames.], batch size: 35, lr: 2.31e-04 +2022-05-06 15:32:39,147 INFO [train.py:715] (7/8) Epoch 9, batch 29700, loss[loss=0.1484, simple_loss=0.2298, pruned_loss=0.03345, over 4844.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.03417, over 972168.80 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:33:17,091 INFO [train.py:715] (7/8) Epoch 9, batch 29750, loss[loss=0.1532, simple_loss=0.2254, pruned_loss=0.04047, over 4690.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03511, over 971544.72 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:33:55,774 INFO [train.py:715] (7/8) Epoch 9, batch 29800, loss[loss=0.1536, simple_loss=0.2334, pruned_loss=0.03695, over 4946.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2153, pruned_loss=0.03498, over 972377.63 frames.], batch size: 23, lr: 2.31e-04 +2022-05-06 15:34:34,896 INFO [train.py:715] (7/8) Epoch 9, batch 29850, loss[loss=0.1127, simple_loss=0.1841, pruned_loss=0.02064, over 4968.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2151, pruned_loss=0.03473, over 972088.40 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:35:13,061 INFO [train.py:715] (7/8) Epoch 9, batch 29900, loss[loss=0.1485, simple_loss=0.2155, pruned_loss=0.04075, over 4784.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2153, pruned_loss=0.03484, over 972864.42 frames.], batch size: 17, lr: 2.31e-04 +2022-05-06 15:35:52,537 INFO [train.py:715] (7/8) Epoch 9, batch 29950, loss[loss=0.138, simple_loss=0.2221, pruned_loss=0.02697, over 4900.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2155, pruned_loss=0.03492, over 972241.95 frames.], batch size: 22, lr: 2.31e-04 +2022-05-06 15:36:31,404 INFO [train.py:715] (7/8) Epoch 9, batch 30000, loss[loss=0.1381, simple_loss=0.2119, pruned_loss=0.03218, over 4687.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2141, pruned_loss=0.03418, over 972793.24 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:36:31,404 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 15:36:40,919 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1911, pruned_loss=0.01124, over 914524.00 frames. 
+2022-05-06 15:37:20,164 INFO [train.py:715] (7/8) Epoch 9, batch 30050, loss[loss=0.1515, simple_loss=0.2204, pruned_loss=0.04127, over 4959.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2155, pruned_loss=0.03455, over 973068.76 frames.], batch size: 35, lr: 2.31e-04 +2022-05-06 15:37:58,807 INFO [train.py:715] (7/8) Epoch 9, batch 30100, loss[loss=0.1121, simple_loss=0.1864, pruned_loss=0.01894, over 4807.00 frames.], tot_loss[loss=0.143, simple_loss=0.2159, pruned_loss=0.03503, over 972915.01 frames.], batch size: 25, lr: 2.31e-04 +2022-05-06 15:38:38,129 INFO [train.py:715] (7/8) Epoch 9, batch 30150, loss[loss=0.1511, simple_loss=0.2299, pruned_loss=0.03619, over 4975.00 frames.], tot_loss[loss=0.143, simple_loss=0.2157, pruned_loss=0.03519, over 972828.62 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:39:17,505 INFO [train.py:715] (7/8) Epoch 9, batch 30200, loss[loss=0.1228, simple_loss=0.2015, pruned_loss=0.02206, over 4811.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2158, pruned_loss=0.03498, over 973428.59 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:39:56,689 INFO [train.py:715] (7/8) Epoch 9, batch 30250, loss[loss=0.1235, simple_loss=0.1984, pruned_loss=0.02434, over 4986.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.03476, over 972325.50 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:40:35,246 INFO [train.py:715] (7/8) Epoch 9, batch 30300, loss[loss=0.1289, simple_loss=0.2046, pruned_loss=0.02656, over 4981.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2149, pruned_loss=0.03472, over 972829.17 frames.], batch size: 28, lr: 2.31e-04 +2022-05-06 15:41:14,057 INFO [train.py:715] (7/8) Epoch 9, batch 30350, loss[loss=0.1277, simple_loss=0.2033, pruned_loss=0.02603, over 4983.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03455, over 973315.49 frames.], batch size: 33, lr: 2.31e-04 +2022-05-06 15:41:53,485 INFO [train.py:715] (7/8) Epoch 9, batch 30400, loss[loss=0.1408, simple_loss=0.2102, pruned_loss=0.03571, over 4852.00 frames.], tot_loss[loss=0.142, simple_loss=0.2145, pruned_loss=0.03477, over 972999.82 frames.], batch size: 20, lr: 2.31e-04 +2022-05-06 15:42:32,292 INFO [train.py:715] (7/8) Epoch 9, batch 30450, loss[loss=0.1254, simple_loss=0.2013, pruned_loss=0.02472, over 4928.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2154, pruned_loss=0.03519, over 973023.76 frames.], batch size: 35, lr: 2.31e-04 +2022-05-06 15:43:10,916 INFO [train.py:715] (7/8) Epoch 9, batch 30500, loss[loss=0.1271, simple_loss=0.2059, pruned_loss=0.02415, over 4785.00 frames.], tot_loss[loss=0.1435, simple_loss=0.2163, pruned_loss=0.03537, over 973240.85 frames.], batch size: 14, lr: 2.31e-04 +2022-05-06 15:43:49,986 INFO [train.py:715] (7/8) Epoch 9, batch 30550, loss[loss=0.1361, simple_loss=0.2169, pruned_loss=0.02768, over 4898.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2156, pruned_loss=0.03475, over 972921.38 frames.], batch size: 17, lr: 2.31e-04 +2022-05-06 15:44:28,847 INFO [train.py:715] (7/8) Epoch 9, batch 30600, loss[loss=0.1228, simple_loss=0.2, pruned_loss=0.02283, over 4831.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2146, pruned_loss=0.03432, over 973013.68 frames.], batch size: 15, lr: 2.31e-04 +2022-05-06 15:45:06,880 INFO [train.py:715] (7/8) Epoch 9, batch 30650, loss[loss=0.1141, simple_loss=0.1863, pruned_loss=0.02099, over 4831.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03398, over 972758.11 frames.], batch size: 27, lr: 2.31e-04 +2022-05-06 15:45:45,882 
INFO [train.py:715] (7/8) Epoch 9, batch 30700, loss[loss=0.127, simple_loss=0.2101, pruned_loss=0.02191, over 4817.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03364, over 973136.40 frames.], batch size: 27, lr: 2.30e-04 +2022-05-06 15:46:27,570 INFO [train.py:715] (7/8) Epoch 9, batch 30750, loss[loss=0.1335, simple_loss=0.212, pruned_loss=0.02747, over 4927.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03376, over 973655.99 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 15:47:06,258 INFO [train.py:715] (7/8) Epoch 9, batch 30800, loss[loss=0.1451, simple_loss=0.2211, pruned_loss=0.0346, over 4909.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03398, over 973312.36 frames.], batch size: 17, lr: 2.30e-04 +2022-05-06 15:47:44,605 INFO [train.py:715] (7/8) Epoch 9, batch 30850, loss[loss=0.1401, simple_loss=0.2156, pruned_loss=0.03231, over 4838.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03396, over 972665.00 frames.], batch size: 30, lr: 2.30e-04 +2022-05-06 15:48:23,858 INFO [train.py:715] (7/8) Epoch 9, batch 30900, loss[loss=0.119, simple_loss=0.201, pruned_loss=0.01852, over 4767.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03373, over 971472.12 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 15:49:03,046 INFO [train.py:715] (7/8) Epoch 9, batch 30950, loss[loss=0.1396, simple_loss=0.2159, pruned_loss=0.03171, over 4924.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03359, over 971727.85 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 15:49:41,534 INFO [train.py:715] (7/8) Epoch 9, batch 31000, loss[loss=0.1604, simple_loss=0.2234, pruned_loss=0.04867, over 4883.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03362, over 972746.97 frames.], batch size: 19, lr: 2.30e-04 +2022-05-06 15:50:20,508 INFO [train.py:715] (7/8) Epoch 9, batch 31050, loss[loss=0.1274, simple_loss=0.2061, pruned_loss=0.02436, over 4962.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2147, pruned_loss=0.03444, over 971875.87 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 15:50:59,766 INFO [train.py:715] (7/8) Epoch 9, batch 31100, loss[loss=0.1303, simple_loss=0.2142, pruned_loss=0.02315, over 4809.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03441, over 971560.85 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 15:51:38,435 INFO [train.py:715] (7/8) Epoch 9, batch 31150, loss[loss=0.1267, simple_loss=0.2101, pruned_loss=0.02166, over 4976.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.03418, over 971727.80 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 15:52:17,018 INFO [train.py:715] (7/8) Epoch 9, batch 31200, loss[loss=0.1394, simple_loss=0.2103, pruned_loss=0.03425, over 4949.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.0339, over 972409.88 frames.], batch size: 24, lr: 2.30e-04 +2022-05-06 15:52:56,551 INFO [train.py:715] (7/8) Epoch 9, batch 31250, loss[loss=0.1377, simple_loss=0.2172, pruned_loss=0.02914, over 4987.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03386, over 973492.75 frames.], batch size: 28, lr: 2.30e-04 +2022-05-06 15:53:36,000 INFO [train.py:715] (7/8) Epoch 9, batch 31300, loss[loss=0.12, simple_loss=0.1987, pruned_loss=0.02068, over 4837.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03383, over 971800.07 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 15:54:14,969 INFO [train.py:715] (7/8) 
Epoch 9, batch 31350, loss[loss=0.1215, simple_loss=0.1862, pruned_loss=0.0284, over 4847.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03407, over 971807.62 frames.], batch size: 13, lr: 2.30e-04 +2022-05-06 15:54:53,758 INFO [train.py:715] (7/8) Epoch 9, batch 31400, loss[loss=0.1638, simple_loss=0.2259, pruned_loss=0.0508, over 4745.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.03392, over 971912.64 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 15:55:32,701 INFO [train.py:715] (7/8) Epoch 9, batch 31450, loss[loss=0.1531, simple_loss=0.2257, pruned_loss=0.04029, over 4941.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03415, over 972494.25 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 15:56:11,772 INFO [train.py:715] (7/8) Epoch 9, batch 31500, loss[loss=0.1164, simple_loss=0.1871, pruned_loss=0.02279, over 4927.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03406, over 973017.03 frames.], batch size: 23, lr: 2.30e-04 +2022-05-06 15:56:50,182 INFO [train.py:715] (7/8) Epoch 9, batch 31550, loss[loss=0.1379, simple_loss=0.2126, pruned_loss=0.03162, over 4810.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03425, over 972404.47 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 15:57:29,719 INFO [train.py:715] (7/8) Epoch 9, batch 31600, loss[loss=0.1604, simple_loss=0.2235, pruned_loss=0.04864, over 4793.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2137, pruned_loss=0.03455, over 972067.83 frames.], batch size: 14, lr: 2.30e-04 +2022-05-06 15:58:09,721 INFO [train.py:715] (7/8) Epoch 9, batch 31650, loss[loss=0.177, simple_loss=0.2373, pruned_loss=0.05836, over 4970.00 frames.], tot_loss[loss=0.142, simple_loss=0.2141, pruned_loss=0.03498, over 971742.55 frames.], batch size: 14, lr: 2.30e-04 +2022-05-06 15:58:48,442 INFO [train.py:715] (7/8) Epoch 9, batch 31700, loss[loss=0.1235, simple_loss=0.2, pruned_loss=0.02351, over 4917.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2153, pruned_loss=0.03529, over 972257.88 frames.], batch size: 29, lr: 2.30e-04 +2022-05-06 15:59:27,453 INFO [train.py:715] (7/8) Epoch 9, batch 31750, loss[loss=0.1327, simple_loss=0.2083, pruned_loss=0.02859, over 4943.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2151, pruned_loss=0.03526, over 972433.83 frames.], batch size: 24, lr: 2.30e-04 +2022-05-06 16:00:06,081 INFO [train.py:715] (7/8) Epoch 9, batch 31800, loss[loss=0.1214, simple_loss=0.2029, pruned_loss=0.01992, over 4857.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03416, over 972429.45 frames.], batch size: 20, lr: 2.30e-04 +2022-05-06 16:00:45,145 INFO [train.py:715] (7/8) Epoch 9, batch 31850, loss[loss=0.1278, simple_loss=0.2007, pruned_loss=0.02744, over 4699.00 frames.], tot_loss[loss=0.1397, simple_loss=0.213, pruned_loss=0.03318, over 971744.49 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:01:23,639 INFO [train.py:715] (7/8) Epoch 9, batch 31900, loss[loss=0.154, simple_loss=0.214, pruned_loss=0.04696, over 4989.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03369, over 972602.68 frames.], batch size: 14, lr: 2.30e-04 +2022-05-06 16:02:02,946 INFO [train.py:715] (7/8) Epoch 9, batch 31950, loss[loss=0.1355, simple_loss=0.2005, pruned_loss=0.03527, over 4839.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03365, over 972700.93 frames.], batch size: 13, lr: 2.30e-04 +2022-05-06 16:02:42,219 INFO [train.py:715] (7/8) Epoch 9, batch 32000, 
loss[loss=0.1448, simple_loss=0.2186, pruned_loss=0.03549, over 4823.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03359, over 972501.45 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:03:20,779 INFO [train.py:715] (7/8) Epoch 9, batch 32050, loss[loss=0.1127, simple_loss=0.188, pruned_loss=0.0187, over 4912.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03398, over 972565.99 frames.], batch size: 23, lr: 2.30e-04 +2022-05-06 16:03:59,268 INFO [train.py:715] (7/8) Epoch 9, batch 32100, loss[loss=0.1462, simple_loss=0.2234, pruned_loss=0.03455, over 4691.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03368, over 972393.53 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:04:38,261 INFO [train.py:715] (7/8) Epoch 9, batch 32150, loss[loss=0.1233, simple_loss=0.1972, pruned_loss=0.0247, over 4927.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.0335, over 973025.28 frames.], batch size: 29, lr: 2.30e-04 +2022-05-06 16:05:17,702 INFO [train.py:715] (7/8) Epoch 9, batch 32200, loss[loss=0.1327, simple_loss=0.2162, pruned_loss=0.02456, over 4980.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.03361, over 972692.41 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 16:05:55,461 INFO [train.py:715] (7/8) Epoch 9, batch 32250, loss[loss=0.1404, simple_loss=0.2104, pruned_loss=0.03518, over 4957.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2132, pruned_loss=0.03361, over 973436.98 frames.], batch size: 29, lr: 2.30e-04 +2022-05-06 16:06:34,667 INFO [train.py:715] (7/8) Epoch 9, batch 32300, loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02865, over 4811.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.0335, over 973075.86 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 16:07:13,843 INFO [train.py:715] (7/8) Epoch 9, batch 32350, loss[loss=0.1233, simple_loss=0.2015, pruned_loss=0.02255, over 4939.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03361, over 973043.94 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:07:52,331 INFO [train.py:715] (7/8) Epoch 9, batch 32400, loss[loss=0.1032, simple_loss=0.1695, pruned_loss=0.01843, over 4807.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03328, over 972667.62 frames.], batch size: 12, lr: 2.30e-04 +2022-05-06 16:08:31,416 INFO [train.py:715] (7/8) Epoch 9, batch 32450, loss[loss=0.1406, simple_loss=0.2247, pruned_loss=0.02822, over 4809.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03333, over 972229.28 frames.], batch size: 13, lr: 2.30e-04 +2022-05-06 16:09:10,518 INFO [train.py:715] (7/8) Epoch 9, batch 32500, loss[loss=0.1372, simple_loss=0.218, pruned_loss=0.02817, over 4793.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03332, over 971171.42 frames.], batch size: 24, lr: 2.30e-04 +2022-05-06 16:09:49,361 INFO [train.py:715] (7/8) Epoch 9, batch 32550, loss[loss=0.1475, simple_loss=0.2187, pruned_loss=0.03815, over 4944.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03325, over 971334.36 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:10:27,864 INFO [train.py:715] (7/8) Epoch 9, batch 32600, loss[loss=0.1294, simple_loss=0.2024, pruned_loss=0.02818, over 4981.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2128, pruned_loss=0.03413, over 971763.30 frames.], batch size: 39, lr: 2.30e-04 +2022-05-06 16:11:06,894 INFO [train.py:715] (7/8) Epoch 9, batch 32650, loss[loss=0.1306, 
simple_loss=0.2001, pruned_loss=0.03053, over 4890.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2129, pruned_loss=0.03415, over 971601.12 frames.], batch size: 17, lr: 2.30e-04 +2022-05-06 16:11:45,874 INFO [train.py:715] (7/8) Epoch 9, batch 32700, loss[loss=0.1494, simple_loss=0.2255, pruned_loss=0.03664, over 4760.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2123, pruned_loss=0.03357, over 971952.46 frames.], batch size: 18, lr: 2.30e-04 +2022-05-06 16:12:24,797 INFO [train.py:715] (7/8) Epoch 9, batch 32750, loss[loss=0.1131, simple_loss=0.1867, pruned_loss=0.01972, over 4790.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03344, over 972118.13 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:13:03,522 INFO [train.py:715] (7/8) Epoch 9, batch 32800, loss[loss=0.1483, simple_loss=0.2275, pruned_loss=0.03458, over 4787.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03349, over 971787.71 frames.], batch size: 14, lr: 2.30e-04 +2022-05-06 16:13:42,563 INFO [train.py:715] (7/8) Epoch 9, batch 32850, loss[loss=0.1387, simple_loss=0.2097, pruned_loss=0.03383, over 4710.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.03332, over 971369.01 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:14:21,305 INFO [train.py:715] (7/8) Epoch 9, batch 32900, loss[loss=0.1649, simple_loss=0.2367, pruned_loss=0.04659, over 4960.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03285, over 971284.96 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:14:59,684 INFO [train.py:715] (7/8) Epoch 9, batch 32950, loss[loss=0.1331, simple_loss=0.2093, pruned_loss=0.02847, over 4985.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2121, pruned_loss=0.03342, over 971173.51 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:15:38,641 INFO [train.py:715] (7/8) Epoch 9, batch 33000, loss[loss=0.1353, simple_loss=0.2118, pruned_loss=0.02944, over 4884.00 frames.], tot_loss[loss=0.1395, simple_loss=0.212, pruned_loss=0.03354, over 971296.55 frames.], batch size: 22, lr: 2.30e-04 +2022-05-06 16:15:38,642 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 16:15:48,000 INFO [train.py:742] (7/8) Epoch 9, validation: loss=0.1068, simple_loss=0.1913, pruned_loss=0.01119, over 914524.00 frames. 
+2022-05-06 16:16:27,264 INFO [train.py:715] (7/8) Epoch 9, batch 33050, loss[loss=0.1332, simple_loss=0.2111, pruned_loss=0.02769, over 4750.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03388, over 972002.00 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 16:17:06,453 INFO [train.py:715] (7/8) Epoch 9, batch 33100, loss[loss=0.145, simple_loss=0.2208, pruned_loss=0.03459, over 4811.00 frames.], tot_loss[loss=0.1417, simple_loss=0.214, pruned_loss=0.03463, over 971863.40 frames.], batch size: 13, lr: 2.30e-04 +2022-05-06 16:17:45,625 INFO [train.py:715] (7/8) Epoch 9, batch 33150, loss[loss=0.1653, simple_loss=0.2336, pruned_loss=0.04852, over 4777.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2151, pruned_loss=0.03516, over 971348.39 frames.], batch size: 17, lr: 2.30e-04 +2022-05-06 16:18:25,452 INFO [train.py:715] (7/8) Epoch 9, batch 33200, loss[loss=0.1493, simple_loss=0.2265, pruned_loss=0.03606, over 4882.00 frames.], tot_loss[loss=0.1431, simple_loss=0.2154, pruned_loss=0.03538, over 971086.94 frames.], batch size: 39, lr: 2.30e-04 +2022-05-06 16:19:04,996 INFO [train.py:715] (7/8) Epoch 9, batch 33250, loss[loss=0.1411, simple_loss=0.224, pruned_loss=0.02913, over 4777.00 frames.], tot_loss[loss=0.1426, simple_loss=0.215, pruned_loss=0.03515, over 971182.67 frames.], batch size: 14, lr: 2.30e-04 +2022-05-06 16:19:44,052 INFO [train.py:715] (7/8) Epoch 9, batch 33300, loss[loss=0.1271, simple_loss=0.1994, pruned_loss=0.02739, over 4987.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03403, over 971339.09 frames.], batch size: 25, lr: 2.30e-04 +2022-05-06 16:20:23,552 INFO [train.py:715] (7/8) Epoch 9, batch 33350, loss[loss=0.1519, simple_loss=0.2162, pruned_loss=0.04379, over 4767.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03377, over 971382.96 frames.], batch size: 14, lr: 2.30e-04 +2022-05-06 16:21:03,300 INFO [train.py:715] (7/8) Epoch 9, batch 33400, loss[loss=0.1621, simple_loss=0.2255, pruned_loss=0.04933, over 4759.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.03428, over 971002.14 frames.], batch size: 16, lr: 2.30e-04 +2022-05-06 16:21:43,054 INFO [train.py:715] (7/8) Epoch 9, batch 33450, loss[loss=0.1169, simple_loss=0.1973, pruned_loss=0.01829, over 4856.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03452, over 971813.73 frames.], batch size: 20, lr: 2.30e-04 +2022-05-06 16:22:22,075 INFO [train.py:715] (7/8) Epoch 9, batch 33500, loss[loss=0.1266, simple_loss=0.2, pruned_loss=0.02665, over 4938.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2145, pruned_loss=0.03453, over 971942.20 frames.], batch size: 29, lr: 2.30e-04 +2022-05-06 16:23:00,827 INFO [train.py:715] (7/8) Epoch 9, batch 33550, loss[loss=0.1635, simple_loss=0.2536, pruned_loss=0.03677, over 4712.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2141, pruned_loss=0.03402, over 971224.51 frames.], batch size: 15, lr: 2.30e-04 +2022-05-06 16:23:40,549 INFO [train.py:715] (7/8) Epoch 9, batch 33600, loss[loss=0.1558, simple_loss=0.223, pruned_loss=0.0443, over 4926.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2148, pruned_loss=0.03442, over 971704.59 frames.], batch size: 23, lr: 2.30e-04 +2022-05-06 16:24:19,321 INFO [train.py:715] (7/8) Epoch 9, batch 33650, loss[loss=0.187, simple_loss=0.258, pruned_loss=0.05797, over 4938.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2155, pruned_loss=0.03475, over 971973.92 frames.], batch size: 21, lr: 2.30e-04 +2022-05-06 16:24:58,235 INFO 
[train.py:715] (7/8) Epoch 9, batch 33700, loss[loss=0.1386, simple_loss=0.2195, pruned_loss=0.02889, over 4847.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03455, over 971130.16 frames.], batch size: 13, lr: 2.29e-04 +2022-05-06 16:25:37,408 INFO [train.py:715] (7/8) Epoch 9, batch 33750, loss[loss=0.1372, simple_loss=0.208, pruned_loss=0.03326, over 4808.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2145, pruned_loss=0.0343, over 971414.94 frames.], batch size: 25, lr: 2.29e-04 +2022-05-06 16:26:16,197 INFO [train.py:715] (7/8) Epoch 9, batch 33800, loss[loss=0.1368, simple_loss=0.2202, pruned_loss=0.02666, over 4908.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2144, pruned_loss=0.03416, over 971778.75 frames.], batch size: 17, lr: 2.29e-04 +2022-05-06 16:26:54,911 INFO [train.py:715] (7/8) Epoch 9, batch 33850, loss[loss=0.135, simple_loss=0.206, pruned_loss=0.03195, over 4842.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2156, pruned_loss=0.03458, over 971358.15 frames.], batch size: 32, lr: 2.29e-04 +2022-05-06 16:27:33,755 INFO [train.py:715] (7/8) Epoch 9, batch 33900, loss[loss=0.136, simple_loss=0.2059, pruned_loss=0.03302, over 4952.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2151, pruned_loss=0.03484, over 971182.23 frames.], batch size: 21, lr: 2.29e-04 +2022-05-06 16:28:13,484 INFO [train.py:715] (7/8) Epoch 9, batch 33950, loss[loss=0.1519, simple_loss=0.2124, pruned_loss=0.04572, over 4981.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03432, over 971495.93 frames.], batch size: 31, lr: 2.29e-04 +2022-05-06 16:28:52,282 INFO [train.py:715] (7/8) Epoch 9, batch 34000, loss[loss=0.1245, simple_loss=0.2022, pruned_loss=0.02335, over 4768.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03421, over 971704.82 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:29:31,513 INFO [train.py:715] (7/8) Epoch 9, batch 34050, loss[loss=0.1426, simple_loss=0.218, pruned_loss=0.03354, over 4843.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03414, over 971815.36 frames.], batch size: 32, lr: 2.29e-04 +2022-05-06 16:30:09,975 INFO [train.py:715] (7/8) Epoch 9, batch 34100, loss[loss=0.1455, simple_loss=0.2155, pruned_loss=0.03781, over 4837.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.034, over 972388.38 frames.], batch size: 32, lr: 2.29e-04 +2022-05-06 16:30:49,074 INFO [train.py:715] (7/8) Epoch 9, batch 34150, loss[loss=0.1579, simple_loss=0.2336, pruned_loss=0.04107, over 4898.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.03404, over 973064.47 frames.], batch size: 19, lr: 2.29e-04 +2022-05-06 16:31:27,542 INFO [train.py:715] (7/8) Epoch 9, batch 34200, loss[loss=0.1327, simple_loss=0.199, pruned_loss=0.03316, over 4789.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03423, over 972843.39 frames.], batch size: 24, lr: 2.29e-04 +2022-05-06 16:32:05,779 INFO [train.py:715] (7/8) Epoch 9, batch 34250, loss[loss=0.1232, simple_loss=0.202, pruned_loss=0.02218, over 4929.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03439, over 973292.73 frames.], batch size: 29, lr: 2.29e-04 +2022-05-06 16:32:45,092 INFO [train.py:715] (7/8) Epoch 9, batch 34300, loss[loss=0.1342, simple_loss=0.224, pruned_loss=0.02221, over 4925.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2152, pruned_loss=0.03503, over 972857.82 frames.], batch size: 29, lr: 2.29e-04 +2022-05-06 16:33:23,851 INFO [train.py:715] (7/8) Epoch 9, 
batch 34350, loss[loss=0.1343, simple_loss=0.2167, pruned_loss=0.02594, over 4864.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2137, pruned_loss=0.03454, over 973427.15 frames.], batch size: 38, lr: 2.29e-04 +2022-05-06 16:34:02,525 INFO [train.py:715] (7/8) Epoch 9, batch 34400, loss[loss=0.1563, simple_loss=0.2326, pruned_loss=0.03996, over 4847.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2147, pruned_loss=0.03482, over 972441.60 frames.], batch size: 30, lr: 2.29e-04 +2022-05-06 16:34:41,412 INFO [train.py:715] (7/8) Epoch 9, batch 34450, loss[loss=0.1385, simple_loss=0.2163, pruned_loss=0.03035, over 4770.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03419, over 971883.40 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:35:20,343 INFO [train.py:715] (7/8) Epoch 9, batch 34500, loss[loss=0.12, simple_loss=0.1853, pruned_loss=0.02732, over 4786.00 frames.], tot_loss[loss=0.1421, simple_loss=0.215, pruned_loss=0.03462, over 972682.61 frames.], batch size: 12, lr: 2.29e-04 +2022-05-06 16:35:59,376 INFO [train.py:715] (7/8) Epoch 9, batch 34550, loss[loss=0.1497, simple_loss=0.2233, pruned_loss=0.0381, over 4877.00 frames.], tot_loss[loss=0.1428, simple_loss=0.2156, pruned_loss=0.03499, over 972246.09 frames.], batch size: 19, lr: 2.29e-04 +2022-05-06 16:36:38,004 INFO [train.py:715] (7/8) Epoch 9, batch 34600, loss[loss=0.154, simple_loss=0.2233, pruned_loss=0.04234, over 4779.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03514, over 972747.49 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:37:17,107 INFO [train.py:715] (7/8) Epoch 9, batch 34650, loss[loss=0.115, simple_loss=0.1838, pruned_loss=0.02313, over 4826.00 frames.], tot_loss[loss=0.1426, simple_loss=0.2146, pruned_loss=0.03532, over 972789.36 frames.], batch size: 27, lr: 2.29e-04 +2022-05-06 16:37:56,494 INFO [train.py:715] (7/8) Epoch 9, batch 34700, loss[loss=0.1588, simple_loss=0.2197, pruned_loss=0.04898, over 4981.00 frames.], tot_loss[loss=0.142, simple_loss=0.2141, pruned_loss=0.03491, over 973094.45 frames.], batch size: 14, lr: 2.29e-04 +2022-05-06 16:38:34,785 INFO [train.py:715] (7/8) Epoch 9, batch 34750, loss[loss=0.1639, simple_loss=0.237, pruned_loss=0.04544, over 4829.00 frames.], tot_loss[loss=0.1419, simple_loss=0.214, pruned_loss=0.03488, over 972600.18 frames.], batch size: 30, lr: 2.29e-04 +2022-05-06 16:39:12,245 INFO [train.py:715] (7/8) Epoch 9, batch 34800, loss[loss=0.1457, simple_loss=0.2216, pruned_loss=0.03488, over 4921.00 frames.], tot_loss[loss=0.141, simple_loss=0.2131, pruned_loss=0.03448, over 972013.51 frames.], batch size: 18, lr: 2.29e-04 +2022-05-06 16:40:01,156 INFO [train.py:715] (7/8) Epoch 10, batch 0, loss[loss=0.1492, simple_loss=0.2292, pruned_loss=0.03457, over 4865.00 frames.], tot_loss[loss=0.1492, simple_loss=0.2292, pruned_loss=0.03457, over 4865.00 frames.], batch size: 20, lr: 2.19e-04 +2022-05-06 16:40:41,029 INFO [train.py:715] (7/8) Epoch 10, batch 50, loss[loss=0.1281, simple_loss=0.2065, pruned_loss=0.02484, over 4805.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2135, pruned_loss=0.03317, over 219508.88 frames.], batch size: 21, lr: 2.19e-04 +2022-05-06 16:41:20,755 INFO [train.py:715] (7/8) Epoch 10, batch 100, loss[loss=0.1404, simple_loss=0.2183, pruned_loss=0.03121, over 4908.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2135, pruned_loss=0.0332, over 387087.29 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:42:00,751 INFO [train.py:715] (7/8) Epoch 10, batch 150, loss[loss=0.1278, 
simple_loss=0.2045, pruned_loss=0.02553, over 4921.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.0334, over 516499.92 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:42:41,343 INFO [train.py:715] (7/8) Epoch 10, batch 200, loss[loss=0.1534, simple_loss=0.2311, pruned_loss=0.0378, over 4944.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2137, pruned_loss=0.03349, over 617552.15 frames.], batch size: 24, lr: 2.19e-04 +2022-05-06 16:43:22,397 INFO [train.py:715] (7/8) Epoch 10, batch 250, loss[loss=0.1476, simple_loss=0.2173, pruned_loss=0.03895, over 4696.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2144, pruned_loss=0.03395, over 695813.97 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 16:44:03,219 INFO [train.py:715] (7/8) Epoch 10, batch 300, loss[loss=0.1704, simple_loss=0.226, pruned_loss=0.05746, over 4916.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2144, pruned_loss=0.03461, over 757509.85 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:44:43,691 INFO [train.py:715] (7/8) Epoch 10, batch 350, loss[loss=0.1186, simple_loss=0.1914, pruned_loss=0.02284, over 4775.00 frames.], tot_loss[loss=0.1421, simple_loss=0.2144, pruned_loss=0.03487, over 804508.88 frames.], batch size: 17, lr: 2.19e-04 +2022-05-06 16:45:25,024 INFO [train.py:715] (7/8) Epoch 10, batch 400, loss[loss=0.1788, simple_loss=0.266, pruned_loss=0.04578, over 4932.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03452, over 842324.90 frames.], batch size: 21, lr: 2.19e-04 +2022-05-06 16:46:06,716 INFO [train.py:715] (7/8) Epoch 10, batch 450, loss[loss=0.137, simple_loss=0.2136, pruned_loss=0.03025, over 4813.00 frames.], tot_loss[loss=0.1424, simple_loss=0.215, pruned_loss=0.03488, over 871187.99 frames.], batch size: 13, lr: 2.19e-04 +2022-05-06 16:46:47,447 INFO [train.py:715] (7/8) Epoch 10, batch 500, loss[loss=0.135, simple_loss=0.2132, pruned_loss=0.02846, over 4822.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2139, pruned_loss=0.03399, over 893614.27 frames.], batch size: 25, lr: 2.19e-04 +2022-05-06 16:47:28,883 INFO [train.py:715] (7/8) Epoch 10, batch 550, loss[loss=0.1549, simple_loss=0.2182, pruned_loss=0.04579, over 4897.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2141, pruned_loss=0.03416, over 911295.84 frames.], batch size: 17, lr: 2.19e-04 +2022-05-06 16:48:10,023 INFO [train.py:715] (7/8) Epoch 10, batch 600, loss[loss=0.1497, simple_loss=0.2236, pruned_loss=0.03788, over 4767.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.0344, over 924373.02 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:48:50,537 INFO [train.py:715] (7/8) Epoch 10, batch 650, loss[loss=0.1353, simple_loss=0.2128, pruned_loss=0.02892, over 4873.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03381, over 935428.67 frames.], batch size: 22, lr: 2.19e-04 +2022-05-06 16:49:31,186 INFO [train.py:715] (7/8) Epoch 10, batch 700, loss[loss=0.1711, simple_loss=0.2428, pruned_loss=0.04972, over 4964.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03387, over 943732.12 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 16:50:12,727 INFO [train.py:715] (7/8) Epoch 10, batch 750, loss[loss=0.152, simple_loss=0.223, pruned_loss=0.0405, over 4750.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2136, pruned_loss=0.03405, over 950224.90 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:50:54,001 INFO [train.py:715] (7/8) Epoch 10, batch 800, loss[loss=0.1466, simple_loss=0.2268, pruned_loss=0.03325, 
over 4831.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2134, pruned_loss=0.03406, over 956335.57 frames.], batch size: 30, lr: 2.19e-04 +2022-05-06 16:51:34,424 INFO [train.py:715] (7/8) Epoch 10, batch 850, loss[loss=0.1686, simple_loss=0.2595, pruned_loss=0.03885, over 4973.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.0342, over 960358.80 frames.], batch size: 24, lr: 2.19e-04 +2022-05-06 16:52:15,230 INFO [train.py:715] (7/8) Epoch 10, batch 900, loss[loss=0.1436, simple_loss=0.211, pruned_loss=0.03813, over 4790.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03437, over 964142.73 frames.], batch size: 21, lr: 2.19e-04 +2022-05-06 16:52:55,738 INFO [train.py:715] (7/8) Epoch 10, batch 950, loss[loss=0.1591, simple_loss=0.23, pruned_loss=0.04416, over 4815.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2135, pruned_loss=0.0344, over 965949.58 frames.], batch size: 25, lr: 2.19e-04 +2022-05-06 16:53:35,736 INFO [train.py:715] (7/8) Epoch 10, batch 1000, loss[loss=0.1417, simple_loss=0.2176, pruned_loss=0.03285, over 4912.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03414, over 966752.27 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:54:14,959 INFO [train.py:715] (7/8) Epoch 10, batch 1050, loss[loss=0.145, simple_loss=0.2168, pruned_loss=0.03661, over 4809.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.03407, over 967118.30 frames.], batch size: 13, lr: 2.19e-04 +2022-05-06 16:54:55,331 INFO [train.py:715] (7/8) Epoch 10, batch 1100, loss[loss=0.1725, simple_loss=0.2473, pruned_loss=0.04887, over 4704.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03441, over 968001.35 frames.], batch size: 15, lr: 2.19e-04 +2022-05-06 16:55:34,630 INFO [train.py:715] (7/8) Epoch 10, batch 1150, loss[loss=0.1178, simple_loss=0.1937, pruned_loss=0.02092, over 4790.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.0341, over 969499.33 frames.], batch size: 24, lr: 2.19e-04 +2022-05-06 16:56:13,830 INFO [train.py:715] (7/8) Epoch 10, batch 1200, loss[loss=0.1035, simple_loss=0.1774, pruned_loss=0.01481, over 4904.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03387, over 970134.43 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 16:56:53,601 INFO [train.py:715] (7/8) Epoch 10, batch 1250, loss[loss=0.1338, simple_loss=0.2101, pruned_loss=0.02875, over 4770.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2125, pruned_loss=0.03379, over 969707.27 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:57:32,224 INFO [train.py:715] (7/8) Epoch 10, batch 1300, loss[loss=0.1502, simple_loss=0.2102, pruned_loss=0.04505, over 4787.00 frames.], tot_loss[loss=0.1397, simple_loss=0.212, pruned_loss=0.03372, over 968929.95 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 16:58:11,020 INFO [train.py:715] (7/8) Epoch 10, batch 1350, loss[loss=0.15, simple_loss=0.2156, pruned_loss=0.04225, over 4911.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2124, pruned_loss=0.03373, over 969602.21 frames.], batch size: 17, lr: 2.19e-04 +2022-05-06 16:58:49,196 INFO [train.py:715] (7/8) Epoch 10, batch 1400, loss[loss=0.1216, simple_loss=0.203, pruned_loss=0.02013, over 4788.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2123, pruned_loss=0.03368, over 970293.52 frames.], batch size: 17, lr: 2.19e-04 +2022-05-06 16:59:28,747 INFO [train.py:715] (7/8) Epoch 10, batch 1450, loss[loss=0.1238, simple_loss=0.1969, pruned_loss=0.02537, over 4898.00 frames.], 
tot_loss[loss=0.1402, simple_loss=0.2126, pruned_loss=0.03392, over 970891.77 frames.], batch size: 22, lr: 2.19e-04 +2022-05-06 17:00:07,718 INFO [train.py:715] (7/8) Epoch 10, batch 1500, loss[loss=0.1173, simple_loss=0.1922, pruned_loss=0.02115, over 4815.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2126, pruned_loss=0.03408, over 969897.47 frames.], batch size: 26, lr: 2.19e-04 +2022-05-06 17:00:46,477 INFO [train.py:715] (7/8) Epoch 10, batch 1550, loss[loss=0.108, simple_loss=0.1864, pruned_loss=0.0148, over 4986.00 frames.], tot_loss[loss=0.1399, simple_loss=0.212, pruned_loss=0.03392, over 971060.18 frames.], batch size: 28, lr: 2.19e-04 +2022-05-06 17:01:25,571 INFO [train.py:715] (7/8) Epoch 10, batch 1600, loss[loss=0.1388, simple_loss=0.2152, pruned_loss=0.03127, over 4905.00 frames.], tot_loss[loss=0.14, simple_loss=0.2121, pruned_loss=0.03396, over 971142.97 frames.], batch size: 19, lr: 2.19e-04 +2022-05-06 17:02:04,990 INFO [train.py:715] (7/8) Epoch 10, batch 1650, loss[loss=0.1222, simple_loss=0.2043, pruned_loss=0.02007, over 4983.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2131, pruned_loss=0.03459, over 971134.57 frames.], batch size: 24, lr: 2.19e-04 +2022-05-06 17:02:43,708 INFO [train.py:715] (7/8) Epoch 10, batch 1700, loss[loss=0.1327, simple_loss=0.2123, pruned_loss=0.02656, over 4719.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2123, pruned_loss=0.03401, over 970890.70 frames.], batch size: 12, lr: 2.19e-04 +2022-05-06 17:03:22,054 INFO [train.py:715] (7/8) Epoch 10, batch 1750, loss[loss=0.1209, simple_loss=0.2012, pruned_loss=0.02027, over 4900.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2115, pruned_loss=0.03349, over 971459.93 frames.], batch size: 22, lr: 2.19e-04 +2022-05-06 17:04:02,178 INFO [train.py:715] (7/8) Epoch 10, batch 1800, loss[loss=0.1287, simple_loss=0.1957, pruned_loss=0.03084, over 4779.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2111, pruned_loss=0.0338, over 971592.04 frames.], batch size: 18, lr: 2.19e-04 +2022-05-06 17:04:41,816 INFO [train.py:715] (7/8) Epoch 10, batch 1850, loss[loss=0.1345, simple_loss=0.2164, pruned_loss=0.02623, over 4755.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2117, pruned_loss=0.03377, over 971228.50 frames.], batch size: 16, lr: 2.19e-04 +2022-05-06 17:05:20,554 INFO [train.py:715] (7/8) Epoch 10, batch 1900, loss[loss=0.1361, simple_loss=0.2163, pruned_loss=0.02799, over 4921.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2116, pruned_loss=0.03372, over 971712.62 frames.], batch size: 39, lr: 2.19e-04 +2022-05-06 17:05:59,512 INFO [train.py:715] (7/8) Epoch 10, batch 1950, loss[loss=0.136, simple_loss=0.2016, pruned_loss=0.03516, over 4959.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2115, pruned_loss=0.03388, over 972639.90 frames.], batch size: 14, lr: 2.18e-04 +2022-05-06 17:06:39,839 INFO [train.py:715] (7/8) Epoch 10, batch 2000, loss[loss=0.1246, simple_loss=0.2021, pruned_loss=0.02351, over 4942.00 frames.], tot_loss[loss=0.1399, simple_loss=0.212, pruned_loss=0.0339, over 972166.45 frames.], batch size: 23, lr: 2.18e-04 +2022-05-06 17:07:19,137 INFO [train.py:715] (7/8) Epoch 10, batch 2050, loss[loss=0.1505, simple_loss=0.2252, pruned_loss=0.03788, over 4930.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2137, pruned_loss=0.03491, over 972712.42 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:07:57,720 INFO [train.py:715] (7/8) Epoch 10, batch 2100, loss[loss=0.1425, simple_loss=0.2139, pruned_loss=0.03551, over 4977.00 frames.], tot_loss[loss=0.1413, 
simple_loss=0.2135, pruned_loss=0.0345, over 972710.70 frames.], batch size: 14, lr: 2.18e-04 +2022-05-06 17:08:37,351 INFO [train.py:715] (7/8) Epoch 10, batch 2150, loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03277, over 4821.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2125, pruned_loss=0.03416, over 971403.93 frames.], batch size: 13, lr: 2.18e-04 +2022-05-06 17:09:16,489 INFO [train.py:715] (7/8) Epoch 10, batch 2200, loss[loss=0.1587, simple_loss=0.2207, pruned_loss=0.04833, over 4778.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2122, pruned_loss=0.034, over 971906.88 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:09:55,195 INFO [train.py:715] (7/8) Epoch 10, batch 2250, loss[loss=0.1408, simple_loss=0.2086, pruned_loss=0.03649, over 4837.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03424, over 971824.17 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:10:33,971 INFO [train.py:715] (7/8) Epoch 10, batch 2300, loss[loss=0.1573, simple_loss=0.2218, pruned_loss=0.04639, over 4861.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2123, pruned_loss=0.03411, over 971676.70 frames.], batch size: 32, lr: 2.18e-04 +2022-05-06 17:11:13,696 INFO [train.py:715] (7/8) Epoch 10, batch 2350, loss[loss=0.1354, simple_loss=0.1954, pruned_loss=0.03767, over 4856.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2112, pruned_loss=0.03362, over 970947.05 frames.], batch size: 30, lr: 2.18e-04 +2022-05-06 17:11:52,505 INFO [train.py:715] (7/8) Epoch 10, batch 2400, loss[loss=0.1245, simple_loss=0.1996, pruned_loss=0.02464, over 4928.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2106, pruned_loss=0.03352, over 971179.49 frames.], batch size: 29, lr: 2.18e-04 +2022-05-06 17:12:31,237 INFO [train.py:715] (7/8) Epoch 10, batch 2450, loss[loss=0.1376, simple_loss=0.2186, pruned_loss=0.02829, over 4794.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2109, pruned_loss=0.03324, over 970203.58 frames.], batch size: 21, lr: 2.18e-04 +2022-05-06 17:13:10,538 INFO [train.py:715] (7/8) Epoch 10, batch 2500, loss[loss=0.1705, simple_loss=0.2364, pruned_loss=0.05228, over 4849.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2109, pruned_loss=0.03332, over 971031.99 frames.], batch size: 34, lr: 2.18e-04 +2022-05-06 17:13:49,921 INFO [train.py:715] (7/8) Epoch 10, batch 2550, loss[loss=0.15, simple_loss=0.2203, pruned_loss=0.03981, over 4907.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2112, pruned_loss=0.0335, over 971038.97 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:14:29,345 INFO [train.py:715] (7/8) Epoch 10, batch 2600, loss[loss=0.1324, simple_loss=0.1983, pruned_loss=0.03326, over 4902.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2124, pruned_loss=0.03395, over 971171.39 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:15:08,459 INFO [train.py:715] (7/8) Epoch 10, batch 2650, loss[loss=0.1325, simple_loss=0.2052, pruned_loss=0.02996, over 4752.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2127, pruned_loss=0.03385, over 971929.60 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:15:47,660 INFO [train.py:715] (7/8) Epoch 10, batch 2700, loss[loss=0.1566, simple_loss=0.2267, pruned_loss=0.04323, over 4960.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03406, over 972457.64 frames.], batch size: 39, lr: 2.18e-04 +2022-05-06 17:16:26,376 INFO [train.py:715] (7/8) Epoch 10, batch 2750, loss[loss=0.1469, simple_loss=0.2135, pruned_loss=0.0402, over 4960.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, 
pruned_loss=0.0341, over 973271.10 frames.], batch size: 24, lr: 2.18e-04 +2022-05-06 17:17:05,080 INFO [train.py:715] (7/8) Epoch 10, batch 2800, loss[loss=0.142, simple_loss=0.2178, pruned_loss=0.0331, over 4916.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2143, pruned_loss=0.03417, over 972704.37 frames.], batch size: 29, lr: 2.18e-04 +2022-05-06 17:17:43,819 INFO [train.py:715] (7/8) Epoch 10, batch 2850, loss[loss=0.1518, simple_loss=0.2239, pruned_loss=0.03986, over 4882.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2141, pruned_loss=0.03404, over 972009.32 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:18:23,064 INFO [train.py:715] (7/8) Epoch 10, batch 2900, loss[loss=0.1249, simple_loss=0.204, pruned_loss=0.02295, over 4908.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03355, over 971720.66 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:19:02,255 INFO [train.py:715] (7/8) Epoch 10, batch 2950, loss[loss=0.1468, simple_loss=0.2105, pruned_loss=0.0415, over 4969.00 frames.], tot_loss[loss=0.1395, simple_loss=0.212, pruned_loss=0.03354, over 971667.37 frames.], batch size: 35, lr: 2.18e-04 +2022-05-06 17:19:40,634 INFO [train.py:715] (7/8) Epoch 10, batch 3000, loss[loss=0.1549, simple_loss=0.2302, pruned_loss=0.03979, over 4774.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03345, over 972686.43 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:19:40,634 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 17:19:50,101 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1908, pruned_loss=0.01113, over 914524.00 frames. +2022-05-06 17:20:28,628 INFO [train.py:715] (7/8) Epoch 10, batch 3050, loss[loss=0.1168, simple_loss=0.188, pruned_loss=0.02282, over 4769.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03365, over 972515.30 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:21:07,570 INFO [train.py:715] (7/8) Epoch 10, batch 3100, loss[loss=0.1394, simple_loss=0.2094, pruned_loss=0.03469, over 4878.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.0335, over 972753.87 frames.], batch size: 13, lr: 2.18e-04 +2022-05-06 17:21:46,719 INFO [train.py:715] (7/8) Epoch 10, batch 3150, loss[loss=0.1424, simple_loss=0.2187, pruned_loss=0.033, over 4819.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03369, over 971772.43 frames.], batch size: 25, lr: 2.18e-04 +2022-05-06 17:22:25,531 INFO [train.py:715] (7/8) Epoch 10, batch 3200, loss[loss=0.1433, simple_loss=0.2266, pruned_loss=0.03003, over 4748.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03399, over 972325.19 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:23:03,966 INFO [train.py:715] (7/8) Epoch 10, batch 3250, loss[loss=0.1572, simple_loss=0.2252, pruned_loss=0.04458, over 4877.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2142, pruned_loss=0.0344, over 972056.26 frames.], batch size: 32, lr: 2.18e-04 +2022-05-06 17:23:44,491 INFO [train.py:715] (7/8) Epoch 10, batch 3300, loss[loss=0.1443, simple_loss=0.2204, pruned_loss=0.03412, over 4949.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03464, over 973110.96 frames.], batch size: 24, lr: 2.18e-04 +2022-05-06 17:24:24,206 INFO [train.py:715] (7/8) Epoch 10, batch 3350, loss[loss=0.1334, simple_loss=0.2031, pruned_loss=0.03183, over 4806.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.0345, over 973778.75 frames.], batch size: 21, lr: 2.18e-04 
+2022-05-06 17:25:04,070 INFO [train.py:715] (7/8) Epoch 10, batch 3400, loss[loss=0.1632, simple_loss=0.2399, pruned_loss=0.04322, over 4689.00 frames.], tot_loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03509, over 973504.46 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:25:44,889 INFO [train.py:715] (7/8) Epoch 10, batch 3450, loss[loss=0.1661, simple_loss=0.2397, pruned_loss=0.04624, over 4895.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2145, pruned_loss=0.03493, over 973542.84 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:26:26,603 INFO [train.py:715] (7/8) Epoch 10, batch 3500, loss[loss=0.1315, simple_loss=0.2083, pruned_loss=0.02736, over 4906.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03423, over 972671.02 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:27:07,262 INFO [train.py:715] (7/8) Epoch 10, batch 3550, loss[loss=0.1351, simple_loss=0.2114, pruned_loss=0.02946, over 4764.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.03406, over 971797.82 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:27:48,540 INFO [train.py:715] (7/8) Epoch 10, batch 3600, loss[loss=0.1368, simple_loss=0.2062, pruned_loss=0.03377, over 4928.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03436, over 971635.93 frames.], batch size: 23, lr: 2.18e-04 +2022-05-06 17:28:29,169 INFO [train.py:715] (7/8) Epoch 10, batch 3650, loss[loss=0.144, simple_loss=0.2187, pruned_loss=0.03464, over 4843.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2132, pruned_loss=0.03418, over 971917.81 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:29:10,568 INFO [train.py:715] (7/8) Epoch 10, batch 3700, loss[loss=0.1468, simple_loss=0.2192, pruned_loss=0.03717, over 4875.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2131, pruned_loss=0.03418, over 971955.44 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:29:51,153 INFO [train.py:715] (7/8) Epoch 10, batch 3750, loss[loss=0.1498, simple_loss=0.2237, pruned_loss=0.03791, over 4764.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2128, pruned_loss=0.03402, over 971589.86 frames.], batch size: 14, lr: 2.18e-04 +2022-05-06 17:30:32,382 INFO [train.py:715] (7/8) Epoch 10, batch 3800, loss[loss=0.1279, simple_loss=0.1909, pruned_loss=0.03246, over 4735.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2118, pruned_loss=0.03338, over 972287.42 frames.], batch size: 12, lr: 2.18e-04 +2022-05-06 17:31:13,747 INFO [train.py:715] (7/8) Epoch 10, batch 3850, loss[loss=0.1316, simple_loss=0.2052, pruned_loss=0.029, over 4963.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2124, pruned_loss=0.03366, over 972746.37 frames.], batch size: 15, lr: 2.18e-04 +2022-05-06 17:31:54,697 INFO [train.py:715] (7/8) Epoch 10, batch 3900, loss[loss=0.1154, simple_loss=0.1783, pruned_loss=0.02625, over 4791.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2127, pruned_loss=0.03395, over 973006.81 frames.], batch size: 12, lr: 2.18e-04 +2022-05-06 17:32:36,894 INFO [train.py:715] (7/8) Epoch 10, batch 3950, loss[loss=0.1334, simple_loss=0.2042, pruned_loss=0.03133, over 4908.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2131, pruned_loss=0.03453, over 973008.37 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:33:16,173 INFO [train.py:715] (7/8) Epoch 10, batch 4000, loss[loss=0.1429, simple_loss=0.2215, pruned_loss=0.03216, over 4849.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2126, pruned_loss=0.03438, over 972933.61 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 
17:33:55,837 INFO [train.py:715] (7/8) Epoch 10, batch 4050, loss[loss=0.1495, simple_loss=0.2234, pruned_loss=0.03783, over 4740.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2127, pruned_loss=0.0338, over 972404.48 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:34:34,557 INFO [train.py:715] (7/8) Epoch 10, batch 4100, loss[loss=0.126, simple_loss=0.1893, pruned_loss=0.03135, over 4854.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03394, over 972700.59 frames.], batch size: 32, lr: 2.18e-04 +2022-05-06 17:35:13,435 INFO [train.py:715] (7/8) Epoch 10, batch 4150, loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03303, over 4847.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2132, pruned_loss=0.03418, over 972789.56 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 17:35:52,992 INFO [train.py:715] (7/8) Epoch 10, batch 4200, loss[loss=0.1395, simple_loss=0.2067, pruned_loss=0.03616, over 4909.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.0336, over 972988.11 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:36:31,676 INFO [train.py:715] (7/8) Epoch 10, batch 4250, loss[loss=0.1472, simple_loss=0.2153, pruned_loss=0.03956, over 4817.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.03402, over 972404.09 frames.], batch size: 26, lr: 2.18e-04 +2022-05-06 17:37:10,490 INFO [train.py:715] (7/8) Epoch 10, batch 4300, loss[loss=0.1555, simple_loss=0.2329, pruned_loss=0.03904, over 4982.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03378, over 972410.83 frames.], batch size: 28, lr: 2.18e-04 +2022-05-06 17:37:49,694 INFO [train.py:715] (7/8) Epoch 10, batch 4350, loss[loss=0.1623, simple_loss=0.2306, pruned_loss=0.04703, over 4778.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03407, over 972382.66 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:38:28,651 INFO [train.py:715] (7/8) Epoch 10, batch 4400, loss[loss=0.1545, simple_loss=0.2334, pruned_loss=0.03777, over 4891.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03396, over 972560.37 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:39:07,614 INFO [train.py:715] (7/8) Epoch 10, batch 4450, loss[loss=0.1592, simple_loss=0.2292, pruned_loss=0.04458, over 4859.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2133, pruned_loss=0.03404, over 972976.21 frames.], batch size: 20, lr: 2.18e-04 +2022-05-06 17:39:46,323 INFO [train.py:715] (7/8) Epoch 10, batch 4500, loss[loss=0.1263, simple_loss=0.1992, pruned_loss=0.02672, over 4935.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03414, over 973139.90 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:40:25,799 INFO [train.py:715] (7/8) Epoch 10, batch 4550, loss[loss=0.188, simple_loss=0.2452, pruned_loss=0.06543, over 4749.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.03413, over 973367.57 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:41:04,678 INFO [train.py:715] (7/8) Epoch 10, batch 4600, loss[loss=0.1271, simple_loss=0.2009, pruned_loss=0.02666, over 4789.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03407, over 972768.41 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:41:43,577 INFO [train.py:715] (7/8) Epoch 10, batch 4650, loss[loss=0.111, simple_loss=0.1856, pruned_loss=0.01821, over 4769.00 frames.], tot_loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03463, over 972317.09 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:42:23,832 INFO 
[train.py:715] (7/8) Epoch 10, batch 4700, loss[loss=0.1397, simple_loss=0.2184, pruned_loss=0.03054, over 4873.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03464, over 972400.55 frames.], batch size: 22, lr: 2.18e-04 +2022-05-06 17:43:03,976 INFO [train.py:715] (7/8) Epoch 10, batch 4750, loss[loss=0.1737, simple_loss=0.236, pruned_loss=0.0557, over 4922.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2148, pruned_loss=0.03444, over 972295.63 frames.], batch size: 39, lr: 2.18e-04 +2022-05-06 17:43:43,164 INFO [train.py:715] (7/8) Epoch 10, batch 4800, loss[loss=0.1208, simple_loss=0.2001, pruned_loss=0.02072, over 4778.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2153, pruned_loss=0.03471, over 972317.60 frames.], batch size: 14, lr: 2.18e-04 +2022-05-06 17:44:22,997 INFO [train.py:715] (7/8) Epoch 10, batch 4850, loss[loss=0.1185, simple_loss=0.1949, pruned_loss=0.02102, over 4778.00 frames.], tot_loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03452, over 972510.75 frames.], batch size: 17, lr: 2.18e-04 +2022-05-06 17:45:02,948 INFO [train.py:715] (7/8) Epoch 10, batch 4900, loss[loss=0.1256, simple_loss=0.2003, pruned_loss=0.02547, over 4655.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2153, pruned_loss=0.03464, over 972341.17 frames.], batch size: 13, lr: 2.18e-04 +2022-05-06 17:45:42,397 INFO [train.py:715] (7/8) Epoch 10, batch 4950, loss[loss=0.1471, simple_loss=0.2168, pruned_loss=0.03869, over 4744.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2146, pruned_loss=0.03395, over 971656.06 frames.], batch size: 16, lr: 2.18e-04 +2022-05-06 17:46:21,437 INFO [train.py:715] (7/8) Epoch 10, batch 5000, loss[loss=0.1223, simple_loss=0.2077, pruned_loss=0.01849, over 4898.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2137, pruned_loss=0.03368, over 971736.57 frames.], batch size: 19, lr: 2.18e-04 +2022-05-06 17:47:00,597 INFO [train.py:715] (7/8) Epoch 10, batch 5050, loss[loss=0.1374, simple_loss=0.2026, pruned_loss=0.03606, over 4873.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03403, over 970989.28 frames.], batch size: 32, lr: 2.18e-04 +2022-05-06 17:47:39,528 INFO [train.py:715] (7/8) Epoch 10, batch 5100, loss[loss=0.1272, simple_loss=0.2035, pruned_loss=0.02545, over 4781.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03383, over 971943.44 frames.], batch size: 18, lr: 2.18e-04 +2022-05-06 17:48:18,802 INFO [train.py:715] (7/8) Epoch 10, batch 5150, loss[loss=0.1908, simple_loss=0.2479, pruned_loss=0.06687, over 4655.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2144, pruned_loss=0.03412, over 973013.30 frames.], batch size: 13, lr: 2.18e-04 +2022-05-06 17:48:58,637 INFO [train.py:715] (7/8) Epoch 10, batch 5200, loss[loss=0.1614, simple_loss=0.233, pruned_loss=0.04491, over 4938.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2144, pruned_loss=0.03414, over 973251.55 frames.], batch size: 39, lr: 2.17e-04 +2022-05-06 17:49:38,474 INFO [train.py:715] (7/8) Epoch 10, batch 5250, loss[loss=0.1718, simple_loss=0.243, pruned_loss=0.05033, over 4871.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2144, pruned_loss=0.03414, over 973193.88 frames.], batch size: 30, lr: 2.17e-04 +2022-05-06 17:50:17,852 INFO [train.py:715] (7/8) Epoch 10, batch 5300, loss[loss=0.1408, simple_loss=0.2264, pruned_loss=0.02757, over 4879.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2146, pruned_loss=0.03449, over 972872.93 frames.], batch size: 22, lr: 2.17e-04 +2022-05-06 17:50:57,194 INFO [train.py:715] (7/8) 
Epoch 10, batch 5350, loss[loss=0.1633, simple_loss=0.2362, pruned_loss=0.04525, over 4910.00 frames.], tot_loss[loss=0.142, simple_loss=0.2148, pruned_loss=0.03456, over 972954.39 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 17:51:37,024 INFO [train.py:715] (7/8) Epoch 10, batch 5400, loss[loss=0.133, simple_loss=0.2101, pruned_loss=0.02798, over 4839.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2146, pruned_loss=0.03434, over 972145.91 frames.], batch size: 30, lr: 2.17e-04 +2022-05-06 17:52:16,938 INFO [train.py:715] (7/8) Epoch 10, batch 5450, loss[loss=0.1596, simple_loss=0.228, pruned_loss=0.04561, over 4903.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2141, pruned_loss=0.03386, over 973133.62 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 17:52:56,345 INFO [train.py:715] (7/8) Epoch 10, batch 5500, loss[loss=0.1611, simple_loss=0.2227, pruned_loss=0.04976, over 4795.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2146, pruned_loss=0.03406, over 973028.17 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 17:53:36,104 INFO [train.py:715] (7/8) Epoch 10, batch 5550, loss[loss=0.1474, simple_loss=0.2238, pruned_loss=0.03547, over 4807.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03399, over 972387.52 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 17:54:16,054 INFO [train.py:715] (7/8) Epoch 10, batch 5600, loss[loss=0.1899, simple_loss=0.2463, pruned_loss=0.06679, over 4833.00 frames.], tot_loss[loss=0.141, simple_loss=0.2142, pruned_loss=0.03391, over 972450.00 frames.], batch size: 30, lr: 2.17e-04 +2022-05-06 17:54:55,814 INFO [train.py:715] (7/8) Epoch 10, batch 5650, loss[loss=0.1429, simple_loss=0.2227, pruned_loss=0.03155, over 4987.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2143, pruned_loss=0.03455, over 972777.31 frames.], batch size: 26, lr: 2.17e-04 +2022-05-06 17:55:34,981 INFO [train.py:715] (7/8) Epoch 10, batch 5700, loss[loss=0.139, simple_loss=0.2087, pruned_loss=0.03464, over 4943.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2144, pruned_loss=0.03449, over 972559.98 frames.], batch size: 23, lr: 2.17e-04 +2022-05-06 17:56:15,022 INFO [train.py:715] (7/8) Epoch 10, batch 5750, loss[loss=0.1412, simple_loss=0.2203, pruned_loss=0.03111, over 4958.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2138, pruned_loss=0.03392, over 972733.62 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 17:56:54,688 INFO [train.py:715] (7/8) Epoch 10, batch 5800, loss[loss=0.156, simple_loss=0.222, pruned_loss=0.04496, over 4822.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03377, over 973720.07 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 17:57:34,210 INFO [train.py:715] (7/8) Epoch 10, batch 5850, loss[loss=0.1358, simple_loss=0.2076, pruned_loss=0.03196, over 4757.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2127, pruned_loss=0.03389, over 974196.78 frames.], batch size: 19, lr: 2.17e-04 +2022-05-06 17:58:14,027 INFO [train.py:715] (7/8) Epoch 10, batch 5900, loss[loss=0.1155, simple_loss=0.1851, pruned_loss=0.02297, over 4698.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2128, pruned_loss=0.03399, over 973065.28 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 17:58:53,764 INFO [train.py:715] (7/8) Epoch 10, batch 5950, loss[loss=0.1437, simple_loss=0.232, pruned_loss=0.0277, over 4971.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03335, over 972660.25 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 17:59:33,429 INFO [train.py:715] (7/8) Epoch 10, batch 6000, 
loss[loss=0.1434, simple_loss=0.21, pruned_loss=0.03842, over 4848.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03316, over 972459.52 frames.], batch size: 30, lr: 2.17e-04 +2022-05-06 17:59:33,430 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 17:59:42,753 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1067, simple_loss=0.1909, pruned_loss=0.01126, over 914524.00 frames. +2022-05-06 18:00:22,326 INFO [train.py:715] (7/8) Epoch 10, batch 6050, loss[loss=0.1437, simple_loss=0.2073, pruned_loss=0.04004, over 4857.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03298, over 973032.75 frames.], batch size: 32, lr: 2.17e-04 +2022-05-06 18:01:00,747 INFO [train.py:715] (7/8) Epoch 10, batch 6100, loss[loss=0.1584, simple_loss=0.2395, pruned_loss=0.0386, over 4927.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03303, over 973418.77 frames.], batch size: 39, lr: 2.17e-04 +2022-05-06 18:01:40,211 INFO [train.py:715] (7/8) Epoch 10, batch 6150, loss[loss=0.1287, simple_loss=0.2056, pruned_loss=0.02594, over 4968.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03288, over 973373.46 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:02:20,071 INFO [train.py:715] (7/8) Epoch 10, batch 6200, loss[loss=0.1448, simple_loss=0.2098, pruned_loss=0.03989, over 4943.00 frames.], tot_loss[loss=0.14, simple_loss=0.2132, pruned_loss=0.0334, over 973845.01 frames.], batch size: 29, lr: 2.17e-04 +2022-05-06 18:02:59,941 INFO [train.py:715] (7/8) Epoch 10, batch 6250, loss[loss=0.1265, simple_loss=0.2002, pruned_loss=0.02637, over 4965.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03297, over 973358.71 frames.], batch size: 35, lr: 2.17e-04 +2022-05-06 18:03:39,471 INFO [train.py:715] (7/8) Epoch 10, batch 6300, loss[loss=0.1268, simple_loss=0.1997, pruned_loss=0.02696, over 4989.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03331, over 972960.22 frames.], batch size: 20, lr: 2.17e-04 +2022-05-06 18:04:19,279 INFO [train.py:715] (7/8) Epoch 10, batch 6350, loss[loss=0.1243, simple_loss=0.1889, pruned_loss=0.02985, over 4858.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2129, pruned_loss=0.03321, over 972915.06 frames.], batch size: 20, lr: 2.17e-04 +2022-05-06 18:04:58,329 INFO [train.py:715] (7/8) Epoch 10, batch 6400, loss[loss=0.1291, simple_loss=0.2031, pruned_loss=0.02759, over 4883.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03279, over 972065.81 frames.], batch size: 22, lr: 2.17e-04 +2022-05-06 18:05:36,736 INFO [train.py:715] (7/8) Epoch 10, batch 6450, loss[loss=0.1614, simple_loss=0.2248, pruned_loss=0.04895, over 4785.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03279, over 972705.40 frames.], batch size: 14, lr: 2.17e-04 +2022-05-06 18:06:15,662 INFO [train.py:715] (7/8) Epoch 10, batch 6500, loss[loss=0.1489, simple_loss=0.2212, pruned_loss=0.03829, over 4865.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03311, over 972564.60 frames.], batch size: 32, lr: 2.17e-04 +2022-05-06 18:06:54,776 INFO [train.py:715] (7/8) Epoch 10, batch 6550, loss[loss=0.1206, simple_loss=0.1898, pruned_loss=0.02572, over 4787.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2133, pruned_loss=0.03351, over 972044.20 frames.], batch size: 14, lr: 2.17e-04 +2022-05-06 18:07:33,915 INFO [train.py:715] (7/8) Epoch 10, batch 6600, loss[loss=0.1158, simple_loss=0.1985, pruned_loss=0.01654, over 
4820.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03347, over 972707.48 frames.], batch size: 27, lr: 2.17e-04 +2022-05-06 18:08:12,468 INFO [train.py:715] (7/8) Epoch 10, batch 6650, loss[loss=0.1207, simple_loss=0.1962, pruned_loss=0.02253, over 4961.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03376, over 972187.15 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:08:52,600 INFO [train.py:715] (7/8) Epoch 10, batch 6700, loss[loss=0.1285, simple_loss=0.1949, pruned_loss=0.03107, over 4993.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2137, pruned_loss=0.03408, over 972237.42 frames.], batch size: 14, lr: 2.17e-04 +2022-05-06 18:09:31,855 INFO [train.py:715] (7/8) Epoch 10, batch 6750, loss[loss=0.1425, simple_loss=0.2139, pruned_loss=0.03552, over 4717.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2139, pruned_loss=0.03425, over 972776.72 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:10:10,547 INFO [train.py:715] (7/8) Epoch 10, batch 6800, loss[loss=0.1429, simple_loss=0.2155, pruned_loss=0.03515, over 4968.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03423, over 973105.29 frames.], batch size: 14, lr: 2.17e-04 +2022-05-06 18:10:50,414 INFO [train.py:715] (7/8) Epoch 10, batch 6850, loss[loss=0.1255, simple_loss=0.2126, pruned_loss=0.0192, over 4929.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03351, over 971837.56 frames.], batch size: 29, lr: 2.17e-04 +2022-05-06 18:11:29,657 INFO [train.py:715] (7/8) Epoch 10, batch 6900, loss[loss=0.1653, simple_loss=0.2421, pruned_loss=0.04424, over 4711.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03303, over 971471.39 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:12:08,735 INFO [train.py:715] (7/8) Epoch 10, batch 6950, loss[loss=0.1255, simple_loss=0.1989, pruned_loss=0.02602, over 4835.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.0332, over 971503.61 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:12:48,641 INFO [train.py:715] (7/8) Epoch 10, batch 7000, loss[loss=0.1284, simple_loss=0.1902, pruned_loss=0.03333, over 4640.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03356, over 971459.54 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 18:13:28,550 INFO [train.py:715] (7/8) Epoch 10, batch 7050, loss[loss=0.1426, simple_loss=0.2176, pruned_loss=0.03387, over 4933.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2132, pruned_loss=0.03395, over 971993.55 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:14:07,746 INFO [train.py:715] (7/8) Epoch 10, batch 7100, loss[loss=0.1675, simple_loss=0.2497, pruned_loss=0.04262, over 4883.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2131, pruned_loss=0.03396, over 971891.42 frames.], batch size: 22, lr: 2.17e-04 +2022-05-06 18:14:46,904 INFO [train.py:715] (7/8) Epoch 10, batch 7150, loss[loss=0.1265, simple_loss=0.1926, pruned_loss=0.0302, over 4898.00 frames.], tot_loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03368, over 972162.77 frames.], batch size: 19, lr: 2.17e-04 +2022-05-06 18:15:26,298 INFO [train.py:715] (7/8) Epoch 10, batch 7200, loss[loss=0.1456, simple_loss=0.2209, pruned_loss=0.03522, over 4958.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03371, over 971942.14 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:16:05,420 INFO [train.py:715] (7/8) Epoch 10, batch 7250, loss[loss=0.1359, simple_loss=0.2129, pruned_loss=0.02939, over 4769.00 frames.], 
tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.0337, over 972505.94 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:16:44,411 INFO [train.py:715] (7/8) Epoch 10, batch 7300, loss[loss=0.1365, simple_loss=0.2164, pruned_loss=0.02831, over 4792.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03344, over 972600.82 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:17:23,329 INFO [train.py:715] (7/8) Epoch 10, batch 7350, loss[loss=0.1547, simple_loss=0.2333, pruned_loss=0.03804, over 4970.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03383, over 973510.78 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:18:02,743 INFO [train.py:715] (7/8) Epoch 10, batch 7400, loss[loss=0.1436, simple_loss=0.2196, pruned_loss=0.03382, over 4696.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03371, over 972362.85 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:18:41,887 INFO [train.py:715] (7/8) Epoch 10, batch 7450, loss[loss=0.1376, simple_loss=0.21, pruned_loss=0.03264, over 4923.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2136, pruned_loss=0.03367, over 972373.44 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:19:20,017 INFO [train.py:715] (7/8) Epoch 10, batch 7500, loss[loss=0.1231, simple_loss=0.203, pruned_loss=0.02157, over 4939.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2136, pruned_loss=0.03359, over 972221.22 frames.], batch size: 29, lr: 2.17e-04 +2022-05-06 18:19:59,644 INFO [train.py:715] (7/8) Epoch 10, batch 7550, loss[loss=0.1437, simple_loss=0.2071, pruned_loss=0.0401, over 4796.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.0338, over 972301.97 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:20:38,459 INFO [train.py:715] (7/8) Epoch 10, batch 7600, loss[loss=0.1249, simple_loss=0.2052, pruned_loss=0.0223, over 4698.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03377, over 972575.05 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:21:17,039 INFO [train.py:715] (7/8) Epoch 10, batch 7650, loss[loss=0.1352, simple_loss=0.2049, pruned_loss=0.03273, over 4793.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03326, over 971990.67 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:21:56,438 INFO [train.py:715] (7/8) Epoch 10, batch 7700, loss[loss=0.1445, simple_loss=0.2191, pruned_loss=0.03495, over 4774.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03332, over 972314.01 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:22:35,796 INFO [train.py:715] (7/8) Epoch 10, batch 7750, loss[loss=0.1267, simple_loss=0.2077, pruned_loss=0.0228, over 4749.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2137, pruned_loss=0.03367, over 972836.93 frames.], batch size: 16, lr: 2.17e-04 +2022-05-06 18:23:15,173 INFO [train.py:715] (7/8) Epoch 10, batch 7800, loss[loss=0.1446, simple_loss=0.2175, pruned_loss=0.03585, over 4942.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2131, pruned_loss=0.03336, over 973453.54 frames.], batch size: 23, lr: 2.17e-04 +2022-05-06 18:23:53,547 INFO [train.py:715] (7/8) Epoch 10, batch 7850, loss[loss=0.166, simple_loss=0.2426, pruned_loss=0.04475, over 4952.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.034, over 973571.72 frames.], batch size: 21, lr: 2.17e-04 +2022-05-06 18:24:33,023 INFO [train.py:715] (7/8) Epoch 10, batch 7900, loss[loss=0.1342, simple_loss=0.2067, pruned_loss=0.03087, over 4773.00 frames.], tot_loss[loss=0.1398, 
simple_loss=0.2127, pruned_loss=0.03344, over 972841.10 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:25:12,546 INFO [train.py:715] (7/8) Epoch 10, batch 7950, loss[loss=0.1559, simple_loss=0.2297, pruned_loss=0.04105, over 4780.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03382, over 972553.76 frames.], batch size: 18, lr: 2.17e-04 +2022-05-06 18:25:51,359 INFO [train.py:715] (7/8) Epoch 10, batch 8000, loss[loss=0.1519, simple_loss=0.2199, pruned_loss=0.04194, over 4785.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.0341, over 971920.64 frames.], batch size: 17, lr: 2.17e-04 +2022-05-06 18:26:30,790 INFO [train.py:715] (7/8) Epoch 10, batch 8050, loss[loss=0.1211, simple_loss=0.1912, pruned_loss=0.02554, over 4635.00 frames.], tot_loss[loss=0.1416, simple_loss=0.214, pruned_loss=0.03459, over 972092.81 frames.], batch size: 13, lr: 2.17e-04 +2022-05-06 18:27:10,411 INFO [train.py:715] (7/8) Epoch 10, batch 8100, loss[loss=0.1312, simple_loss=0.2071, pruned_loss=0.02765, over 4948.00 frames.], tot_loss[loss=0.1417, simple_loss=0.2142, pruned_loss=0.03462, over 971988.96 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:27:49,304 INFO [train.py:715] (7/8) Epoch 10, batch 8150, loss[loss=0.1747, simple_loss=0.2583, pruned_loss=0.0455, over 4979.00 frames.], tot_loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.03477, over 971611.87 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:28:27,917 INFO [train.py:715] (7/8) Epoch 10, batch 8200, loss[loss=0.1208, simple_loss=0.1957, pruned_loss=0.02295, over 4981.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03395, over 971667.78 frames.], batch size: 25, lr: 2.17e-04 +2022-05-06 18:29:07,591 INFO [train.py:715] (7/8) Epoch 10, batch 8250, loss[loss=0.1454, simple_loss=0.2133, pruned_loss=0.03876, over 4835.00 frames.], tot_loss[loss=0.1413, simple_loss=0.214, pruned_loss=0.03435, over 972055.23 frames.], batch size: 30, lr: 2.17e-04 +2022-05-06 18:29:46,989 INFO [train.py:715] (7/8) Epoch 10, batch 8300, loss[loss=0.1154, simple_loss=0.1901, pruned_loss=0.0204, over 4946.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03406, over 972408.05 frames.], batch size: 24, lr: 2.17e-04 +2022-05-06 18:30:25,736 INFO [train.py:715] (7/8) Epoch 10, batch 8350, loss[loss=0.1482, simple_loss=0.2199, pruned_loss=0.03827, over 4682.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03443, over 971197.23 frames.], batch size: 15, lr: 2.17e-04 +2022-05-06 18:31:05,470 INFO [train.py:715] (7/8) Epoch 10, batch 8400, loss[loss=0.1359, simple_loss=0.2069, pruned_loss=0.03244, over 4864.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2138, pruned_loss=0.03421, over 970679.47 frames.], batch size: 20, lr: 2.17e-04 +2022-05-06 18:31:44,985 INFO [train.py:715] (7/8) Epoch 10, batch 8450, loss[loss=0.1357, simple_loss=0.2097, pruned_loss=0.03081, over 4987.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2138, pruned_loss=0.03452, over 971419.81 frames.], batch size: 31, lr: 2.16e-04 +2022-05-06 18:32:23,266 INFO [train.py:715] (7/8) Epoch 10, batch 8500, loss[loss=0.146, simple_loss=0.223, pruned_loss=0.03449, over 4872.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03464, over 971967.44 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 18:33:02,052 INFO [train.py:715] (7/8) Epoch 10, batch 8550, loss[loss=0.1735, simple_loss=0.2487, pruned_loss=0.04921, over 4795.00 frames.], tot_loss[loss=0.1407, simple_loss=0.213, 
pruned_loss=0.03419, over 972310.77 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 18:33:41,302 INFO [train.py:715] (7/8) Epoch 10, batch 8600, loss[loss=0.1432, simple_loss=0.2097, pruned_loss=0.03832, over 4794.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03398, over 972095.56 frames.], batch size: 21, lr: 2.16e-04 +2022-05-06 18:34:19,984 INFO [train.py:715] (7/8) Epoch 10, batch 8650, loss[loss=0.14, simple_loss=0.1962, pruned_loss=0.04186, over 4849.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2131, pruned_loss=0.03396, over 971608.92 frames.], batch size: 12, lr: 2.16e-04 +2022-05-06 18:34:58,633 INFO [train.py:715] (7/8) Epoch 10, batch 8700, loss[loss=0.1621, simple_loss=0.2321, pruned_loss=0.04609, over 4986.00 frames.], tot_loss[loss=0.1408, simple_loss=0.213, pruned_loss=0.03427, over 971168.37 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:35:37,460 INFO [train.py:715] (7/8) Epoch 10, batch 8750, loss[loss=0.1104, simple_loss=0.1866, pruned_loss=0.01708, over 4907.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2131, pruned_loss=0.03469, over 971129.28 frames.], batch size: 29, lr: 2.16e-04 +2022-05-06 18:36:15,829 INFO [train.py:715] (7/8) Epoch 10, batch 8800, loss[loss=0.1359, simple_loss=0.2122, pruned_loss=0.02975, over 4793.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2126, pruned_loss=0.03415, over 971062.85 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:36:54,707 INFO [train.py:715] (7/8) Epoch 10, batch 8850, loss[loss=0.1374, simple_loss=0.2085, pruned_loss=0.03314, over 4965.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2135, pruned_loss=0.03436, over 971714.21 frames.], batch size: 35, lr: 2.16e-04 +2022-05-06 18:37:34,279 INFO [train.py:715] (7/8) Epoch 10, batch 8900, loss[loss=0.1315, simple_loss=0.1986, pruned_loss=0.03219, over 4961.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03386, over 971277.55 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:38:13,790 INFO [train.py:715] (7/8) Epoch 10, batch 8950, loss[loss=0.1419, simple_loss=0.2153, pruned_loss=0.03423, over 4957.00 frames.], tot_loss[loss=0.1412, simple_loss=0.214, pruned_loss=0.03417, over 971118.24 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 18:38:53,305 INFO [train.py:715] (7/8) Epoch 10, batch 9000, loss[loss=0.1527, simple_loss=0.2205, pruned_loss=0.04245, over 4909.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2141, pruned_loss=0.03407, over 970860.86 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:38:53,305 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 18:39:02,858 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1064, simple_loss=0.1907, pruned_loss=0.01106, over 914524.00 frames. 
+2022-05-06 18:39:42,087 INFO [train.py:715] (7/8) Epoch 10, batch 9050, loss[loss=0.133, simple_loss=0.2023, pruned_loss=0.03186, over 4842.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03422, over 970913.59 frames.], batch size: 34, lr: 2.16e-04 +2022-05-06 18:40:21,151 INFO [train.py:715] (7/8) Epoch 10, batch 9100, loss[loss=0.1133, simple_loss=0.1967, pruned_loss=0.01499, over 4969.00 frames.], tot_loss[loss=0.1422, simple_loss=0.2151, pruned_loss=0.03466, over 971737.61 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 18:41:01,482 INFO [train.py:715] (7/8) Epoch 10, batch 9150, loss[loss=0.1636, simple_loss=0.2355, pruned_loss=0.04583, over 4749.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2142, pruned_loss=0.0343, over 972274.70 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:41:40,997 INFO [train.py:715] (7/8) Epoch 10, batch 9200, loss[loss=0.147, simple_loss=0.2108, pruned_loss=0.04161, over 4929.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2139, pruned_loss=0.03446, over 972985.54 frames.], batch size: 35, lr: 2.16e-04 +2022-05-06 18:42:20,447 INFO [train.py:715] (7/8) Epoch 10, batch 9250, loss[loss=0.1156, simple_loss=0.1909, pruned_loss=0.0201, over 4957.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03467, over 973542.76 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:43:00,265 INFO [train.py:715] (7/8) Epoch 10, batch 9300, loss[loss=0.1257, simple_loss=0.2051, pruned_loss=0.02315, over 4763.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03418, over 973681.04 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:43:39,886 INFO [train.py:715] (7/8) Epoch 10, batch 9350, loss[loss=0.1459, simple_loss=0.2235, pruned_loss=0.0341, over 4927.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03416, over 972909.17 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 18:44:19,394 INFO [train.py:715] (7/8) Epoch 10, batch 9400, loss[loss=0.1624, simple_loss=0.2369, pruned_loss=0.04398, over 4775.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2145, pruned_loss=0.0342, over 972532.36 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:44:58,982 INFO [train.py:715] (7/8) Epoch 10, batch 9450, loss[loss=0.1774, simple_loss=0.2314, pruned_loss=0.0617, over 4974.00 frames.], tot_loss[loss=0.1421, simple_loss=0.215, pruned_loss=0.03459, over 972599.46 frames.], batch size: 28, lr: 2.16e-04 +2022-05-06 18:45:38,376 INFO [train.py:715] (7/8) Epoch 10, batch 9500, loss[loss=0.1447, simple_loss=0.2237, pruned_loss=0.03278, over 4939.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2144, pruned_loss=0.03476, over 973421.21 frames.], batch size: 29, lr: 2.16e-04 +2022-05-06 18:46:17,357 INFO [train.py:715] (7/8) Epoch 10, batch 9550, loss[loss=0.1382, simple_loss=0.2038, pruned_loss=0.03635, over 4835.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03431, over 974102.44 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:46:55,768 INFO [train.py:715] (7/8) Epoch 10, batch 9600, loss[loss=0.1328, simple_loss=0.1985, pruned_loss=0.03354, over 4764.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2134, pruned_loss=0.03361, over 973636.25 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 18:47:34,909 INFO [train.py:715] (7/8) Epoch 10, batch 9650, loss[loss=0.1243, simple_loss=0.1954, pruned_loss=0.02661, over 4824.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03338, over 973636.41 frames.], batch size: 13, lr: 2.16e-04 +2022-05-06 18:48:14,561 INFO 
[train.py:715] (7/8) Epoch 10, batch 9700, loss[loss=0.1523, simple_loss=0.2282, pruned_loss=0.03821, over 4940.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.0333, over 972964.29 frames.], batch size: 23, lr: 2.16e-04 +2022-05-06 18:48:52,979 INFO [train.py:715] (7/8) Epoch 10, batch 9750, loss[loss=0.1651, simple_loss=0.2324, pruned_loss=0.0489, over 4771.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03373, over 973009.60 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:49:32,216 INFO [train.py:715] (7/8) Epoch 10, batch 9800, loss[loss=0.1659, simple_loss=0.2226, pruned_loss=0.05457, over 4819.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03377, over 972185.35 frames.], batch size: 13, lr: 2.16e-04 +2022-05-06 18:50:11,750 INFO [train.py:715] (7/8) Epoch 10, batch 9850, loss[loss=0.1419, simple_loss=0.216, pruned_loss=0.03385, over 4751.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2146, pruned_loss=0.03423, over 972456.34 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:50:51,057 INFO [train.py:715] (7/8) Epoch 10, batch 9900, loss[loss=0.1553, simple_loss=0.2256, pruned_loss=0.0425, over 4917.00 frames.], tot_loss[loss=0.1419, simple_loss=0.2146, pruned_loss=0.03462, over 973537.40 frames.], batch size: 23, lr: 2.16e-04 +2022-05-06 18:51:30,048 INFO [train.py:715] (7/8) Epoch 10, batch 9950, loss[loss=0.1367, simple_loss=0.2109, pruned_loss=0.0313, over 4871.00 frames.], tot_loss[loss=0.1423, simple_loss=0.2149, pruned_loss=0.03482, over 973434.72 frames.], batch size: 30, lr: 2.16e-04 +2022-05-06 18:52:10,245 INFO [train.py:715] (7/8) Epoch 10, batch 10000, loss[loss=0.1174, simple_loss=0.1924, pruned_loss=0.02121, over 4784.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03379, over 972966.78 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 18:52:49,844 INFO [train.py:715] (7/8) Epoch 10, batch 10050, loss[loss=0.1507, simple_loss=0.2229, pruned_loss=0.03928, over 4963.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03411, over 973238.36 frames.], batch size: 28, lr: 2.16e-04 +2022-05-06 18:53:27,869 INFO [train.py:715] (7/8) Epoch 10, batch 10100, loss[loss=0.1292, simple_loss=0.2013, pruned_loss=0.02856, over 4904.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2131, pruned_loss=0.03373, over 972944.43 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:54:06,609 INFO [train.py:715] (7/8) Epoch 10, batch 10150, loss[loss=0.1274, simple_loss=0.2086, pruned_loss=0.02311, over 4894.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03391, over 972556.00 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 18:54:46,541 INFO [train.py:715] (7/8) Epoch 10, batch 10200, loss[loss=0.203, simple_loss=0.2418, pruned_loss=0.0821, over 4694.00 frames.], tot_loss[loss=0.1405, simple_loss=0.213, pruned_loss=0.03396, over 972473.43 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 18:55:25,659 INFO [train.py:715] (7/8) Epoch 10, batch 10250, loss[loss=0.1402, simple_loss=0.2169, pruned_loss=0.03172, over 4838.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03358, over 971799.49 frames.], batch size: 30, lr: 2.16e-04 +2022-05-06 18:56:04,513 INFO [train.py:715] (7/8) Epoch 10, batch 10300, loss[loss=0.139, simple_loss=0.2084, pruned_loss=0.03478, over 4791.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03352, over 972741.54 frames.], batch size: 13, lr: 2.16e-04 +2022-05-06 18:56:44,442 INFO [train.py:715] (7/8) 
Epoch 10, batch 10350, loss[loss=0.1523, simple_loss=0.2236, pruned_loss=0.04047, over 4953.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03315, over 973061.35 frames.], batch size: 39, lr: 2.16e-04 +2022-05-06 18:57:24,438 INFO [train.py:715] (7/8) Epoch 10, batch 10400, loss[loss=0.1524, simple_loss=0.228, pruned_loss=0.03838, over 4893.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03362, over 973639.97 frames.], batch size: 19, lr: 2.16e-04 +2022-05-06 18:58:02,845 INFO [train.py:715] (7/8) Epoch 10, batch 10450, loss[loss=0.1446, simple_loss=0.2239, pruned_loss=0.0327, over 4769.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.03358, over 973725.25 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 18:58:41,112 INFO [train.py:715] (7/8) Epoch 10, batch 10500, loss[loss=0.1343, simple_loss=0.2098, pruned_loss=0.02941, over 4877.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2128, pruned_loss=0.03393, over 973327.33 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 18:59:20,245 INFO [train.py:715] (7/8) Epoch 10, batch 10550, loss[loss=0.168, simple_loss=0.2351, pruned_loss=0.05044, over 4856.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03416, over 973568.77 frames.], batch size: 32, lr: 2.16e-04 +2022-05-06 18:59:59,206 INFO [train.py:715] (7/8) Epoch 10, batch 10600, loss[loss=0.1216, simple_loss=0.2023, pruned_loss=0.02044, over 4783.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.03383, over 973037.89 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 19:00:37,421 INFO [train.py:715] (7/8) Epoch 10, batch 10650, loss[loss=0.1444, simple_loss=0.2202, pruned_loss=0.03429, over 4841.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2143, pruned_loss=0.03434, over 973423.11 frames.], batch size: 30, lr: 2.16e-04 +2022-05-06 19:01:16,842 INFO [train.py:715] (7/8) Epoch 10, batch 10700, loss[loss=0.1786, simple_loss=0.2389, pruned_loss=0.05918, over 4818.00 frames.], tot_loss[loss=0.1425, simple_loss=0.2154, pruned_loss=0.03483, over 972230.78 frames.], batch size: 13, lr: 2.16e-04 +2022-05-06 19:01:56,166 INFO [train.py:715] (7/8) Epoch 10, batch 10750, loss[loss=0.114, simple_loss=0.1899, pruned_loss=0.01909, over 4966.00 frames.], tot_loss[loss=0.1418, simple_loss=0.2147, pruned_loss=0.03449, over 972096.39 frames.], batch size: 28, lr: 2.16e-04 +2022-05-06 19:02:34,991 INFO [train.py:715] (7/8) Epoch 10, batch 10800, loss[loss=0.1254, simple_loss=0.1921, pruned_loss=0.02934, over 4983.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2137, pruned_loss=0.03403, over 972716.99 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 19:03:13,437 INFO [train.py:715] (7/8) Epoch 10, batch 10850, loss[loss=0.1376, simple_loss=0.2119, pruned_loss=0.03161, over 4825.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03332, over 972370.79 frames.], batch size: 26, lr: 2.16e-04 +2022-05-06 19:03:52,881 INFO [train.py:715] (7/8) Epoch 10, batch 10900, loss[loss=0.1142, simple_loss=0.1903, pruned_loss=0.01907, over 4781.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03349, over 972471.75 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 19:04:31,791 INFO [train.py:715] (7/8) Epoch 10, batch 10950, loss[loss=0.168, simple_loss=0.2411, pruned_loss=0.04741, over 4792.00 frames.], tot_loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.03378, over 971750.59 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 19:05:10,348 INFO [train.py:715] (7/8) Epoch 10, 
batch 11000, loss[loss=0.1377, simple_loss=0.2155, pruned_loss=0.02995, over 4805.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2139, pruned_loss=0.03364, over 971701.10 frames.], batch size: 25, lr: 2.16e-04 +2022-05-06 19:05:49,498 INFO [train.py:715] (7/8) Epoch 10, batch 11050, loss[loss=0.1592, simple_loss=0.2225, pruned_loss=0.04795, over 4913.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2139, pruned_loss=0.03353, over 971967.42 frames.], batch size: 18, lr: 2.16e-04 +2022-05-06 19:06:29,283 INFO [train.py:715] (7/8) Epoch 10, batch 11100, loss[loss=0.1255, simple_loss=0.1896, pruned_loss=0.03068, over 4920.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03325, over 972392.33 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 19:07:07,080 INFO [train.py:715] (7/8) Epoch 10, batch 11150, loss[loss=0.1186, simple_loss=0.1982, pruned_loss=0.01947, over 4955.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.03286, over 972162.12 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 19:07:46,336 INFO [train.py:715] (7/8) Epoch 10, batch 11200, loss[loss=0.1261, simple_loss=0.2118, pruned_loss=0.02018, over 4947.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03241, over 971669.38 frames.], batch size: 24, lr: 2.16e-04 +2022-05-06 19:08:25,406 INFO [train.py:715] (7/8) Epoch 10, batch 11250, loss[loss=0.1427, simple_loss=0.2139, pruned_loss=0.03575, over 4870.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03271, over 971918.42 frames.], batch size: 22, lr: 2.16e-04 +2022-05-06 19:09:03,756 INFO [train.py:715] (7/8) Epoch 10, batch 11300, loss[loss=0.1705, simple_loss=0.2353, pruned_loss=0.05284, over 4972.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03256, over 972274.88 frames.], batch size: 35, lr: 2.16e-04 +2022-05-06 19:09:42,493 INFO [train.py:715] (7/8) Epoch 10, batch 11350, loss[loss=0.131, simple_loss=0.2111, pruned_loss=0.0255, over 4866.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03294, over 972081.48 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 19:10:21,471 INFO [train.py:715] (7/8) Epoch 10, batch 11400, loss[loss=0.1284, simple_loss=0.1905, pruned_loss=0.03315, over 4791.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03343, over 971685.56 frames.], batch size: 12, lr: 2.16e-04 +2022-05-06 19:11:00,939 INFO [train.py:715] (7/8) Epoch 10, batch 11450, loss[loss=0.1642, simple_loss=0.2274, pruned_loss=0.05048, over 4797.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03332, over 971867.48 frames.], batch size: 17, lr: 2.16e-04 +2022-05-06 19:11:38,823 INFO [train.py:715] (7/8) Epoch 10, batch 11500, loss[loss=0.1122, simple_loss=0.1827, pruned_loss=0.02087, over 4801.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03364, over 971726.22 frames.], batch size: 25, lr: 2.16e-04 +2022-05-06 19:12:17,873 INFO [train.py:715] (7/8) Epoch 10, batch 11550, loss[loss=0.1302, simple_loss=0.2051, pruned_loss=0.02763, over 4777.00 frames.], tot_loss[loss=0.1394, simple_loss=0.212, pruned_loss=0.03338, over 972086.95 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 19:12:57,426 INFO [train.py:715] (7/8) Epoch 10, batch 11600, loss[loss=0.126, simple_loss=0.1908, pruned_loss=0.03061, over 4799.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2116, pruned_loss=0.03344, over 972195.67 frames.], batch size: 14, lr: 2.16e-04 +2022-05-06 19:13:35,826 INFO [train.py:715] (7/8) Epoch 10, batch 11650, 
loss[loss=0.1447, simple_loss=0.2182, pruned_loss=0.03563, over 4749.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2113, pruned_loss=0.03303, over 971715.74 frames.], batch size: 16, lr: 2.16e-04 +2022-05-06 19:14:14,884 INFO [train.py:715] (7/8) Epoch 10, batch 11700, loss[loss=0.1453, simple_loss=0.2149, pruned_loss=0.03785, over 4828.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03288, over 970915.30 frames.], batch size: 15, lr: 2.16e-04 +2022-05-06 19:14:53,454 INFO [train.py:715] (7/8) Epoch 10, batch 11750, loss[loss=0.1453, simple_loss=0.2205, pruned_loss=0.03502, over 4694.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.033, over 970184.85 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:15:32,373 INFO [train.py:715] (7/8) Epoch 10, batch 11800, loss[loss=0.1213, simple_loss=0.1956, pruned_loss=0.02354, over 4764.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.0334, over 971042.63 frames.], batch size: 12, lr: 2.15e-04 +2022-05-06 19:16:10,396 INFO [train.py:715] (7/8) Epoch 10, batch 11850, loss[loss=0.1291, simple_loss=0.2045, pruned_loss=0.02686, over 4775.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03283, over 971303.34 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:16:49,164 INFO [train.py:715] (7/8) Epoch 10, batch 11900, loss[loss=0.1318, simple_loss=0.1972, pruned_loss=0.03322, over 4950.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03349, over 971651.48 frames.], batch size: 35, lr: 2.15e-04 +2022-05-06 19:17:30,484 INFO [train.py:715] (7/8) Epoch 10, batch 11950, loss[loss=0.1593, simple_loss=0.2224, pruned_loss=0.04813, over 4771.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03317, over 971017.36 frames.], batch size: 12, lr: 2.15e-04 +2022-05-06 19:18:09,373 INFO [train.py:715] (7/8) Epoch 10, batch 12000, loss[loss=0.1431, simple_loss=0.2068, pruned_loss=0.0397, over 4785.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03315, over 970149.28 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:18:09,374 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 19:18:19,016 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1908, pruned_loss=0.01105, over 914524.00 frames. 
+2022-05-06 19:18:57,899 INFO [train.py:715] (7/8) Epoch 10, batch 12050, loss[loss=0.1358, simple_loss=0.2018, pruned_loss=0.03485, over 4977.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.0328, over 972230.58 frames.], batch size: 31, lr: 2.15e-04 +2022-05-06 19:19:37,116 INFO [train.py:715] (7/8) Epoch 10, batch 12100, loss[loss=0.1439, simple_loss=0.2132, pruned_loss=0.03734, over 4943.00 frames.], tot_loss[loss=0.1395, simple_loss=0.213, pruned_loss=0.03295, over 971869.40 frames.], batch size: 39, lr: 2.15e-04 +2022-05-06 19:20:16,375 INFO [train.py:715] (7/8) Epoch 10, batch 12150, loss[loss=0.1383, simple_loss=0.218, pruned_loss=0.02929, over 4697.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2139, pruned_loss=0.03354, over 970947.55 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:20:55,543 INFO [train.py:715] (7/8) Epoch 10, batch 12200, loss[loss=0.1088, simple_loss=0.1804, pruned_loss=0.01864, over 4865.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2131, pruned_loss=0.03323, over 970877.60 frames.], batch size: 22, lr: 2.15e-04 +2022-05-06 19:21:34,088 INFO [train.py:715] (7/8) Epoch 10, batch 12250, loss[loss=0.1295, simple_loss=0.2081, pruned_loss=0.02547, over 4825.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03366, over 970999.94 frames.], batch size: 25, lr: 2.15e-04 +2022-05-06 19:22:13,029 INFO [train.py:715] (7/8) Epoch 10, batch 12300, loss[loss=0.1509, simple_loss=0.2203, pruned_loss=0.04071, over 4769.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.0341, over 971146.58 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:22:51,959 INFO [train.py:715] (7/8) Epoch 10, batch 12350, loss[loss=0.1547, simple_loss=0.23, pruned_loss=0.03972, over 4767.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2142, pruned_loss=0.03413, over 971866.41 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 19:23:30,789 INFO [train.py:715] (7/8) Epoch 10, batch 12400, loss[loss=0.1289, simple_loss=0.211, pruned_loss=0.02342, over 4989.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03367, over 971682.35 frames.], batch size: 28, lr: 2.15e-04 +2022-05-06 19:24:09,220 INFO [train.py:715] (7/8) Epoch 10, batch 12450, loss[loss=0.1208, simple_loss=0.1927, pruned_loss=0.02445, over 4787.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2129, pruned_loss=0.03336, over 970874.65 frames.], batch size: 21, lr: 2.15e-04 +2022-05-06 19:24:48,249 INFO [train.py:715] (7/8) Epoch 10, batch 12500, loss[loss=0.1386, simple_loss=0.2222, pruned_loss=0.02751, over 4973.00 frames.], tot_loss[loss=0.139, simple_loss=0.2125, pruned_loss=0.03271, over 971994.74 frames.], batch size: 28, lr: 2.15e-04 +2022-05-06 19:25:27,027 INFO [train.py:715] (7/8) Epoch 10, batch 12550, loss[loss=0.1474, simple_loss=0.2234, pruned_loss=0.03569, over 4867.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.03343, over 971917.20 frames.], batch size: 30, lr: 2.15e-04 +2022-05-06 19:26:05,182 INFO [train.py:715] (7/8) Epoch 10, batch 12600, loss[loss=0.1178, simple_loss=0.1962, pruned_loss=0.01972, over 4801.00 frames.], tot_loss[loss=0.1398, simple_loss=0.213, pruned_loss=0.0333, over 972518.14 frames.], batch size: 21, lr: 2.15e-04 +2022-05-06 19:26:43,472 INFO [train.py:715] (7/8) Epoch 10, batch 12650, loss[loss=0.1252, simple_loss=0.2049, pruned_loss=0.02277, over 4781.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03354, over 972558.49 frames.], batch size: 18, lr: 2.15e-04 +2022-05-06 
19:27:22,410 INFO [train.py:715] (7/8) Epoch 10, batch 12700, loss[loss=0.1431, simple_loss=0.2181, pruned_loss=0.03405, over 4847.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03333, over 972520.61 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:28:00,752 INFO [train.py:715] (7/8) Epoch 10, batch 12750, loss[loss=0.1167, simple_loss=0.187, pruned_loss=0.02317, over 4871.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2119, pruned_loss=0.0334, over 972093.47 frames.], batch size: 20, lr: 2.15e-04 +2022-05-06 19:28:39,217 INFO [train.py:715] (7/8) Epoch 10, batch 12800, loss[loss=0.1265, simple_loss=0.2117, pruned_loss=0.02062, over 4955.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03288, over 972345.24 frames.], batch size: 24, lr: 2.15e-04 +2022-05-06 19:29:18,645 INFO [train.py:715] (7/8) Epoch 10, batch 12850, loss[loss=0.1202, simple_loss=0.1958, pruned_loss=0.0223, over 4897.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03337, over 972785.62 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:29:57,812 INFO [train.py:715] (7/8) Epoch 10, batch 12900, loss[loss=0.1317, simple_loss=0.2174, pruned_loss=0.02302, over 4927.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03329, over 972223.80 frames.], batch size: 23, lr: 2.15e-04 +2022-05-06 19:30:36,229 INFO [train.py:715] (7/8) Epoch 10, batch 12950, loss[loss=0.1497, simple_loss=0.2147, pruned_loss=0.04234, over 4905.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.0332, over 972303.37 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:31:14,799 INFO [train.py:715] (7/8) Epoch 10, batch 13000, loss[loss=0.1443, simple_loss=0.2198, pruned_loss=0.03438, over 4871.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2123, pruned_loss=0.03355, over 971764.62 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:31:54,376 INFO [train.py:715] (7/8) Epoch 10, batch 13050, loss[loss=0.1373, simple_loss=0.2133, pruned_loss=0.03062, over 4639.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03352, over 972628.85 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:32:32,858 INFO [train.py:715] (7/8) Epoch 10, batch 13100, loss[loss=0.1385, simple_loss=0.2184, pruned_loss=0.02934, over 4858.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03419, over 972347.18 frames.], batch size: 20, lr: 2.15e-04 +2022-05-06 19:33:11,985 INFO [train.py:715] (7/8) Epoch 10, batch 13150, loss[loss=0.1174, simple_loss=0.1927, pruned_loss=0.02104, over 4788.00 frames.], tot_loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03409, over 972530.63 frames.], batch size: 17, lr: 2.15e-04 +2022-05-06 19:33:51,010 INFO [train.py:715] (7/8) Epoch 10, batch 13200, loss[loss=0.17, simple_loss=0.2436, pruned_loss=0.04825, over 4756.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03388, over 972685.65 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:34:30,049 INFO [train.py:715] (7/8) Epoch 10, batch 13250, loss[loss=0.1264, simple_loss=0.1935, pruned_loss=0.02959, over 4962.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03423, over 972536.30 frames.], batch size: 35, lr: 2.15e-04 +2022-05-06 19:35:08,701 INFO [train.py:715] (7/8) Epoch 10, batch 13300, loss[loss=0.1377, simple_loss=0.2041, pruned_loss=0.03569, over 4956.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03392, over 973707.51 frames.], batch size: 21, lr: 2.15e-04 +2022-05-06 19:35:47,102 
INFO [train.py:715] (7/8) Epoch 10, batch 13350, loss[loss=0.181, simple_loss=0.2521, pruned_loss=0.05494, over 4872.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03376, over 973692.32 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:36:26,374 INFO [train.py:715] (7/8) Epoch 10, batch 13400, loss[loss=0.1681, simple_loss=0.2298, pruned_loss=0.05323, over 4896.00 frames.], tot_loss[loss=0.141, simple_loss=0.2139, pruned_loss=0.03408, over 973202.62 frames.], batch size: 22, lr: 2.15e-04 +2022-05-06 19:37:04,725 INFO [train.py:715] (7/8) Epoch 10, batch 13450, loss[loss=0.1371, simple_loss=0.1979, pruned_loss=0.03811, over 4960.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03378, over 973716.60 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:37:42,966 INFO [train.py:715] (7/8) Epoch 10, batch 13500, loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.0315, over 4800.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03352, over 973486.78 frames.], batch size: 25, lr: 2.15e-04 +2022-05-06 19:38:22,034 INFO [train.py:715] (7/8) Epoch 10, batch 13550, loss[loss=0.1252, simple_loss=0.1968, pruned_loss=0.02677, over 4888.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03316, over 973475.89 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:39:00,607 INFO [train.py:715] (7/8) Epoch 10, batch 13600, loss[loss=0.1132, simple_loss=0.191, pruned_loss=0.01772, over 4866.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.03305, over 972013.90 frames.], batch size: 12, lr: 2.15e-04 +2022-05-06 19:39:39,006 INFO [train.py:715] (7/8) Epoch 10, batch 13650, loss[loss=0.1255, simple_loss=0.2057, pruned_loss=0.02267, over 4974.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03313, over 971814.75 frames.], batch size: 28, lr: 2.15e-04 +2022-05-06 19:40:17,580 INFO [train.py:715] (7/8) Epoch 10, batch 13700, loss[loss=0.1385, simple_loss=0.1982, pruned_loss=0.03946, over 4748.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2118, pruned_loss=0.03322, over 971953.65 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:40:57,645 INFO [train.py:715] (7/8) Epoch 10, batch 13750, loss[loss=0.1295, simple_loss=0.2076, pruned_loss=0.02573, over 4836.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2119, pruned_loss=0.03363, over 971713.00 frames.], batch size: 12, lr: 2.15e-04 +2022-05-06 19:41:37,006 INFO [train.py:715] (7/8) Epoch 10, batch 13800, loss[loss=0.1384, simple_loss=0.2158, pruned_loss=0.03048, over 4762.00 frames.], tot_loss[loss=0.14, simple_loss=0.2125, pruned_loss=0.03376, over 971825.90 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:42:15,512 INFO [train.py:715] (7/8) Epoch 10, batch 13850, loss[loss=0.1232, simple_loss=0.2017, pruned_loss=0.02241, over 4880.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.03398, over 971760.50 frames.], batch size: 22, lr: 2.15e-04 +2022-05-06 19:42:55,148 INFO [train.py:715] (7/8) Epoch 10, batch 13900, loss[loss=0.1193, simple_loss=0.1931, pruned_loss=0.02273, over 4989.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03423, over 972581.48 frames.], batch size: 25, lr: 2.15e-04 +2022-05-06 19:43:33,826 INFO [train.py:715] (7/8) Epoch 10, batch 13950, loss[loss=0.1127, simple_loss=0.1992, pruned_loss=0.01312, over 4840.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.03378, over 971997.21 frames.], batch size: 13, lr: 2.15e-04 +2022-05-06 19:44:12,833 INFO 
[train.py:715] (7/8) Epoch 10, batch 14000, loss[loss=0.1308, simple_loss=0.2003, pruned_loss=0.03067, over 4838.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2137, pruned_loss=0.03364, over 972023.93 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:44:51,238 INFO [train.py:715] (7/8) Epoch 10, batch 14050, loss[loss=0.1317, simple_loss=0.2001, pruned_loss=0.03169, over 4963.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03391, over 972210.14 frames.], batch size: 24, lr: 2.15e-04 +2022-05-06 19:45:30,771 INFO [train.py:715] (7/8) Epoch 10, batch 14100, loss[loss=0.1178, simple_loss=0.1962, pruned_loss=0.01974, over 4989.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2139, pruned_loss=0.03411, over 972116.41 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:46:09,126 INFO [train.py:715] (7/8) Epoch 10, batch 14150, loss[loss=0.14, simple_loss=0.2101, pruned_loss=0.03498, over 4820.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2138, pruned_loss=0.03396, over 972771.52 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:46:47,034 INFO [train.py:715] (7/8) Epoch 10, batch 14200, loss[loss=0.1359, simple_loss=0.2171, pruned_loss=0.02734, over 4985.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03387, over 972724.93 frames.], batch size: 26, lr: 2.15e-04 +2022-05-06 19:47:26,635 INFO [train.py:715] (7/8) Epoch 10, batch 14250, loss[loss=0.1483, simple_loss=0.2132, pruned_loss=0.0417, over 4752.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.03352, over 972590.10 frames.], batch size: 19, lr: 2.15e-04 +2022-05-06 19:48:05,010 INFO [train.py:715] (7/8) Epoch 10, batch 14300, loss[loss=0.1477, simple_loss=0.2109, pruned_loss=0.04227, over 4757.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2138, pruned_loss=0.03359, over 971565.02 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:48:43,129 INFO [train.py:715] (7/8) Epoch 10, batch 14350, loss[loss=0.1345, simple_loss=0.2106, pruned_loss=0.02914, over 4803.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03396, over 972674.39 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:49:21,566 INFO [train.py:715] (7/8) Epoch 10, batch 14400, loss[loss=0.125, simple_loss=0.1982, pruned_loss=0.0259, over 4698.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2144, pruned_loss=0.03442, over 972962.65 frames.], batch size: 15, lr: 2.15e-04 +2022-05-06 19:50:01,193 INFO [train.py:715] (7/8) Epoch 10, batch 14450, loss[loss=0.1529, simple_loss=0.2199, pruned_loss=0.04292, over 4959.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03428, over 973286.47 frames.], batch size: 28, lr: 2.15e-04 +2022-05-06 19:50:39,560 INFO [train.py:715] (7/8) Epoch 10, batch 14500, loss[loss=0.1333, simple_loss=0.21, pruned_loss=0.0283, over 4974.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03409, over 972863.05 frames.], batch size: 28, lr: 2.15e-04 +2022-05-06 19:51:17,698 INFO [train.py:715] (7/8) Epoch 10, batch 14550, loss[loss=0.154, simple_loss=0.2415, pruned_loss=0.03326, over 4819.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.0335, over 973267.38 frames.], batch size: 25, lr: 2.15e-04 +2022-05-06 19:51:57,347 INFO [train.py:715] (7/8) Epoch 10, batch 14600, loss[loss=0.1497, simple_loss=0.2233, pruned_loss=0.0381, over 4781.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2131, pruned_loss=0.03318, over 972947.91 frames.], batch size: 17, lr: 2.15e-04 +2022-05-06 19:52:35,980 INFO [train.py:715] 
(7/8) Epoch 10, batch 14650, loss[loss=0.1227, simple_loss=0.1952, pruned_loss=0.02514, over 4959.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03254, over 972530.77 frames.], batch size: 24, lr: 2.15e-04 +2022-05-06 19:53:14,370 INFO [train.py:715] (7/8) Epoch 10, batch 14700, loss[loss=0.1353, simple_loss=0.205, pruned_loss=0.03283, over 4813.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2112, pruned_loss=0.03279, over 971873.91 frames.], batch size: 27, lr: 2.15e-04 +2022-05-06 19:53:53,327 INFO [train.py:715] (7/8) Epoch 10, batch 14750, loss[loss=0.1134, simple_loss=0.183, pruned_loss=0.02189, over 4817.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2113, pruned_loss=0.03291, over 971300.41 frames.], batch size: 27, lr: 2.15e-04 +2022-05-06 19:54:33,132 INFO [train.py:715] (7/8) Epoch 10, batch 14800, loss[loss=0.1153, simple_loss=0.1872, pruned_loss=0.02175, over 4745.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03274, over 971466.81 frames.], batch size: 16, lr: 2.15e-04 +2022-05-06 19:55:12,160 INFO [train.py:715] (7/8) Epoch 10, batch 14850, loss[loss=0.1284, simple_loss=0.1957, pruned_loss=0.03053, over 4786.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03299, over 971990.10 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:55:50,175 INFO [train.py:715] (7/8) Epoch 10, batch 14900, loss[loss=0.1487, simple_loss=0.2212, pruned_loss=0.0381, over 4952.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03325, over 972200.83 frames.], batch size: 39, lr: 2.15e-04 +2022-05-06 19:56:30,294 INFO [train.py:715] (7/8) Epoch 10, batch 14950, loss[loss=0.1423, simple_loss=0.2119, pruned_loss=0.03636, over 4966.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03364, over 971931.75 frames.], batch size: 35, lr: 2.15e-04 +2022-05-06 19:57:09,816 INFO [train.py:715] (7/8) Epoch 10, batch 15000, loss[loss=0.1571, simple_loss=0.2381, pruned_loss=0.03807, over 4988.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03317, over 972343.51 frames.], batch size: 14, lr: 2.15e-04 +2022-05-06 19:57:09,816 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 19:57:19,461 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1909, pruned_loss=0.01111, over 914524.00 frames. 
+2022-05-06 19:57:59,086 INFO [train.py:715] (7/8) Epoch 10, batch 15050, loss[loss=0.1153, simple_loss=0.1957, pruned_loss=0.01744, over 4823.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03348, over 972598.09 frames.], batch size: 27, lr: 2.15e-04 +2022-05-06 19:58:38,146 INFO [train.py:715] (7/8) Epoch 10, batch 15100, loss[loss=0.1274, simple_loss=0.2038, pruned_loss=0.02551, over 4926.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03399, over 972720.83 frames.], batch size: 23, lr: 2.15e-04 +2022-05-06 19:59:17,366 INFO [train.py:715] (7/8) Epoch 10, batch 15150, loss[loss=0.1639, simple_loss=0.2287, pruned_loss=0.04956, over 4899.00 frames.], tot_loss[loss=0.141, simple_loss=0.2137, pruned_loss=0.03415, over 973038.51 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 19:59:56,360 INFO [train.py:715] (7/8) Epoch 10, batch 15200, loss[loss=0.1173, simple_loss=0.1855, pruned_loss=0.0245, over 4791.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03349, over 971771.51 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:00:35,742 INFO [train.py:715] (7/8) Epoch 10, batch 15250, loss[loss=0.1791, simple_loss=0.2526, pruned_loss=0.05283, over 4814.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03299, over 972430.35 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:01:14,789 INFO [train.py:715] (7/8) Epoch 10, batch 15300, loss[loss=0.1366, simple_loss=0.2085, pruned_loss=0.03232, over 4833.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03269, over 972025.81 frames.], batch size: 30, lr: 2.14e-04 +2022-05-06 20:01:54,059 INFO [train.py:715] (7/8) Epoch 10, batch 15350, loss[loss=0.1236, simple_loss=0.2005, pruned_loss=0.02334, over 4948.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.0323, over 972091.84 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:02:34,127 INFO [train.py:715] (7/8) Epoch 10, batch 15400, loss[loss=0.1226, simple_loss=0.2034, pruned_loss=0.02086, over 4813.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03233, over 971969.11 frames.], batch size: 12, lr: 2.14e-04 +2022-05-06 20:03:13,392 INFO [train.py:715] (7/8) Epoch 10, batch 15450, loss[loss=0.1338, simple_loss=0.216, pruned_loss=0.02583, over 4924.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2104, pruned_loss=0.03233, over 971475.63 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:03:53,465 INFO [train.py:715] (7/8) Epoch 10, batch 15500, loss[loss=0.1225, simple_loss=0.21, pruned_loss=0.01748, over 4812.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03214, over 972134.74 frames.], batch size: 26, lr: 2.14e-04 +2022-05-06 20:04:32,472 INFO [train.py:715] (7/8) Epoch 10, batch 15550, loss[loss=0.1817, simple_loss=0.2519, pruned_loss=0.05576, over 4788.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03208, over 971496.59 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:05:11,888 INFO [train.py:715] (7/8) Epoch 10, batch 15600, loss[loss=0.1495, simple_loss=0.226, pruned_loss=0.03653, over 4869.00 frames.], tot_loss[loss=0.1381, simple_loss=0.211, pruned_loss=0.03257, over 972306.61 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:05:50,247 INFO [train.py:715] (7/8) Epoch 10, batch 15650, loss[loss=0.1434, simple_loss=0.2161, pruned_loss=0.03538, over 4773.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03265, over 972143.89 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 
20:06:28,933 INFO [train.py:715] (7/8) Epoch 10, batch 15700, loss[loss=0.1563, simple_loss=0.2207, pruned_loss=0.04589, over 4846.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03314, over 971223.38 frames.], batch size: 32, lr: 2.14e-04 +2022-05-06 20:07:08,407 INFO [train.py:715] (7/8) Epoch 10, batch 15750, loss[loss=0.1258, simple_loss=0.1956, pruned_loss=0.02801, over 4987.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03341, over 970989.32 frames.], batch size: 28, lr: 2.14e-04 +2022-05-06 20:07:46,972 INFO [train.py:715] (7/8) Epoch 10, batch 15800, loss[loss=0.1882, simple_loss=0.2593, pruned_loss=0.05858, over 4710.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.0334, over 971430.11 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:08:26,771 INFO [train.py:715] (7/8) Epoch 10, batch 15850, loss[loss=0.1328, simple_loss=0.2106, pruned_loss=0.02747, over 4898.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2133, pruned_loss=0.03356, over 972230.37 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:09:05,640 INFO [train.py:715] (7/8) Epoch 10, batch 15900, loss[loss=0.1678, simple_loss=0.232, pruned_loss=0.05182, over 4863.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03306, over 972095.90 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:09:44,837 INFO [train.py:715] (7/8) Epoch 10, batch 15950, loss[loss=0.1242, simple_loss=0.1949, pruned_loss=0.02678, over 4790.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2128, pruned_loss=0.03284, over 971445.91 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:10:23,756 INFO [train.py:715] (7/8) Epoch 10, batch 16000, loss[loss=0.1504, simple_loss=0.2202, pruned_loss=0.04031, over 4823.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2133, pruned_loss=0.03328, over 971976.89 frames.], batch size: 27, lr: 2.14e-04 +2022-05-06 20:11:02,645 INFO [train.py:715] (7/8) Epoch 10, batch 16050, loss[loss=0.1184, simple_loss=0.1846, pruned_loss=0.02612, over 4993.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03306, over 972257.86 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:11:41,917 INFO [train.py:715] (7/8) Epoch 10, batch 16100, loss[loss=0.1252, simple_loss=0.1926, pruned_loss=0.02892, over 4863.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03327, over 971567.20 frames.], batch size: 20, lr: 2.14e-04 +2022-05-06 20:12:21,129 INFO [train.py:715] (7/8) Epoch 10, batch 16150, loss[loss=0.1227, simple_loss=0.1873, pruned_loss=0.02899, over 4634.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2115, pruned_loss=0.03298, over 971484.42 frames.], batch size: 13, lr: 2.14e-04 +2022-05-06 20:13:01,098 INFO [train.py:715] (7/8) Epoch 10, batch 16200, loss[loss=0.1684, simple_loss=0.2361, pruned_loss=0.05036, over 4944.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03339, over 971117.84 frames.], batch size: 29, lr: 2.14e-04 +2022-05-06 20:13:40,636 INFO [train.py:715] (7/8) Epoch 10, batch 16250, loss[loss=0.1336, simple_loss=0.1971, pruned_loss=0.03509, over 4862.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03332, over 971258.07 frames.], batch size: 32, lr: 2.14e-04 +2022-05-06 20:14:19,847 INFO [train.py:715] (7/8) Epoch 10, batch 16300, loss[loss=0.1526, simple_loss=0.2217, pruned_loss=0.04175, over 4915.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03348, over 971726.77 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 
20:14:59,852 INFO [train.py:715] (7/8) Epoch 10, batch 16350, loss[loss=0.1341, simple_loss=0.2022, pruned_loss=0.03301, over 4948.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03332, over 971853.96 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:15:39,246 INFO [train.py:715] (7/8) Epoch 10, batch 16400, loss[loss=0.1668, simple_loss=0.2275, pruned_loss=0.05301, over 4830.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03309, over 971179.16 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:16:18,984 INFO [train.py:715] (7/8) Epoch 10, batch 16450, loss[loss=0.1254, simple_loss=0.1861, pruned_loss=0.03231, over 4852.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03306, over 971619.34 frames.], batch size: 30, lr: 2.14e-04 +2022-05-06 20:16:57,472 INFO [train.py:715] (7/8) Epoch 10, batch 16500, loss[loss=0.1459, simple_loss=0.2062, pruned_loss=0.04281, over 4758.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03325, over 971020.11 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:17:36,178 INFO [train.py:715] (7/8) Epoch 10, batch 16550, loss[loss=0.1508, simple_loss=0.2254, pruned_loss=0.03813, over 4866.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03323, over 971672.17 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:18:15,835 INFO [train.py:715] (7/8) Epoch 10, batch 16600, loss[loss=0.1436, simple_loss=0.2163, pruned_loss=0.03551, over 4796.00 frames.], tot_loss[loss=0.1394, simple_loss=0.212, pruned_loss=0.03344, over 971818.69 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:18:54,013 INFO [train.py:715] (7/8) Epoch 10, batch 16650, loss[loss=0.1161, simple_loss=0.194, pruned_loss=0.01908, over 4932.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03359, over 972428.78 frames.], batch size: 29, lr: 2.14e-04 +2022-05-06 20:19:33,370 INFO [train.py:715] (7/8) Epoch 10, batch 16700, loss[loss=0.1388, simple_loss=0.2081, pruned_loss=0.03471, over 4921.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.0335, over 972719.55 frames.], batch size: 18, lr: 2.14e-04 +2022-05-06 20:20:12,358 INFO [train.py:715] (7/8) Epoch 10, batch 16750, loss[loss=0.1615, simple_loss=0.2317, pruned_loss=0.04561, over 4811.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.0331, over 973078.45 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:20:52,512 INFO [train.py:715] (7/8) Epoch 10, batch 16800, loss[loss=0.1374, simple_loss=0.2092, pruned_loss=0.03277, over 4754.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03358, over 973014.68 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:21:31,831 INFO [train.py:715] (7/8) Epoch 10, batch 16850, loss[loss=0.1583, simple_loss=0.2291, pruned_loss=0.04375, over 4869.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03383, over 971948.08 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:22:11,635 INFO [train.py:715] (7/8) Epoch 10, batch 16900, loss[loss=0.1364, simple_loss=0.2144, pruned_loss=0.02921, over 4792.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03349, over 971598.29 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:22:51,675 INFO [train.py:715] (7/8) Epoch 10, batch 16950, loss[loss=0.1516, simple_loss=0.2327, pruned_loss=0.03528, over 4863.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03341, over 971840.91 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 
20:23:29,924 INFO [train.py:715] (7/8) Epoch 10, batch 17000, loss[loss=0.1416, simple_loss=0.213, pruned_loss=0.03512, over 4823.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2118, pruned_loss=0.03316, over 971993.83 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:24:09,511 INFO [train.py:715] (7/8) Epoch 10, batch 17050, loss[loss=0.1485, simple_loss=0.2212, pruned_loss=0.03788, over 4799.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2128, pruned_loss=0.03305, over 972978.99 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:24:48,211 INFO [train.py:715] (7/8) Epoch 10, batch 17100, loss[loss=0.1431, simple_loss=0.2168, pruned_loss=0.03464, over 4826.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.0326, over 972299.62 frames.], batch size: 13, lr: 2.14e-04 +2022-05-06 20:25:27,437 INFO [train.py:715] (7/8) Epoch 10, batch 17150, loss[loss=0.1126, simple_loss=0.19, pruned_loss=0.01762, over 4979.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03274, over 971887.43 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:26:07,397 INFO [train.py:715] (7/8) Epoch 10, batch 17200, loss[loss=0.1729, simple_loss=0.2516, pruned_loss=0.04709, over 4748.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03297, over 971430.27 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:26:47,006 INFO [train.py:715] (7/8) Epoch 10, batch 17250, loss[loss=0.1539, simple_loss=0.2197, pruned_loss=0.04404, over 4975.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03315, over 972187.09 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:27:26,663 INFO [train.py:715] (7/8) Epoch 10, batch 17300, loss[loss=0.1144, simple_loss=0.1892, pruned_loss=0.01976, over 4813.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2136, pruned_loss=0.03331, over 971618.83 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:28:05,428 INFO [train.py:715] (7/8) Epoch 10, batch 17350, loss[loss=0.1298, simple_loss=0.205, pruned_loss=0.02727, over 4951.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2147, pruned_loss=0.03385, over 970970.27 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:28:44,830 INFO [train.py:715] (7/8) Epoch 10, batch 17400, loss[loss=0.1298, simple_loss=0.2002, pruned_loss=0.02969, over 4924.00 frames.], tot_loss[loss=0.1405, simple_loss=0.214, pruned_loss=0.03353, over 971698.90 frames.], batch size: 23, lr: 2.14e-04 +2022-05-06 20:29:24,007 INFO [train.py:715] (7/8) Epoch 10, batch 17450, loss[loss=0.13, simple_loss=0.2026, pruned_loss=0.02864, over 4971.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2134, pruned_loss=0.03364, over 971720.96 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:30:02,982 INFO [train.py:715] (7/8) Epoch 10, batch 17500, loss[loss=0.15, simple_loss=0.2211, pruned_loss=0.03942, over 4863.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2136, pruned_loss=0.03377, over 971110.54 frames.], batch size: 30, lr: 2.14e-04 +2022-05-06 20:30:42,977 INFO [train.py:715] (7/8) Epoch 10, batch 17550, loss[loss=0.1293, simple_loss=0.2076, pruned_loss=0.02554, over 4973.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.03377, over 971460.43 frames.], batch size: 25, lr: 2.14e-04 +2022-05-06 20:31:21,948 INFO [train.py:715] (7/8) Epoch 10, batch 17600, loss[loss=0.1392, simple_loss=0.2115, pruned_loss=0.03342, over 4916.00 frames.], tot_loss[loss=0.14, simple_loss=0.2124, pruned_loss=0.03385, over 971907.97 frames.], batch size: 23, lr: 2.14e-04 +2022-05-06 20:32:01,507 INFO 
[train.py:715] (7/8) Epoch 10, batch 17650, loss[loss=0.1388, simple_loss=0.2098, pruned_loss=0.03385, over 4984.00 frames.], tot_loss[loss=0.139, simple_loss=0.2115, pruned_loss=0.03326, over 971914.37 frames.], batch size: 39, lr: 2.14e-04 +2022-05-06 20:32:40,270 INFO [train.py:715] (7/8) Epoch 10, batch 17700, loss[loss=0.14, simple_loss=0.1989, pruned_loss=0.04052, over 4967.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2118, pruned_loss=0.03367, over 972014.26 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:33:20,045 INFO [train.py:715] (7/8) Epoch 10, batch 17750, loss[loss=0.1275, simple_loss=0.2062, pruned_loss=0.02443, over 4792.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2118, pruned_loss=0.03363, over 972039.03 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:33:59,770 INFO [train.py:715] (7/8) Epoch 10, batch 17800, loss[loss=0.1223, simple_loss=0.1976, pruned_loss=0.02346, over 4835.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2129, pruned_loss=0.03422, over 971473.47 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:34:38,718 INFO [train.py:715] (7/8) Epoch 10, batch 17850, loss[loss=0.1314, simple_loss=0.2132, pruned_loss=0.02482, over 4749.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.03368, over 971337.37 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:35:18,472 INFO [train.py:715] (7/8) Epoch 10, batch 17900, loss[loss=0.1285, simple_loss=0.1928, pruned_loss=0.03207, over 4831.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03336, over 971546.08 frames.], batch size: 13, lr: 2.14e-04 +2022-05-06 20:35:57,406 INFO [train.py:715] (7/8) Epoch 10, batch 17950, loss[loss=0.1595, simple_loss=0.2378, pruned_loss=0.04057, over 4954.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03323, over 972307.86 frames.], batch size: 39, lr: 2.14e-04 +2022-05-06 20:36:36,024 INFO [train.py:715] (7/8) Epoch 10, batch 18000, loss[loss=0.1189, simple_loss=0.1931, pruned_loss=0.02229, over 4895.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03374, over 972465.96 frames.], batch size: 17, lr: 2.14e-04 +2022-05-06 20:36:36,025 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 20:36:45,529 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1064, simple_loss=0.1906, pruned_loss=0.01104, over 914524.00 frames. 
+2022-05-06 20:37:24,884 INFO [train.py:715] (7/8) Epoch 10, batch 18050, loss[loss=0.1298, simple_loss=0.2061, pruned_loss=0.02675, over 4803.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2133, pruned_loss=0.03354, over 973378.36 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:38:03,976 INFO [train.py:715] (7/8) Epoch 10, batch 18100, loss[loss=0.1388, simple_loss=0.2175, pruned_loss=0.03004, over 4944.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2134, pruned_loss=0.03343, over 973518.11 frames.], batch size: 21, lr: 2.14e-04 +2022-05-06 20:38:43,264 INFO [train.py:715] (7/8) Epoch 10, batch 18150, loss[loss=0.1286, simple_loss=0.204, pruned_loss=0.02659, over 4800.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2135, pruned_loss=0.03354, over 972287.28 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:39:21,940 INFO [train.py:715] (7/8) Epoch 10, batch 18200, loss[loss=0.1353, simple_loss=0.2105, pruned_loss=0.03008, over 4991.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03368, over 972143.50 frames.], batch size: 14, lr: 2.14e-04 +2022-05-06 20:40:00,618 INFO [train.py:715] (7/8) Epoch 10, batch 18250, loss[loss=0.1198, simple_loss=0.1935, pruned_loss=0.02304, over 4908.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03382, over 971653.41 frames.], batch size: 19, lr: 2.14e-04 +2022-05-06 20:40:40,111 INFO [train.py:715] (7/8) Epoch 10, batch 18300, loss[loss=0.1763, simple_loss=0.2472, pruned_loss=0.0527, over 4852.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2141, pruned_loss=0.03438, over 971029.38 frames.], batch size: 15, lr: 2.14e-04 +2022-05-06 20:41:19,475 INFO [train.py:715] (7/8) Epoch 10, batch 18350, loss[loss=0.1573, simple_loss=0.234, pruned_loss=0.04029, over 4742.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.03413, over 971610.98 frames.], batch size: 16, lr: 2.14e-04 +2022-05-06 20:41:57,961 INFO [train.py:715] (7/8) Epoch 10, batch 18400, loss[loss=0.1375, simple_loss=0.2016, pruned_loss=0.03665, over 4938.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03376, over 971434.43 frames.], batch size: 23, lr: 2.14e-04 +2022-05-06 20:42:37,151 INFO [train.py:715] (7/8) Epoch 10, batch 18450, loss[loss=0.1475, simple_loss=0.2216, pruned_loss=0.03671, over 4869.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03365, over 971188.04 frames.], batch size: 20, lr: 2.14e-04 +2022-05-06 20:43:16,007 INFO [train.py:715] (7/8) Epoch 10, batch 18500, loss[loss=0.1345, simple_loss=0.2092, pruned_loss=0.02991, over 4973.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.0332, over 971712.43 frames.], batch size: 24, lr: 2.14e-04 +2022-05-06 20:43:55,529 INFO [train.py:715] (7/8) Epoch 10, batch 18550, loss[loss=0.1609, simple_loss=0.2261, pruned_loss=0.0479, over 4866.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03329, over 971573.21 frames.], batch size: 32, lr: 2.13e-04 +2022-05-06 20:44:33,842 INFO [train.py:715] (7/8) Epoch 10, batch 18600, loss[loss=0.1247, simple_loss=0.2038, pruned_loss=0.02285, over 4961.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2122, pruned_loss=0.03351, over 971385.78 frames.], batch size: 24, lr: 2.13e-04 +2022-05-06 20:45:13,255 INFO [train.py:715] (7/8) Epoch 10, batch 18650, loss[loss=0.1154, simple_loss=0.1809, pruned_loss=0.02492, over 4967.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2112, pruned_loss=0.0331, over 970766.85 frames.], batch size: 35, lr: 2.13e-04 +2022-05-06 
20:45:52,995 INFO [train.py:715] (7/8) Epoch 10, batch 18700, loss[loss=0.1296, simple_loss=0.2064, pruned_loss=0.02638, over 4780.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.03338, over 970854.53 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 20:46:31,255 INFO [train.py:715] (7/8) Epoch 10, batch 18750, loss[loss=0.1123, simple_loss=0.194, pruned_loss=0.01535, over 4835.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.03365, over 970936.63 frames.], batch size: 27, lr: 2.13e-04 +2022-05-06 20:47:10,635 INFO [train.py:715] (7/8) Epoch 10, batch 18800, loss[loss=0.1357, simple_loss=0.2134, pruned_loss=0.02897, over 4941.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2131, pruned_loss=0.03421, over 971507.73 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 20:47:50,112 INFO [train.py:715] (7/8) Epoch 10, batch 18850, loss[loss=0.134, simple_loss=0.2204, pruned_loss=0.02386, over 4805.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03373, over 972749.02 frames.], batch size: 25, lr: 2.13e-04 +2022-05-06 20:48:29,014 INFO [train.py:715] (7/8) Epoch 10, batch 18900, loss[loss=0.1346, simple_loss=0.203, pruned_loss=0.03307, over 4838.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2127, pruned_loss=0.0338, over 972424.74 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 20:49:08,064 INFO [train.py:715] (7/8) Epoch 10, batch 18950, loss[loss=0.1113, simple_loss=0.1856, pruned_loss=0.01855, over 4740.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03352, over 972168.06 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 20:49:48,336 INFO [train.py:715] (7/8) Epoch 10, batch 19000, loss[loss=0.1293, simple_loss=0.2001, pruned_loss=0.0293, over 4856.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03336, over 972045.03 frames.], batch size: 20, lr: 2.13e-04 +2022-05-06 20:50:27,642 INFO [train.py:715] (7/8) Epoch 10, batch 19050, loss[loss=0.1394, simple_loss=0.2148, pruned_loss=0.032, over 4942.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.03346, over 971812.53 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 20:51:06,453 INFO [train.py:715] (7/8) Epoch 10, batch 19100, loss[loss=0.1716, simple_loss=0.2513, pruned_loss=0.04589, over 4813.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.03335, over 972319.60 frames.], batch size: 26, lr: 2.13e-04 +2022-05-06 20:51:46,327 INFO [train.py:715] (7/8) Epoch 10, batch 19150, loss[loss=0.1569, simple_loss=0.2216, pruned_loss=0.04611, over 4899.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03321, over 972884.26 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 20:52:26,496 INFO [train.py:715] (7/8) Epoch 10, batch 19200, loss[loss=0.1786, simple_loss=0.2529, pruned_loss=0.05212, over 4939.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03308, over 973578.02 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 20:53:06,172 INFO [train.py:715] (7/8) Epoch 10, batch 19250, loss[loss=0.1535, simple_loss=0.2212, pruned_loss=0.04292, over 4853.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03354, over 974173.19 frames.], batch size: 30, lr: 2.13e-04 +2022-05-06 20:53:46,069 INFO [train.py:715] (7/8) Epoch 10, batch 19300, loss[loss=0.1108, simple_loss=0.1847, pruned_loss=0.01842, over 4772.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03332, over 973876.32 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 20:54:26,473 
INFO [train.py:715] (7/8) Epoch 10, batch 19350, loss[loss=0.1455, simple_loss=0.2159, pruned_loss=0.03758, over 4818.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2118, pruned_loss=0.03325, over 973549.57 frames.], batch size: 27, lr: 2.13e-04 +2022-05-06 20:55:06,651 INFO [train.py:715] (7/8) Epoch 10, batch 19400, loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03288, over 4852.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03313, over 972931.96 frames.], batch size: 32, lr: 2.13e-04 +2022-05-06 20:55:45,795 INFO [train.py:715] (7/8) Epoch 10, batch 19450, loss[loss=0.1353, simple_loss=0.2163, pruned_loss=0.02709, over 4913.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03302, over 972463.16 frames.], batch size: 23, lr: 2.13e-04 +2022-05-06 20:56:25,410 INFO [train.py:715] (7/8) Epoch 10, batch 19500, loss[loss=0.1398, simple_loss=0.2094, pruned_loss=0.03513, over 4845.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03345, over 973336.58 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 20:57:04,609 INFO [train.py:715] (7/8) Epoch 10, batch 19550, loss[loss=0.1582, simple_loss=0.2289, pruned_loss=0.0438, over 4938.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03362, over 973090.12 frames.], batch size: 23, lr: 2.13e-04 +2022-05-06 20:57:43,332 INFO [train.py:715] (7/8) Epoch 10, batch 19600, loss[loss=0.121, simple_loss=0.2039, pruned_loss=0.01905, over 4778.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03357, over 972465.59 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 20:58:22,309 INFO [train.py:715] (7/8) Epoch 10, batch 19650, loss[loss=0.1182, simple_loss=0.1985, pruned_loss=0.01897, over 4903.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03354, over 972455.54 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 20:59:01,943 INFO [train.py:715] (7/8) Epoch 10, batch 19700, loss[loss=0.1546, simple_loss=0.2395, pruned_loss=0.03483, over 4981.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.0333, over 972683.35 frames.], batch size: 39, lr: 2.13e-04 +2022-05-06 20:59:41,299 INFO [train.py:715] (7/8) Epoch 10, batch 19750, loss[loss=0.1122, simple_loss=0.1892, pruned_loss=0.0176, over 4840.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2123, pruned_loss=0.03354, over 971615.76 frames.], batch size: 13, lr: 2.13e-04 +2022-05-06 21:00:19,605 INFO [train.py:715] (7/8) Epoch 10, batch 19800, loss[loss=0.1514, simple_loss=0.2179, pruned_loss=0.04238, over 4778.00 frames.], tot_loss[loss=0.14, simple_loss=0.2126, pruned_loss=0.03365, over 971535.88 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 21:00:59,243 INFO [train.py:715] (7/8) Epoch 10, batch 19850, loss[loss=0.1156, simple_loss=0.1974, pruned_loss=0.01688, over 4639.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03339, over 971088.89 frames.], batch size: 13, lr: 2.13e-04 +2022-05-06 21:01:38,760 INFO [train.py:715] (7/8) Epoch 10, batch 19900, loss[loss=0.136, simple_loss=0.2068, pruned_loss=0.03253, over 4824.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2124, pruned_loss=0.03358, over 971495.91 frames.], batch size: 27, lr: 2.13e-04 +2022-05-06 21:02:19,875 INFO [train.py:715] (7/8) Epoch 10, batch 19950, loss[loss=0.1125, simple_loss=0.191, pruned_loss=0.01702, over 4931.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2118, pruned_loss=0.0333, over 972273.93 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:02:58,933 INFO [train.py:715] 
(7/8) Epoch 10, batch 20000, loss[loss=0.1367, simple_loss=0.2064, pruned_loss=0.03352, over 4978.00 frames.], tot_loss[loss=0.1395, simple_loss=0.212, pruned_loss=0.03348, over 971738.37 frames.], batch size: 28, lr: 2.13e-04 +2022-05-06 21:03:37,948 INFO [train.py:715] (7/8) Epoch 10, batch 20050, loss[loss=0.155, simple_loss=0.2224, pruned_loss=0.04376, over 4835.00 frames.], tot_loss[loss=0.139, simple_loss=0.2117, pruned_loss=0.03311, over 972322.31 frames.], batch size: 30, lr: 2.13e-04 +2022-05-06 21:04:17,429 INFO [train.py:715] (7/8) Epoch 10, batch 20100, loss[loss=0.1134, simple_loss=0.1895, pruned_loss=0.01865, over 4800.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03272, over 972307.33 frames.], batch size: 24, lr: 2.13e-04 +2022-05-06 21:04:55,530 INFO [train.py:715] (7/8) Epoch 10, batch 20150, loss[loss=0.1188, simple_loss=0.1987, pruned_loss=0.01944, over 4932.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03235, over 972387.41 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:05:34,943 INFO [train.py:715] (7/8) Epoch 10, batch 20200, loss[loss=0.1236, simple_loss=0.1921, pruned_loss=0.02756, over 4822.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2115, pruned_loss=0.03296, over 971619.96 frames.], batch size: 13, lr: 2.13e-04 +2022-05-06 21:06:13,960 INFO [train.py:715] (7/8) Epoch 10, batch 20250, loss[loss=0.1548, simple_loss=0.2327, pruned_loss=0.03842, over 4707.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03288, over 972082.76 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 21:06:52,619 INFO [train.py:715] (7/8) Epoch 10, batch 20300, loss[loss=0.1843, simple_loss=0.2434, pruned_loss=0.06264, over 4778.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03322, over 971241.57 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:07:31,404 INFO [train.py:715] (7/8) Epoch 10, batch 20350, loss[loss=0.1127, simple_loss=0.1886, pruned_loss=0.01841, over 4990.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2122, pruned_loss=0.03318, over 971513.06 frames.], batch size: 25, lr: 2.13e-04 +2022-05-06 21:08:10,511 INFO [train.py:715] (7/8) Epoch 10, batch 20400, loss[loss=0.16, simple_loss=0.2412, pruned_loss=0.03938, over 4967.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03312, over 971649.84 frames.], batch size: 39, lr: 2.13e-04 +2022-05-06 21:08:49,428 INFO [train.py:715] (7/8) Epoch 10, batch 20450, loss[loss=0.1238, simple_loss=0.1965, pruned_loss=0.02554, over 4908.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2138, pruned_loss=0.03359, over 972344.02 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 21:09:27,881 INFO [train.py:715] (7/8) Epoch 10, batch 20500, loss[loss=0.139, simple_loss=0.218, pruned_loss=0.02997, over 4916.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03414, over 971622.17 frames.], batch size: 39, lr: 2.13e-04 +2022-05-06 21:10:06,952 INFO [train.py:715] (7/8) Epoch 10, batch 20550, loss[loss=0.1279, simple_loss=0.1973, pruned_loss=0.02924, over 4943.00 frames.], tot_loss[loss=0.1412, simple_loss=0.2137, pruned_loss=0.03431, over 972313.98 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 21:10:46,034 INFO [train.py:715] (7/8) Epoch 10, batch 20600, loss[loss=0.1598, simple_loss=0.2308, pruned_loss=0.04442, over 4903.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03429, over 972590.14 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:11:25,464 INFO [train.py:715] (7/8) Epoch 
10, batch 20650, loss[loss=0.1381, simple_loss=0.2184, pruned_loss=0.02884, over 4935.00 frames.], tot_loss[loss=0.1415, simple_loss=0.2144, pruned_loss=0.03425, over 973049.45 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 21:12:04,258 INFO [train.py:715] (7/8) Epoch 10, batch 20700, loss[loss=0.1154, simple_loss=0.1873, pruned_loss=0.02177, over 4781.00 frames.], tot_loss[loss=0.141, simple_loss=0.214, pruned_loss=0.03404, over 972508.98 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 21:12:44,600 INFO [train.py:715] (7/8) Epoch 10, batch 20750, loss[loss=0.1152, simple_loss=0.1926, pruned_loss=0.01887, over 4942.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.03375, over 972606.55 frames.], batch size: 21, lr: 2.13e-04 +2022-05-06 21:13:24,577 INFO [train.py:715] (7/8) Epoch 10, batch 20800, loss[loss=0.127, simple_loss=0.1975, pruned_loss=0.02829, over 4981.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03353, over 971994.93 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 21:14:03,351 INFO [train.py:715] (7/8) Epoch 10, batch 20850, loss[loss=0.1145, simple_loss=0.187, pruned_loss=0.02098, over 4919.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03298, over 971833.19 frames.], batch size: 23, lr: 2.13e-04 +2022-05-06 21:14:43,295 INFO [train.py:715] (7/8) Epoch 10, batch 20900, loss[loss=0.1139, simple_loss=0.1921, pruned_loss=0.0179, over 4706.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03292, over 972024.69 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 21:15:23,755 INFO [train.py:715] (7/8) Epoch 10, batch 20950, loss[loss=0.1435, simple_loss=0.21, pruned_loss=0.03854, over 4874.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2133, pruned_loss=0.03356, over 972722.16 frames.], batch size: 16, lr: 2.13e-04 +2022-05-06 21:16:02,703 INFO [train.py:715] (7/8) Epoch 10, batch 21000, loss[loss=0.1685, simple_loss=0.2405, pruned_loss=0.04826, over 4929.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2133, pruned_loss=0.03385, over 973485.41 frames.], batch size: 39, lr: 2.13e-04 +2022-05-06 21:16:02,704 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 21:16:12,204 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1065, simple_loss=0.1909, pruned_loss=0.01111, over 914524.00 frames. 
+2022-05-06 21:16:51,726 INFO [train.py:715] (7/8) Epoch 10, batch 21050, loss[loss=0.111, simple_loss=0.1881, pruned_loss=0.01695, over 4832.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2134, pruned_loss=0.03348, over 973086.53 frames.], batch size: 26, lr: 2.13e-04 +2022-05-06 21:17:32,534 INFO [train.py:715] (7/8) Epoch 10, batch 21100, loss[loss=0.1515, simple_loss=0.2238, pruned_loss=0.03966, over 4839.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03362, over 972088.00 frames.], batch size: 20, lr: 2.13e-04 +2022-05-06 21:18:14,011 INFO [train.py:715] (7/8) Epoch 10, batch 21150, loss[loss=0.1419, simple_loss=0.2197, pruned_loss=0.03204, over 4793.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03367, over 973131.07 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 21:18:55,100 INFO [train.py:715] (7/8) Epoch 10, batch 21200, loss[loss=0.1632, simple_loss=0.2324, pruned_loss=0.04695, over 4740.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2137, pruned_loss=0.03372, over 972568.85 frames.], batch size: 16, lr: 2.13e-04 +2022-05-06 21:19:35,765 INFO [train.py:715] (7/8) Epoch 10, batch 21250, loss[loss=0.1746, simple_loss=0.264, pruned_loss=0.0426, over 4969.00 frames.], tot_loss[loss=0.14, simple_loss=0.2135, pruned_loss=0.03325, over 973045.04 frames.], batch size: 28, lr: 2.13e-04 +2022-05-06 21:20:17,431 INFO [train.py:715] (7/8) Epoch 10, batch 21300, loss[loss=0.148, simple_loss=0.2156, pruned_loss=0.04021, over 4897.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03333, over 972968.84 frames.], batch size: 17, lr: 2.13e-04 +2022-05-06 21:20:58,697 INFO [train.py:715] (7/8) Epoch 10, batch 21350, loss[loss=0.1331, simple_loss=0.2121, pruned_loss=0.02705, over 4873.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.0336, over 972770.31 frames.], batch size: 32, lr: 2.13e-04 +2022-05-06 21:21:39,104 INFO [train.py:715] (7/8) Epoch 10, batch 21400, loss[loss=0.1269, simple_loss=0.2031, pruned_loss=0.0253, over 4822.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.0334, over 972386.19 frames.], batch size: 12, lr: 2.13e-04 +2022-05-06 21:22:20,537 INFO [train.py:715] (7/8) Epoch 10, batch 21450, loss[loss=0.1243, simple_loss=0.195, pruned_loss=0.02674, over 4787.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2137, pruned_loss=0.03333, over 973065.43 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 21:23:02,356 INFO [train.py:715] (7/8) Epoch 10, batch 21500, loss[loss=0.1503, simple_loss=0.2211, pruned_loss=0.03974, over 4843.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03344, over 972695.33 frames.], batch size: 30, lr: 2.13e-04 +2022-05-06 21:23:43,375 INFO [train.py:715] (7/8) Epoch 10, batch 21550, loss[loss=0.1212, simple_loss=0.1972, pruned_loss=0.02255, over 4889.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2135, pruned_loss=0.03356, over 972618.80 frames.], batch size: 19, lr: 2.13e-04 +2022-05-06 21:24:24,261 INFO [train.py:715] (7/8) Epoch 10, batch 21600, loss[loss=0.1266, simple_loss=0.2081, pruned_loss=0.02253, over 4933.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03386, over 972536.70 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:25:06,212 INFO [train.py:715] (7/8) Epoch 10, batch 21650, loss[loss=0.1554, simple_loss=0.2217, pruned_loss=0.04459, over 4854.00 frames.], tot_loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.0338, over 972091.06 frames.], batch size: 15, lr: 2.13e-04 +2022-05-06 
21:25:47,751 INFO [train.py:715] (7/8) Epoch 10, batch 21700, loss[loss=0.1433, simple_loss=0.2255, pruned_loss=0.0305, over 4927.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2141, pruned_loss=0.03369, over 972950.18 frames.], batch size: 29, lr: 2.13e-04 +2022-05-06 21:26:28,011 INFO [train.py:715] (7/8) Epoch 10, batch 21750, loss[loss=0.1652, simple_loss=0.235, pruned_loss=0.04772, over 4877.00 frames.], tot_loss[loss=0.1413, simple_loss=0.2142, pruned_loss=0.03418, over 972801.98 frames.], batch size: 16, lr: 2.13e-04 +2022-05-06 21:27:08,996 INFO [train.py:715] (7/8) Epoch 10, batch 21800, loss[loss=0.1935, simple_loss=0.2479, pruned_loss=0.06959, over 4982.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03412, over 973036.17 frames.], batch size: 39, lr: 2.13e-04 +2022-05-06 21:27:50,697 INFO [train.py:715] (7/8) Epoch 10, batch 21850, loss[loss=0.1677, simple_loss=0.2317, pruned_loss=0.05182, over 4916.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2134, pruned_loss=0.03398, over 973739.18 frames.], batch size: 18, lr: 2.13e-04 +2022-05-06 21:28:31,165 INFO [train.py:715] (7/8) Epoch 10, batch 21900, loss[loss=0.1197, simple_loss=0.1914, pruned_loss=0.02399, over 4975.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03335, over 972889.83 frames.], batch size: 14, lr: 2.13e-04 +2022-05-06 21:29:11,915 INFO [train.py:715] (7/8) Epoch 10, batch 21950, loss[loss=0.1499, simple_loss=0.2187, pruned_loss=0.04058, over 4912.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.0333, over 972205.77 frames.], batch size: 23, lr: 2.13e-04 +2022-05-06 21:29:53,132 INFO [train.py:715] (7/8) Epoch 10, batch 22000, loss[loss=0.1064, simple_loss=0.1832, pruned_loss=0.01479, over 4748.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03297, over 972100.00 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 21:30:33,465 INFO [train.py:715] (7/8) Epoch 10, batch 22050, loss[loss=0.1513, simple_loss=0.2176, pruned_loss=0.04252, over 4685.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03315, over 971758.23 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:31:14,081 INFO [train.py:715] (7/8) Epoch 10, batch 22100, loss[loss=0.1057, simple_loss=0.1728, pruned_loss=0.01933, over 4953.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03325, over 972231.50 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 21:31:54,936 INFO [train.py:715] (7/8) Epoch 10, batch 22150, loss[loss=0.1758, simple_loss=0.2427, pruned_loss=0.05449, over 4819.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03318, over 972186.74 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:32:35,990 INFO [train.py:715] (7/8) Epoch 10, batch 22200, loss[loss=0.1366, simple_loss=0.2109, pruned_loss=0.03115, over 4838.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.0332, over 971661.10 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:33:16,089 INFO [train.py:715] (7/8) Epoch 10, batch 22250, loss[loss=0.09987, simple_loss=0.1753, pruned_loss=0.01221, over 4926.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03329, over 971582.32 frames.], batch size: 23, lr: 2.12e-04 +2022-05-06 21:33:56,742 INFO [train.py:715] (7/8) Epoch 10, batch 22300, loss[loss=0.1188, simple_loss=0.1958, pruned_loss=0.02094, over 4808.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03296, over 972339.17 frames.], batch size: 26, lr: 2.12e-04 +2022-05-06 
21:34:37,776 INFO [train.py:715] (7/8) Epoch 10, batch 22350, loss[loss=0.1186, simple_loss=0.1984, pruned_loss=0.01943, over 4898.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03297, over 971862.52 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 21:35:17,621 INFO [train.py:715] (7/8) Epoch 10, batch 22400, loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02998, over 4874.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.0333, over 970835.82 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 21:35:56,790 INFO [train.py:715] (7/8) Epoch 10, batch 22450, loss[loss=0.1313, simple_loss=0.2052, pruned_loss=0.02872, over 4917.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.03371, over 971000.29 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:36:36,730 INFO [train.py:715] (7/8) Epoch 10, batch 22500, loss[loss=0.1159, simple_loss=0.1934, pruned_loss=0.01918, over 4803.00 frames.], tot_loss[loss=0.141, simple_loss=0.2135, pruned_loss=0.03431, over 971235.36 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 21:37:17,614 INFO [train.py:715] (7/8) Epoch 10, batch 22550, loss[loss=0.1238, simple_loss=0.2048, pruned_loss=0.02144, over 4698.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2134, pruned_loss=0.03416, over 970407.93 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:37:56,429 INFO [train.py:715] (7/8) Epoch 10, batch 22600, loss[loss=0.1188, simple_loss=0.1905, pruned_loss=0.02351, over 4875.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2136, pruned_loss=0.03408, over 969972.12 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 21:38:37,514 INFO [train.py:715] (7/8) Epoch 10, batch 22650, loss[loss=0.1516, simple_loss=0.2202, pruned_loss=0.04151, over 4779.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2132, pruned_loss=0.03415, over 970732.48 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 21:39:19,369 INFO [train.py:715] (7/8) Epoch 10, batch 22700, loss[loss=0.1544, simple_loss=0.2239, pruned_loss=0.04248, over 4812.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2128, pruned_loss=0.03404, over 971707.13 frames.], batch size: 26, lr: 2.12e-04 +2022-05-06 21:40:00,104 INFO [train.py:715] (7/8) Epoch 10, batch 22750, loss[loss=0.1311, simple_loss=0.2072, pruned_loss=0.02751, over 4768.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03382, over 971792.98 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 21:40:41,327 INFO [train.py:715] (7/8) Epoch 10, batch 22800, loss[loss=0.1105, simple_loss=0.1934, pruned_loss=0.01378, over 4833.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03367, over 971712.39 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 21:41:22,878 INFO [train.py:715] (7/8) Epoch 10, batch 22850, loss[loss=0.101, simple_loss=0.1795, pruned_loss=0.01128, over 4853.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03392, over 972271.81 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 21:42:04,582 INFO [train.py:715] (7/8) Epoch 10, batch 22900, loss[loss=0.1339, simple_loss=0.1988, pruned_loss=0.03447, over 4827.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.03378, over 972430.58 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 21:42:45,056 INFO [train.py:715] (7/8) Epoch 10, batch 22950, loss[loss=0.1554, simple_loss=0.2259, pruned_loss=0.04246, over 4810.00 frames.], tot_loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.03377, over 971698.65 frames.], batch size: 26, lr: 2.12e-04 +2022-05-06 
21:43:27,072 INFO [train.py:715] (7/8) Epoch 10, batch 23000, loss[loss=0.1431, simple_loss=0.2029, pruned_loss=0.04162, over 4797.00 frames.], tot_loss[loss=0.1409, simple_loss=0.214, pruned_loss=0.03386, over 970949.80 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 21:44:09,141 INFO [train.py:715] (7/8) Epoch 10, batch 23050, loss[loss=0.1099, simple_loss=0.1887, pruned_loss=0.01548, over 4791.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2138, pruned_loss=0.03335, over 971291.30 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 21:44:49,656 INFO [train.py:715] (7/8) Epoch 10, batch 23100, loss[loss=0.1292, simple_loss=0.1934, pruned_loss=0.03245, over 4990.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03343, over 971626.81 frames.], batch size: 25, lr: 2.12e-04 +2022-05-06 21:45:30,864 INFO [train.py:715] (7/8) Epoch 10, batch 23150, loss[loss=0.1206, simple_loss=0.194, pruned_loss=0.02361, over 4823.00 frames.], tot_loss[loss=0.1405, simple_loss=0.214, pruned_loss=0.03347, over 971424.72 frames.], batch size: 25, lr: 2.12e-04 +2022-05-06 21:46:12,876 INFO [train.py:715] (7/8) Epoch 10, batch 23200, loss[loss=0.1312, simple_loss=0.2045, pruned_loss=0.02894, over 4881.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03323, over 971233.40 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 21:46:54,163 INFO [train.py:715] (7/8) Epoch 10, batch 23250, loss[loss=0.1427, simple_loss=0.2114, pruned_loss=0.03699, over 4975.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2137, pruned_loss=0.0335, over 971981.86 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:47:34,842 INFO [train.py:715] (7/8) Epoch 10, batch 23300, loss[loss=0.1351, simple_loss=0.2127, pruned_loss=0.02871, over 4807.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2141, pruned_loss=0.03363, over 971371.27 frames.], batch size: 26, lr: 2.12e-04 +2022-05-06 21:48:16,730 INFO [train.py:715] (7/8) Epoch 10, batch 23350, loss[loss=0.1373, simple_loss=0.2167, pruned_loss=0.029, over 4750.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2133, pruned_loss=0.0331, over 971446.73 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 21:48:58,865 INFO [train.py:715] (7/8) Epoch 10, batch 23400, loss[loss=0.1765, simple_loss=0.2443, pruned_loss=0.05432, over 4954.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2129, pruned_loss=0.03296, over 972023.63 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:49:39,778 INFO [train.py:715] (7/8) Epoch 10, batch 23450, loss[loss=0.1347, simple_loss=0.1975, pruned_loss=0.0359, over 4702.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03312, over 972675.76 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:50:20,140 INFO [train.py:715] (7/8) Epoch 10, batch 23500, loss[loss=0.1881, simple_loss=0.2421, pruned_loss=0.06703, over 4947.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03309, over 973265.58 frames.], batch size: 35, lr: 2.12e-04 +2022-05-06 21:51:02,205 INFO [train.py:715] (7/8) Epoch 10, batch 23550, loss[loss=0.148, simple_loss=0.2166, pruned_loss=0.03968, over 4987.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03328, over 973042.66 frames.], batch size: 24, lr: 2.12e-04 +2022-05-06 21:51:43,362 INFO [train.py:715] (7/8) Epoch 10, batch 23600, loss[loss=0.129, simple_loss=0.2112, pruned_loss=0.02342, over 4980.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03314, over 972931.90 frames.], batch size: 24, lr: 2.12e-04 +2022-05-06 21:52:23,121 INFO 
[train.py:715] (7/8) Epoch 10, batch 23650, loss[loss=0.1573, simple_loss=0.2211, pruned_loss=0.04676, over 4911.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03351, over 972557.67 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 21:53:03,643 INFO [train.py:715] (7/8) Epoch 10, batch 23700, loss[loss=0.1157, simple_loss=0.1846, pruned_loss=0.02339, over 4755.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03381, over 972797.55 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 21:53:44,221 INFO [train.py:715] (7/8) Epoch 10, batch 23750, loss[loss=0.144, simple_loss=0.2154, pruned_loss=0.03633, over 4805.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03354, over 972389.68 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 21:54:24,361 INFO [train.py:715] (7/8) Epoch 10, batch 23800, loss[loss=0.1235, simple_loss=0.2067, pruned_loss=0.0202, over 4894.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2132, pruned_loss=0.03377, over 972821.95 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 21:55:04,954 INFO [train.py:715] (7/8) Epoch 10, batch 23850, loss[loss=0.1216, simple_loss=0.207, pruned_loss=0.01815, over 4775.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03351, over 973188.70 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 21:55:46,220 INFO [train.py:715] (7/8) Epoch 10, batch 23900, loss[loss=0.1441, simple_loss=0.224, pruned_loss=0.03206, over 4923.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2131, pruned_loss=0.03326, over 973203.55 frames.], batch size: 23, lr: 2.12e-04 +2022-05-06 21:56:25,837 INFO [train.py:715] (7/8) Epoch 10, batch 23950, loss[loss=0.133, simple_loss=0.21, pruned_loss=0.02802, over 4690.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.03343, over 973338.82 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:57:06,219 INFO [train.py:715] (7/8) Epoch 10, batch 24000, loss[loss=0.1432, simple_loss=0.2244, pruned_loss=0.03098, over 4816.00 frames.], tot_loss[loss=0.14, simple_loss=0.2134, pruned_loss=0.03335, over 972550.75 frames.], batch size: 27, lr: 2.12e-04 +2022-05-06 21:57:06,219 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 21:57:15,894 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1061, simple_loss=0.1905, pruned_loss=0.01087, over 914524.00 frames. 
+2022-05-06 21:57:55,802 INFO [train.py:715] (7/8) Epoch 10, batch 24050, loss[loss=0.1489, simple_loss=0.2371, pruned_loss=0.03036, over 4690.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2131, pruned_loss=0.03384, over 972139.15 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:58:36,847 INFO [train.py:715] (7/8) Epoch 10, batch 24100, loss[loss=0.1219, simple_loss=0.1993, pruned_loss=0.02221, over 4931.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2133, pruned_loss=0.03417, over 972222.20 frames.], batch size: 17, lr: 2.12e-04 +2022-05-06 21:59:18,109 INFO [train.py:715] (7/8) Epoch 10, batch 24150, loss[loss=0.135, simple_loss=0.2145, pruned_loss=0.02774, over 4980.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03346, over 973399.05 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 21:59:57,436 INFO [train.py:715] (7/8) Epoch 10, batch 24200, loss[loss=0.1177, simple_loss=0.1832, pruned_loss=0.02612, over 4785.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03352, over 972567.63 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 22:00:38,181 INFO [train.py:715] (7/8) Epoch 10, batch 24250, loss[loss=0.1387, simple_loss=0.204, pruned_loss=0.03665, over 4648.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2119, pruned_loss=0.0333, over 973172.07 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 22:01:19,296 INFO [train.py:715] (7/8) Epoch 10, batch 24300, loss[loss=0.1234, simple_loss=0.2015, pruned_loss=0.02268, over 4920.00 frames.], tot_loss[loss=0.139, simple_loss=0.2116, pruned_loss=0.0332, over 973007.64 frames.], batch size: 29, lr: 2.12e-04 +2022-05-06 22:01:59,415 INFO [train.py:715] (7/8) Epoch 10, batch 24350, loss[loss=0.1589, simple_loss=0.2292, pruned_loss=0.04434, over 4960.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2115, pruned_loss=0.03313, over 973278.80 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 22:02:39,459 INFO [train.py:715] (7/8) Epoch 10, batch 24400, loss[loss=0.1198, simple_loss=0.1844, pruned_loss=0.02762, over 4718.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2113, pruned_loss=0.03298, over 973699.42 frames.], batch size: 12, lr: 2.12e-04 +2022-05-06 22:03:20,178 INFO [train.py:715] (7/8) Epoch 10, batch 24450, loss[loss=0.1597, simple_loss=0.2313, pruned_loss=0.04404, over 4980.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2105, pruned_loss=0.0326, over 972427.84 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 22:04:01,139 INFO [train.py:715] (7/8) Epoch 10, batch 24500, loss[loss=0.1134, simple_loss=0.1899, pruned_loss=0.0184, over 4800.00 frames.], tot_loss[loss=0.1385, simple_loss=0.211, pruned_loss=0.033, over 972738.82 frames.], batch size: 21, lr: 2.12e-04 +2022-05-06 22:04:40,220 INFO [train.py:715] (7/8) Epoch 10, batch 24550, loss[loss=0.1845, simple_loss=0.2482, pruned_loss=0.06038, over 4767.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2111, pruned_loss=0.03315, over 974094.84 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 22:05:20,204 INFO [train.py:715] (7/8) Epoch 10, batch 24600, loss[loss=0.1663, simple_loss=0.2283, pruned_loss=0.05219, over 4793.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2112, pruned_loss=0.03281, over 973692.08 frames.], batch size: 14, lr: 2.12e-04 +2022-05-06 22:06:00,575 INFO [train.py:715] (7/8) Epoch 10, batch 24650, loss[loss=0.1288, simple_loss=0.1991, pruned_loss=0.02923, over 4710.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2109, pruned_loss=0.03297, over 973458.95 frames.], batch size: 15, lr: 2.12e-04 +2022-05-06 
22:06:39,588 INFO [train.py:715] (7/8) Epoch 10, batch 24700, loss[loss=0.1478, simple_loss=0.2172, pruned_loss=0.03918, over 4871.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2114, pruned_loss=0.03323, over 973240.51 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 22:07:18,187 INFO [train.py:715] (7/8) Epoch 10, batch 24750, loss[loss=0.1268, simple_loss=0.2054, pruned_loss=0.02416, over 4812.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03328, over 972622.23 frames.], batch size: 25, lr: 2.12e-04 +2022-05-06 22:07:57,675 INFO [train.py:715] (7/8) Epoch 10, batch 24800, loss[loss=0.1503, simple_loss=0.2194, pruned_loss=0.04057, over 4928.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03258, over 972196.14 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 22:08:36,825 INFO [train.py:715] (7/8) Epoch 10, batch 24850, loss[loss=0.1482, simple_loss=0.2207, pruned_loss=0.03778, over 4917.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03261, over 972243.47 frames.], batch size: 23, lr: 2.12e-04 +2022-05-06 22:09:14,901 INFO [train.py:715] (7/8) Epoch 10, batch 24900, loss[loss=0.1312, simple_loss=0.1954, pruned_loss=0.03352, over 4878.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03244, over 973171.56 frames.], batch size: 22, lr: 2.12e-04 +2022-05-06 22:09:54,526 INFO [train.py:715] (7/8) Epoch 10, batch 24950, loss[loss=0.149, simple_loss=0.2256, pruned_loss=0.03623, over 4778.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03247, over 972839.28 frames.], batch size: 19, lr: 2.12e-04 +2022-05-06 22:10:34,376 INFO [train.py:715] (7/8) Epoch 10, batch 25000, loss[loss=0.1534, simple_loss=0.2322, pruned_loss=0.03725, over 4900.00 frames.], tot_loss[loss=0.138, simple_loss=0.2106, pruned_loss=0.03267, over 972668.75 frames.], batch size: 39, lr: 2.12e-04 +2022-05-06 22:11:13,228 INFO [train.py:715] (7/8) Epoch 10, batch 25050, loss[loss=0.1541, simple_loss=0.225, pruned_loss=0.04158, over 4918.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03317, over 973158.93 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 22:11:52,696 INFO [train.py:715] (7/8) Epoch 10, batch 25100, loss[loss=0.1272, simple_loss=0.1964, pruned_loss=0.02899, over 4879.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.0332, over 973421.97 frames.], batch size: 16, lr: 2.12e-04 +2022-05-06 22:12:32,720 INFO [train.py:715] (7/8) Epoch 10, batch 25150, loss[loss=0.1344, simple_loss=0.2128, pruned_loss=0.02802, over 4864.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2123, pruned_loss=0.03346, over 973518.60 frames.], batch size: 20, lr: 2.12e-04 +2022-05-06 22:13:12,210 INFO [train.py:715] (7/8) Epoch 10, batch 25200, loss[loss=0.1398, simple_loss=0.2107, pruned_loss=0.03444, over 4917.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2126, pruned_loss=0.03376, over 973151.54 frames.], batch size: 39, lr: 2.12e-04 +2022-05-06 22:13:50,343 INFO [train.py:715] (7/8) Epoch 10, batch 25250, loss[loss=0.1177, simple_loss=0.196, pruned_loss=0.0197, over 4821.00 frames.], tot_loss[loss=0.141, simple_loss=0.2136, pruned_loss=0.0342, over 973052.39 frames.], batch size: 27, lr: 2.12e-04 +2022-05-06 22:14:29,217 INFO [train.py:715] (7/8) Epoch 10, batch 25300, loss[loss=0.1389, simple_loss=0.2133, pruned_loss=0.03229, over 4916.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2136, pruned_loss=0.03374, over 972913.81 frames.], batch size: 18, lr: 2.12e-04 +2022-05-06 22:15:08,864 INFO 
[train.py:715] (7/8) Epoch 10, batch 25350, loss[loss=0.1267, simple_loss=0.1935, pruned_loss=0.02989, over 4828.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2129, pruned_loss=0.0333, over 972180.20 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 22:15:47,381 INFO [train.py:715] (7/8) Epoch 10, batch 25400, loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02969, over 4636.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03353, over 971401.11 frames.], batch size: 13, lr: 2.12e-04 +2022-05-06 22:16:26,239 INFO [train.py:715] (7/8) Epoch 10, batch 25450, loss[loss=0.1612, simple_loss=0.2296, pruned_loss=0.04642, over 4951.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2132, pruned_loss=0.03358, over 971467.65 frames.], batch size: 35, lr: 2.12e-04 +2022-05-06 22:17:06,160 INFO [train.py:715] (7/8) Epoch 10, batch 25500, loss[loss=0.1164, simple_loss=0.1845, pruned_loss=0.02416, over 4748.00 frames.], tot_loss[loss=0.14, simple_loss=0.2134, pruned_loss=0.03336, over 972165.64 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:17:45,980 INFO [train.py:715] (7/8) Epoch 10, batch 25550, loss[loss=0.1409, simple_loss=0.2095, pruned_loss=0.03615, over 4741.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2127, pruned_loss=0.03307, over 972915.99 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:18:24,959 INFO [train.py:715] (7/8) Epoch 10, batch 25600, loss[loss=0.164, simple_loss=0.2355, pruned_loss=0.0463, over 4895.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03368, over 972544.14 frames.], batch size: 19, lr: 2.11e-04 +2022-05-06 22:19:05,114 INFO [train.py:715] (7/8) Epoch 10, batch 25650, loss[loss=0.1273, simple_loss=0.1998, pruned_loss=0.02736, over 4760.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03347, over 972815.51 frames.], batch size: 19, lr: 2.11e-04 +2022-05-06 22:19:45,488 INFO [train.py:715] (7/8) Epoch 10, batch 25700, loss[loss=0.1322, simple_loss=0.2059, pruned_loss=0.02924, over 4923.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03353, over 972659.83 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:20:25,352 INFO [train.py:715] (7/8) Epoch 10, batch 25750, loss[loss=0.1476, simple_loss=0.2148, pruned_loss=0.04019, over 4823.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.0337, over 972689.00 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:21:04,757 INFO [train.py:715] (7/8) Epoch 10, batch 25800, loss[loss=0.1137, simple_loss=0.1888, pruned_loss=0.01933, over 4937.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2133, pruned_loss=0.0336, over 972422.73 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 22:21:45,291 INFO [train.py:715] (7/8) Epoch 10, batch 25850, loss[loss=0.1281, simple_loss=0.1965, pruned_loss=0.02986, over 4816.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03385, over 972169.53 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 22:22:25,227 INFO [train.py:715] (7/8) Epoch 10, batch 25900, loss[loss=0.1132, simple_loss=0.1919, pruned_loss=0.01719, over 4832.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03344, over 973032.32 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:23:03,942 INFO [train.py:715] (7/8) Epoch 10, batch 25950, loss[loss=0.1219, simple_loss=0.2089, pruned_loss=0.01747, over 4770.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03313, over 973199.22 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:23:42,716 INFO [train.py:715] 
(7/8) Epoch 10, batch 26000, loss[loss=0.1486, simple_loss=0.2285, pruned_loss=0.03438, over 4880.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.0333, over 972369.22 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:24:21,990 INFO [train.py:715] (7/8) Epoch 10, batch 26050, loss[loss=0.1832, simple_loss=0.252, pruned_loss=0.05723, over 4946.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.03348, over 972613.89 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:25:00,975 INFO [train.py:715] (7/8) Epoch 10, batch 26100, loss[loss=0.147, simple_loss=0.2175, pruned_loss=0.03829, over 4991.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2125, pruned_loss=0.03351, over 972911.25 frames.], batch size: 14, lr: 2.11e-04 +2022-05-06 22:25:40,338 INFO [train.py:715] (7/8) Epoch 10, batch 26150, loss[loss=0.1267, simple_loss=0.2087, pruned_loss=0.02236, over 4713.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.0333, over 972235.21 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:26:21,097 INFO [train.py:715] (7/8) Epoch 10, batch 26200, loss[loss=0.1492, simple_loss=0.2202, pruned_loss=0.03911, over 4793.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.03332, over 971411.60 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:27:00,353 INFO [train.py:715] (7/8) Epoch 10, batch 26250, loss[loss=0.1061, simple_loss=0.1775, pruned_loss=0.01731, over 4799.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03347, over 971515.53 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 22:27:40,000 INFO [train.py:715] (7/8) Epoch 10, batch 26300, loss[loss=0.1278, simple_loss=0.194, pruned_loss=0.03086, over 4814.00 frames.], tot_loss[loss=0.14, simple_loss=0.2129, pruned_loss=0.03356, over 971898.76 frames.], batch size: 27, lr: 2.11e-04 +2022-05-06 22:28:19,567 INFO [train.py:715] (7/8) Epoch 10, batch 26350, loss[loss=0.1839, simple_loss=0.25, pruned_loss=0.05885, over 4920.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.03364, over 971080.99 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:28:59,167 INFO [train.py:715] (7/8) Epoch 10, batch 26400, loss[loss=0.189, simple_loss=0.2606, pruned_loss=0.05873, over 4919.00 frames.], tot_loss[loss=0.14, simple_loss=0.2125, pruned_loss=0.03372, over 971258.19 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:29:38,873 INFO [train.py:715] (7/8) Epoch 10, batch 26450, loss[loss=0.1257, simple_loss=0.1962, pruned_loss=0.0276, over 4889.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03401, over 970956.79 frames.], batch size: 22, lr: 2.11e-04 +2022-05-06 22:30:18,683 INFO [train.py:715] (7/8) Epoch 10, batch 26500, loss[loss=0.157, simple_loss=0.2313, pruned_loss=0.04133, over 4768.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2134, pruned_loss=0.03377, over 970024.91 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:30:59,100 INFO [train.py:715] (7/8) Epoch 10, batch 26550, loss[loss=0.1257, simple_loss=0.1975, pruned_loss=0.02691, over 4828.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03368, over 970742.89 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:31:37,641 INFO [train.py:715] (7/8) Epoch 10, batch 26600, loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02864, over 4788.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2129, pruned_loss=0.03415, over 971414.56 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:32:17,163 INFO [train.py:715] (7/8) Epoch 10, batch 
26650, loss[loss=0.1494, simple_loss=0.2243, pruned_loss=0.03727, over 4766.00 frames.], tot_loss[loss=0.141, simple_loss=0.2134, pruned_loss=0.03429, over 970917.88 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:32:56,674 INFO [train.py:715] (7/8) Epoch 10, batch 26700, loss[loss=0.1341, simple_loss=0.2022, pruned_loss=0.03302, over 4850.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03417, over 971135.22 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:33:36,178 INFO [train.py:715] (7/8) Epoch 10, batch 26750, loss[loss=0.09492, simple_loss=0.1577, pruned_loss=0.01607, over 4825.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03403, over 972267.86 frames.], batch size: 12, lr: 2.11e-04 +2022-05-06 22:34:14,865 INFO [train.py:715] (7/8) Epoch 10, batch 26800, loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03104, over 4751.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2129, pruned_loss=0.03405, over 971988.75 frames.], batch size: 14, lr: 2.11e-04 +2022-05-06 22:34:54,623 INFO [train.py:715] (7/8) Epoch 10, batch 26850, loss[loss=0.1773, simple_loss=0.2364, pruned_loss=0.05908, over 4644.00 frames.], tot_loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03411, over 971232.82 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 22:35:34,130 INFO [train.py:715] (7/8) Epoch 10, batch 26900, loss[loss=0.1437, simple_loss=0.212, pruned_loss=0.03768, over 4758.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03349, over 971068.74 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:36:12,943 INFO [train.py:715] (7/8) Epoch 10, batch 26950, loss[loss=0.1541, simple_loss=0.2326, pruned_loss=0.03782, over 4943.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03344, over 971092.83 frames.], batch size: 39, lr: 2.11e-04 +2022-05-06 22:36:51,894 INFO [train.py:715] (7/8) Epoch 10, batch 27000, loss[loss=0.1302, simple_loss=0.2079, pruned_loss=0.02622, over 4908.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03373, over 971780.00 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:36:51,895 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 22:37:01,643 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1063, simple_loss=0.1906, pruned_loss=0.01104, over 914524.00 frames. 
+2022-05-06 22:37:41,041 INFO [train.py:715] (7/8) Epoch 10, batch 27050, loss[loss=0.142, simple_loss=0.2129, pruned_loss=0.03559, over 4902.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03377, over 971935.08 frames.], batch size: 19, lr: 2.11e-04 +2022-05-06 22:38:21,002 INFO [train.py:715] (7/8) Epoch 10, batch 27100, loss[loss=0.1855, simple_loss=0.247, pruned_loss=0.06202, over 4836.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03369, over 971660.74 frames.], batch size: 30, lr: 2.11e-04 +2022-05-06 22:38:59,619 INFO [train.py:715] (7/8) Epoch 10, batch 27150, loss[loss=0.1539, simple_loss=0.2122, pruned_loss=0.04778, over 4970.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03359, over 971323.88 frames.], batch size: 35, lr: 2.11e-04 +2022-05-06 22:39:38,790 INFO [train.py:715] (7/8) Epoch 10, batch 27200, loss[loss=0.1169, simple_loss=0.1884, pruned_loss=0.02275, over 4749.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03379, over 971177.48 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:40:18,812 INFO [train.py:715] (7/8) Epoch 10, batch 27250, loss[loss=0.1269, simple_loss=0.1904, pruned_loss=0.03167, over 4820.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.0335, over 971122.99 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:40:58,233 INFO [train.py:715] (7/8) Epoch 10, batch 27300, loss[loss=0.1177, simple_loss=0.1969, pruned_loss=0.01924, over 4781.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03302, over 972301.19 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:41:36,434 INFO [train.py:715] (7/8) Epoch 10, batch 27350, loss[loss=0.1446, simple_loss=0.2193, pruned_loss=0.03494, over 4951.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2131, pruned_loss=0.03331, over 972735.85 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 22:42:15,732 INFO [train.py:715] (7/8) Epoch 10, batch 27400, loss[loss=0.1494, simple_loss=0.2286, pruned_loss=0.03513, over 4965.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2137, pruned_loss=0.03375, over 972594.02 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 22:42:55,895 INFO [train.py:715] (7/8) Epoch 10, batch 27450, loss[loss=0.1165, simple_loss=0.1903, pruned_loss=0.02135, over 4889.00 frames.], tot_loss[loss=0.1412, simple_loss=0.214, pruned_loss=0.03413, over 972954.22 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:43:34,163 INFO [train.py:715] (7/8) Epoch 10, batch 27500, loss[loss=0.1433, simple_loss=0.2103, pruned_loss=0.03815, over 4796.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2143, pruned_loss=0.03444, over 972731.30 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:44:13,414 INFO [train.py:715] (7/8) Epoch 10, batch 27550, loss[loss=0.1458, simple_loss=0.2116, pruned_loss=0.04004, over 4860.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2132, pruned_loss=0.03402, over 973007.35 frames.], batch size: 20, lr: 2.11e-04 +2022-05-06 22:44:52,782 INFO [train.py:715] (7/8) Epoch 10, batch 27600, loss[loss=0.1585, simple_loss=0.2258, pruned_loss=0.04557, over 4973.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2133, pruned_loss=0.03397, over 972488.75 frames.], batch size: 15, lr: 2.11e-04 +2022-05-06 22:45:32,116 INFO [train.py:715] (7/8) Epoch 10, batch 27650, loss[loss=0.1357, simple_loss=0.2012, pruned_loss=0.03508, over 4833.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2132, pruned_loss=0.03366, over 973864.61 frames.], batch size: 27, lr: 2.11e-04 +2022-05-06 
22:46:11,031 INFO [train.py:715] (7/8) Epoch 10, batch 27700, loss[loss=0.1167, simple_loss=0.1897, pruned_loss=0.02185, over 4825.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2134, pruned_loss=0.03374, over 972741.18 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 22:46:51,026 INFO [train.py:715] (7/8) Epoch 10, batch 27750, loss[loss=0.1232, simple_loss=0.2079, pruned_loss=0.01929, over 4988.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03364, over 972496.27 frames.], batch size: 25, lr: 2.11e-04 +2022-05-06 22:47:31,102 INFO [train.py:715] (7/8) Epoch 10, batch 27800, loss[loss=0.1295, simple_loss=0.2041, pruned_loss=0.02742, over 4778.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03349, over 972781.47 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:48:10,309 INFO [train.py:715] (7/8) Epoch 10, batch 27850, loss[loss=0.1547, simple_loss=0.2325, pruned_loss=0.03846, over 4904.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03279, over 972204.70 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:48:50,680 INFO [train.py:715] (7/8) Epoch 10, batch 27900, loss[loss=0.1297, simple_loss=0.1971, pruned_loss=0.03114, over 4825.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03285, over 972394.00 frames.], batch size: 13, lr: 2.11e-04 +2022-05-06 22:49:34,040 INFO [train.py:715] (7/8) Epoch 10, batch 27950, loss[loss=0.1429, simple_loss=0.2198, pruned_loss=0.03304, over 4929.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03243, over 973285.11 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:50:13,534 INFO [train.py:715] (7/8) Epoch 10, batch 28000, loss[loss=0.1437, simple_loss=0.2256, pruned_loss=0.03091, over 4925.00 frames.], tot_loss[loss=0.137, simple_loss=0.21, pruned_loss=0.032, over 973595.58 frames.], batch size: 29, lr: 2.11e-04 +2022-05-06 22:50:53,593 INFO [train.py:715] (7/8) Epoch 10, batch 28050, loss[loss=0.133, simple_loss=0.2016, pruned_loss=0.03217, over 4793.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.0324, over 973834.66 frames.], batch size: 14, lr: 2.11e-04 +2022-05-06 22:51:34,465 INFO [train.py:715] (7/8) Epoch 10, batch 28100, loss[loss=0.1377, simple_loss=0.2193, pruned_loss=0.02806, over 4927.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03234, over 973154.89 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:52:15,138 INFO [train.py:715] (7/8) Epoch 10, batch 28150, loss[loss=0.1202, simple_loss=0.1916, pruned_loss=0.02439, over 4836.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2108, pruned_loss=0.03267, over 973044.07 frames.], batch size: 12, lr: 2.11e-04 +2022-05-06 22:52:54,872 INFO [train.py:715] (7/8) Epoch 10, batch 28200, loss[loss=0.1113, simple_loss=0.187, pruned_loss=0.01775, over 4814.00 frames.], tot_loss[loss=0.139, simple_loss=0.2114, pruned_loss=0.03335, over 972093.24 frames.], batch size: 26, lr: 2.11e-04 +2022-05-06 22:53:35,216 INFO [train.py:715] (7/8) Epoch 10, batch 28250, loss[loss=0.1605, simple_loss=0.2347, pruned_loss=0.04311, over 4887.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03383, over 972717.89 frames.], batch size: 16, lr: 2.11e-04 +2022-05-06 22:54:16,803 INFO [train.py:715] (7/8) Epoch 10, batch 28300, loss[loss=0.1257, simple_loss=0.2095, pruned_loss=0.02093, over 4801.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03323, over 973533.90 frames.], batch size: 21, lr: 2.11e-04 +2022-05-06 22:54:56,895 
INFO [train.py:715] (7/8) Epoch 10, batch 28350, loss[loss=0.1715, simple_loss=0.2392, pruned_loss=0.05194, over 4909.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03364, over 973920.92 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:55:37,456 INFO [train.py:715] (7/8) Epoch 10, batch 28400, loss[loss=0.1443, simple_loss=0.2203, pruned_loss=0.03413, over 4785.00 frames.], tot_loss[loss=0.1414, simple_loss=0.214, pruned_loss=0.03439, over 973189.84 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:56:19,122 INFO [train.py:715] (7/8) Epoch 10, batch 28450, loss[loss=0.1392, simple_loss=0.217, pruned_loss=0.03073, over 4893.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2133, pruned_loss=0.03376, over 972713.73 frames.], batch size: 22, lr: 2.11e-04 +2022-05-06 22:57:00,139 INFO [train.py:715] (7/8) Epoch 10, batch 28500, loss[loss=0.1543, simple_loss=0.2232, pruned_loss=0.04265, over 4782.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2127, pruned_loss=0.03331, over 972923.13 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 22:57:40,544 INFO [train.py:715] (7/8) Epoch 10, batch 28550, loss[loss=0.1434, simple_loss=0.2191, pruned_loss=0.03389, over 4946.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03324, over 973248.03 frames.], batch size: 35, lr: 2.11e-04 +2022-05-06 22:58:21,444 INFO [train.py:715] (7/8) Epoch 10, batch 28600, loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03332, over 4789.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03341, over 972930.33 frames.], batch size: 18, lr: 2.11e-04 +2022-05-06 22:59:03,593 INFO [train.py:715] (7/8) Epoch 10, batch 28650, loss[loss=0.1457, simple_loss=0.2168, pruned_loss=0.03732, over 4964.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03312, over 973886.37 frames.], batch size: 35, lr: 2.11e-04 +2022-05-06 22:59:43,745 INFO [train.py:715] (7/8) Epoch 10, batch 28700, loss[loss=0.1467, simple_loss=0.2136, pruned_loss=0.03994, over 4975.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2135, pruned_loss=0.0334, over 974526.43 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 23:00:24,820 INFO [train.py:715] (7/8) Epoch 10, batch 28750, loss[loss=0.1339, simple_loss=0.2187, pruned_loss=0.02458, over 4974.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03309, over 974437.69 frames.], batch size: 24, lr: 2.11e-04 +2022-05-06 23:01:05,928 INFO [train.py:715] (7/8) Epoch 10, batch 28800, loss[loss=0.1184, simple_loss=0.1882, pruned_loss=0.02436, over 4779.00 frames.], tot_loss[loss=0.1393, simple_loss=0.213, pruned_loss=0.03276, over 974165.04 frames.], batch size: 12, lr: 2.11e-04 +2022-05-06 23:01:46,832 INFO [train.py:715] (7/8) Epoch 10, batch 28850, loss[loss=0.1411, simple_loss=0.2195, pruned_loss=0.0313, over 4987.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2136, pruned_loss=0.03339, over 973080.99 frames.], batch size: 31, lr: 2.11e-04 +2022-05-06 23:02:27,345 INFO [train.py:715] (7/8) Epoch 10, batch 28900, loss[loss=0.1599, simple_loss=0.2268, pruned_loss=0.04653, over 4988.00 frames.], tot_loss[loss=0.14, simple_loss=0.2134, pruned_loss=0.03332, over 973203.78 frames.], batch size: 25, lr: 2.11e-04 +2022-05-06 23:03:08,209 INFO [train.py:715] (7/8) Epoch 10, batch 28950, loss[loss=0.1409, simple_loss=0.2106, pruned_loss=0.03558, over 4762.00 frames.], tot_loss[loss=0.139, simple_loss=0.2124, pruned_loss=0.03277, over 974094.52 frames.], batch size: 17, lr: 2.11e-04 +2022-05-06 23:03:49,283 INFO 
[train.py:715] (7/8) Epoch 10, batch 29000, loss[loss=0.1712, simple_loss=0.2286, pruned_loss=0.05691, over 4930.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03283, over 972990.64 frames.], batch size: 23, lr: 2.11e-04 +2022-05-06 23:04:28,427 INFO [train.py:715] (7/8) Epoch 10, batch 29050, loss[loss=0.1296, simple_loss=0.2058, pruned_loss=0.02671, over 4873.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03279, over 972937.88 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:05:07,302 INFO [train.py:715] (7/8) Epoch 10, batch 29100, loss[loss=0.1246, simple_loss=0.1946, pruned_loss=0.02735, over 4720.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03257, over 972728.04 frames.], batch size: 12, lr: 2.10e-04 +2022-05-06 23:05:47,481 INFO [train.py:715] (7/8) Epoch 10, batch 29150, loss[loss=0.1224, simple_loss=0.1959, pruned_loss=0.02442, over 4781.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.0322, over 972517.81 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:06:27,779 INFO [train.py:715] (7/8) Epoch 10, batch 29200, loss[loss=0.1539, simple_loss=0.2359, pruned_loss=0.03593, over 4759.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03252, over 972928.77 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:07:06,677 INFO [train.py:715] (7/8) Epoch 10, batch 29250, loss[loss=0.1323, simple_loss=0.2106, pruned_loss=0.02698, over 4820.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2124, pruned_loss=0.03252, over 973585.50 frames.], batch size: 27, lr: 2.10e-04 +2022-05-06 23:07:46,921 INFO [train.py:715] (7/8) Epoch 10, batch 29300, loss[loss=0.1647, simple_loss=0.2313, pruned_loss=0.04905, over 4780.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2131, pruned_loss=0.03316, over 972980.90 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:08:27,017 INFO [train.py:715] (7/8) Epoch 10, batch 29350, loss[loss=0.1167, simple_loss=0.1856, pruned_loss=0.0239, over 4874.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03314, over 972439.88 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:09:06,025 INFO [train.py:715] (7/8) Epoch 10, batch 29400, loss[loss=0.1238, simple_loss=0.1933, pruned_loss=0.02713, over 4784.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2131, pruned_loss=0.03325, over 972076.17 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:09:45,806 INFO [train.py:715] (7/8) Epoch 10, batch 29450, loss[loss=0.1214, simple_loss=0.1856, pruned_loss=0.02853, over 4984.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03342, over 973151.60 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:10:26,004 INFO [train.py:715] (7/8) Epoch 10, batch 29500, loss[loss=0.1415, simple_loss=0.2103, pruned_loss=0.03636, over 4760.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03337, over 972186.26 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:11:05,708 INFO [train.py:715] (7/8) Epoch 10, batch 29550, loss[loss=0.1342, simple_loss=0.2125, pruned_loss=0.02794, over 4832.00 frames.], tot_loss[loss=0.14, simple_loss=0.2128, pruned_loss=0.0336, over 973021.18 frames.], batch size: 26, lr: 2.10e-04 +2022-05-06 23:11:44,345 INFO [train.py:715] (7/8) Epoch 10, batch 29600, loss[loss=0.1195, simple_loss=0.196, pruned_loss=0.02157, over 4980.00 frames.], tot_loss[loss=0.1405, simple_loss=0.2135, pruned_loss=0.03378, over 973166.00 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:12:24,000 INFO 
[train.py:715] (7/8) Epoch 10, batch 29650, loss[loss=0.1433, simple_loss=0.2173, pruned_loss=0.03467, over 4783.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03395, over 973175.09 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:13:03,437 INFO [train.py:715] (7/8) Epoch 10, batch 29700, loss[loss=0.1048, simple_loss=0.1713, pruned_loss=0.0192, over 4989.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.0335, over 972734.12 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:13:42,105 INFO [train.py:715] (7/8) Epoch 10, batch 29750, loss[loss=0.1315, simple_loss=0.21, pruned_loss=0.02652, over 4826.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03356, over 972155.71 frames.], batch size: 26, lr: 2.10e-04 +2022-05-06 23:14:21,082 INFO [train.py:715] (7/8) Epoch 10, batch 29800, loss[loss=0.1572, simple_loss=0.2332, pruned_loss=0.04057, over 4806.00 frames.], tot_loss[loss=0.1414, simple_loss=0.2143, pruned_loss=0.03424, over 972020.18 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:15:00,555 INFO [train.py:715] (7/8) Epoch 10, batch 29850, loss[loss=0.1068, simple_loss=0.1882, pruned_loss=0.01264, over 4636.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2138, pruned_loss=0.03404, over 972164.92 frames.], batch size: 13, lr: 2.10e-04 +2022-05-06 23:15:39,440 INFO [train.py:715] (7/8) Epoch 10, batch 29900, loss[loss=0.1478, simple_loss=0.2232, pruned_loss=0.03616, over 4851.00 frames.], tot_loss[loss=0.1411, simple_loss=0.214, pruned_loss=0.03406, over 972080.86 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:16:17,896 INFO [train.py:715] (7/8) Epoch 10, batch 29950, loss[loss=0.164, simple_loss=0.2264, pruned_loss=0.05078, over 4775.00 frames.], tot_loss[loss=0.1409, simple_loss=0.2137, pruned_loss=0.03403, over 971915.30 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:16:57,115 INFO [train.py:715] (7/8) Epoch 10, batch 30000, loss[loss=0.1415, simple_loss=0.2175, pruned_loss=0.03274, over 4967.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2141, pruned_loss=0.03401, over 972604.03 frames.], batch size: 39, lr: 2.10e-04 +2022-05-06 23:16:57,115 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 23:17:06,542 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1063, simple_loss=0.1906, pruned_loss=0.01106, over 914524.00 frames. 
+2022-05-06 23:17:46,310 INFO [train.py:715] (7/8) Epoch 10, batch 30050, loss[loss=0.1648, simple_loss=0.2352, pruned_loss=0.04715, over 4891.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2136, pruned_loss=0.03386, over 973081.62 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:18:25,806 INFO [train.py:715] (7/8) Epoch 10, batch 30100, loss[loss=0.131, simple_loss=0.1984, pruned_loss=0.03183, over 4761.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03345, over 971858.25 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:19:04,200 INFO [train.py:715] (7/8) Epoch 10, batch 30150, loss[loss=0.1564, simple_loss=0.2464, pruned_loss=0.03314, over 4983.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03339, over 971797.40 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:19:44,549 INFO [train.py:715] (7/8) Epoch 10, batch 30200, loss[loss=0.1713, simple_loss=0.2418, pruned_loss=0.05036, over 4968.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03368, over 972325.15 frames.], batch size: 35, lr: 2.10e-04 +2022-05-06 23:20:24,573 INFO [train.py:715] (7/8) Epoch 10, batch 30250, loss[loss=0.1327, simple_loss=0.206, pruned_loss=0.02975, over 4880.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03321, over 972144.09 frames.], batch size: 32, lr: 2.10e-04 +2022-05-06 23:21:02,963 INFO [train.py:715] (7/8) Epoch 10, batch 30300, loss[loss=0.1273, simple_loss=0.2116, pruned_loss=0.0215, over 4988.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.0334, over 972803.69 frames.], batch size: 25, lr: 2.10e-04 +2022-05-06 23:21:41,379 INFO [train.py:715] (7/8) Epoch 10, batch 30350, loss[loss=0.1376, simple_loss=0.2037, pruned_loss=0.03572, over 4783.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03286, over 972483.09 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:22:21,185 INFO [train.py:715] (7/8) Epoch 10, batch 30400, loss[loss=0.125, simple_loss=0.2029, pruned_loss=0.0236, over 4865.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03262, over 973389.29 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:23:00,550 INFO [train.py:715] (7/8) Epoch 10, batch 30450, loss[loss=0.1566, simple_loss=0.2244, pruned_loss=0.0444, over 4980.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03237, over 974305.26 frames.], batch size: 33, lr: 2.10e-04 +2022-05-06 23:23:38,709 INFO [train.py:715] (7/8) Epoch 10, batch 30500, loss[loss=0.1347, simple_loss=0.2072, pruned_loss=0.03112, over 4898.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03255, over 973485.55 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:24:18,305 INFO [train.py:715] (7/8) Epoch 10, batch 30550, loss[loss=0.1437, simple_loss=0.2198, pruned_loss=0.03384, over 4806.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03219, over 973645.95 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:24:57,944 INFO [train.py:715] (7/8) Epoch 10, batch 30600, loss[loss=0.1498, simple_loss=0.222, pruned_loss=0.0388, over 4858.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2123, pruned_loss=0.03253, over 973243.59 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:25:36,408 INFO [train.py:715] (7/8) Epoch 10, batch 30650, loss[loss=0.1512, simple_loss=0.2222, pruned_loss=0.04008, over 4881.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.03271, over 973000.15 frames.], batch size: 32, lr: 2.10e-04 +2022-05-06 
23:26:15,886 INFO [train.py:715] (7/8) Epoch 10, batch 30700, loss[loss=0.1281, simple_loss=0.2107, pruned_loss=0.02272, over 4796.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03286, over 973159.96 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:26:55,009 INFO [train.py:715] (7/8) Epoch 10, batch 30750, loss[loss=0.1539, simple_loss=0.2306, pruned_loss=0.03857, over 4756.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.03258, over 973372.01 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:27:33,901 INFO [train.py:715] (7/8) Epoch 10, batch 30800, loss[loss=0.1241, simple_loss=0.2026, pruned_loss=0.02283, over 4785.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2117, pruned_loss=0.03231, over 972303.77 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:28:12,411 INFO [train.py:715] (7/8) Epoch 10, batch 30850, loss[loss=0.1342, simple_loss=0.1992, pruned_loss=0.03464, over 4790.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03252, over 971275.51 frames.], batch size: 14, lr: 2.10e-04 +2022-05-06 23:28:52,166 INFO [train.py:715] (7/8) Epoch 10, batch 30900, loss[loss=0.1301, simple_loss=0.2018, pruned_loss=0.02917, over 4959.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03224, over 972158.01 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:29:32,114 INFO [train.py:715] (7/8) Epoch 10, batch 30950, loss[loss=0.1268, simple_loss=0.1997, pruned_loss=0.02692, over 4827.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03234, over 972836.99 frames.], batch size: 26, lr: 2.10e-04 +2022-05-06 23:30:11,645 INFO [train.py:715] (7/8) Epoch 10, batch 31000, loss[loss=0.1433, simple_loss=0.2123, pruned_loss=0.03718, over 4940.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03281, over 972519.86 frames.], batch size: 21, lr: 2.10e-04 +2022-05-06 23:30:50,321 INFO [train.py:715] (7/8) Epoch 10, batch 31050, loss[loss=0.1091, simple_loss=0.1791, pruned_loss=0.01959, over 4825.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03313, over 972474.45 frames.], batch size: 13, lr: 2.10e-04 +2022-05-06 23:31:29,593 INFO [train.py:715] (7/8) Epoch 10, batch 31100, loss[loss=0.1269, simple_loss=0.1986, pruned_loss=0.02761, over 4891.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03287, over 972990.44 frames.], batch size: 22, lr: 2.10e-04 +2022-05-06 23:32:09,328 INFO [train.py:715] (7/8) Epoch 10, batch 31150, loss[loss=0.1107, simple_loss=0.1772, pruned_loss=0.0221, over 4881.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03269, over 973147.08 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:32:47,338 INFO [train.py:715] (7/8) Epoch 10, batch 31200, loss[loss=0.134, simple_loss=0.2087, pruned_loss=0.02964, over 4960.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2116, pruned_loss=0.03296, over 972450.56 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:33:26,831 INFO [train.py:715] (7/8) Epoch 10, batch 31250, loss[loss=0.1465, simple_loss=0.2194, pruned_loss=0.03678, over 4947.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2114, pruned_loss=0.03276, over 973011.05 frames.], batch size: 39, lr: 2.10e-04 +2022-05-06 23:34:06,255 INFO [train.py:715] (7/8) Epoch 10, batch 31300, loss[loss=0.1154, simple_loss=0.1922, pruned_loss=0.01931, over 4900.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2115, pruned_loss=0.03318, over 972742.25 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:34:45,244 
INFO [train.py:715] (7/8) Epoch 10, batch 31350, loss[loss=0.1453, simple_loss=0.2206, pruned_loss=0.03504, over 4892.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03328, over 972968.91 frames.], batch size: 16, lr: 2.10e-04 +2022-05-06 23:35:23,743 INFO [train.py:715] (7/8) Epoch 10, batch 31400, loss[loss=0.1428, simple_loss=0.2067, pruned_loss=0.03942, over 4875.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03368, over 973042.78 frames.], batch size: 32, lr: 2.10e-04 +2022-05-06 23:36:02,751 INFO [train.py:715] (7/8) Epoch 10, batch 31450, loss[loss=0.1571, simple_loss=0.224, pruned_loss=0.04514, over 4947.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03332, over 972894.53 frames.], batch size: 35, lr: 2.10e-04 +2022-05-06 23:36:42,175 INFO [train.py:715] (7/8) Epoch 10, batch 31500, loss[loss=0.122, simple_loss=0.194, pruned_loss=0.025, over 4990.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2134, pruned_loss=0.03363, over 973094.51 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:37:19,857 INFO [train.py:715] (7/8) Epoch 10, batch 31550, loss[loss=0.1418, simple_loss=0.2143, pruned_loss=0.03462, over 4984.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03329, over 972314.70 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:37:58,956 INFO [train.py:715] (7/8) Epoch 10, batch 31600, loss[loss=0.1185, simple_loss=0.1893, pruned_loss=0.02389, over 4962.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03315, over 971737.27 frames.], batch size: 28, lr: 2.10e-04 +2022-05-06 23:38:38,098 INFO [train.py:715] (7/8) Epoch 10, batch 31650, loss[loss=0.1689, simple_loss=0.2412, pruned_loss=0.04826, over 4977.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03319, over 971577.68 frames.], batch size: 25, lr: 2.10e-04 +2022-05-06 23:39:17,246 INFO [train.py:715] (7/8) Epoch 10, batch 31700, loss[loss=0.1437, simple_loss=0.2234, pruned_loss=0.03202, over 4961.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.0331, over 972780.39 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:39:55,913 INFO [train.py:715] (7/8) Epoch 10, batch 31750, loss[loss=0.1473, simple_loss=0.2262, pruned_loss=0.03414, over 4842.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2117, pruned_loss=0.03305, over 972107.05 frames.], batch size: 34, lr: 2.10e-04 +2022-05-06 23:40:34,951 INFO [train.py:715] (7/8) Epoch 10, batch 31800, loss[loss=0.1397, simple_loss=0.2171, pruned_loss=0.03114, over 4987.00 frames.], tot_loss[loss=0.1381, simple_loss=0.211, pruned_loss=0.03259, over 972522.87 frames.], batch size: 28, lr: 2.10e-04 +2022-05-06 23:41:14,311 INFO [train.py:715] (7/8) Epoch 10, batch 31850, loss[loss=0.1359, simple_loss=0.2122, pruned_loss=0.02976, over 4929.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03271, over 972165.58 frames.], batch size: 23, lr: 2.10e-04 +2022-05-06 23:41:52,372 INFO [train.py:715] (7/8) Epoch 10, batch 31900, loss[loss=0.1353, simple_loss=0.211, pruned_loss=0.02983, over 4892.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03283, over 971649.71 frames.], batch size: 17, lr: 2.10e-04 +2022-05-06 23:42:31,529 INFO [train.py:715] (7/8) Epoch 10, batch 31950, loss[loss=0.1108, simple_loss=0.1969, pruned_loss=0.01236, over 4739.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03235, over 971765.51 frames.], batch size: 12, lr: 2.10e-04 +2022-05-06 23:43:10,932 INFO 
[train.py:715] (7/8) Epoch 10, batch 32000, loss[loss=0.1364, simple_loss=0.2116, pruned_loss=0.03063, over 4888.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03295, over 971497.39 frames.], batch size: 19, lr: 2.10e-04 +2022-05-06 23:43:49,604 INFO [train.py:715] (7/8) Epoch 10, batch 32050, loss[loss=0.1607, simple_loss=0.2388, pruned_loss=0.04136, over 4704.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03316, over 971571.49 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:44:27,919 INFO [train.py:715] (7/8) Epoch 10, batch 32100, loss[loss=0.1461, simple_loss=0.2111, pruned_loss=0.04053, over 4970.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2116, pruned_loss=0.03297, over 972114.62 frames.], batch size: 35, lr: 2.10e-04 +2022-05-06 23:45:06,917 INFO [train.py:715] (7/8) Epoch 10, batch 32150, loss[loss=0.1576, simple_loss=0.2269, pruned_loss=0.04419, over 4876.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2111, pruned_loss=0.03303, over 972056.51 frames.], batch size: 32, lr: 2.10e-04 +2022-05-06 23:45:45,856 INFO [train.py:715] (7/8) Epoch 10, batch 32200, loss[loss=0.1766, simple_loss=0.2417, pruned_loss=0.0558, over 4860.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03325, over 972110.52 frames.], batch size: 30, lr: 2.10e-04 +2022-05-06 23:46:23,728 INFO [train.py:715] (7/8) Epoch 10, batch 32250, loss[loss=0.1422, simple_loss=0.2152, pruned_loss=0.03464, over 4852.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2128, pruned_loss=0.0337, over 972133.23 frames.], batch size: 20, lr: 2.10e-04 +2022-05-06 23:47:02,887 INFO [train.py:715] (7/8) Epoch 10, batch 32300, loss[loss=0.112, simple_loss=0.1895, pruned_loss=0.01724, over 4843.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2133, pruned_loss=0.03368, over 971858.40 frames.], batch size: 26, lr: 2.10e-04 +2022-05-06 23:47:42,101 INFO [train.py:715] (7/8) Epoch 10, batch 32350, loss[loss=0.1193, simple_loss=0.197, pruned_loss=0.02077, over 4965.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2131, pruned_loss=0.03285, over 972204.76 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:48:20,906 INFO [train.py:715] (7/8) Epoch 10, batch 32400, loss[loss=0.1385, simple_loss=0.2058, pruned_loss=0.03556, over 4929.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03305, over 971885.15 frames.], batch size: 18, lr: 2.10e-04 +2022-05-06 23:48:59,314 INFO [train.py:715] (7/8) Epoch 10, batch 32450, loss[loss=0.1407, simple_loss=0.2182, pruned_loss=0.03162, over 4819.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03253, over 972351.72 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:49:38,633 INFO [train.py:715] (7/8) Epoch 10, batch 32500, loss[loss=0.1542, simple_loss=0.2125, pruned_loss=0.0479, over 4867.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03262, over 971987.81 frames.], batch size: 32, lr: 2.10e-04 +2022-05-06 23:50:18,347 INFO [train.py:715] (7/8) Epoch 10, batch 32550, loss[loss=0.1694, simple_loss=0.2276, pruned_loss=0.0556, over 4838.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03262, over 971286.49 frames.], batch size: 30, lr: 2.10e-04 +2022-05-06 23:50:56,265 INFO [train.py:715] (7/8) Epoch 10, batch 32600, loss[loss=0.1697, simple_loss=0.2373, pruned_loss=0.05107, over 4834.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03238, over 971691.24 frames.], batch size: 15, lr: 2.10e-04 +2022-05-06 23:51:35,798 INFO 
[train.py:715] (7/8) Epoch 10, batch 32650, loss[loss=0.1505, simple_loss=0.2235, pruned_loss=0.03879, over 4964.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03233, over 972051.32 frames.], batch size: 24, lr: 2.10e-04 +2022-05-06 23:52:15,568 INFO [train.py:715] (7/8) Epoch 10, batch 32700, loss[loss=0.1286, simple_loss=0.2062, pruned_loss=0.02549, over 4761.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03259, over 971469.24 frames.], batch size: 19, lr: 2.09e-04 +2022-05-06 23:52:53,820 INFO [train.py:715] (7/8) Epoch 10, batch 32750, loss[loss=0.1347, simple_loss=0.2038, pruned_loss=0.03279, over 4906.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2111, pruned_loss=0.03307, over 972141.18 frames.], batch size: 19, lr: 2.09e-04 +2022-05-06 23:53:34,511 INFO [train.py:715] (7/8) Epoch 10, batch 32800, loss[loss=0.1219, simple_loss=0.1913, pruned_loss=0.0263, over 4924.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2106, pruned_loss=0.03319, over 972875.03 frames.], batch size: 23, lr: 2.09e-04 +2022-05-06 23:54:14,776 INFO [train.py:715] (7/8) Epoch 10, batch 32850, loss[loss=0.1543, simple_loss=0.2295, pruned_loss=0.03952, over 4920.00 frames.], tot_loss[loss=0.139, simple_loss=0.2112, pruned_loss=0.03342, over 972783.24 frames.], batch size: 18, lr: 2.09e-04 +2022-05-06 23:54:54,886 INFO [train.py:715] (7/8) Epoch 10, batch 32900, loss[loss=0.1189, simple_loss=0.1946, pruned_loss=0.02166, over 4805.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2116, pruned_loss=0.03308, over 972594.75 frames.], batch size: 21, lr: 2.09e-04 +2022-05-06 23:55:34,229 INFO [train.py:715] (7/8) Epoch 10, batch 32950, loss[loss=0.1227, simple_loss=0.2063, pruned_loss=0.0196, over 4955.00 frames.], tot_loss[loss=0.139, simple_loss=0.2118, pruned_loss=0.03306, over 971952.00 frames.], batch size: 21, lr: 2.09e-04 +2022-05-06 23:56:14,908 INFO [train.py:715] (7/8) Epoch 10, batch 33000, loss[loss=0.1359, simple_loss=0.2115, pruned_loss=0.03015, over 4885.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2121, pruned_loss=0.03354, over 971583.62 frames.], batch size: 32, lr: 2.09e-04 +2022-05-06 23:56:14,908 INFO [train.py:733] (7/8) Computing validation loss +2022-05-06 23:56:24,576 INFO [train.py:742] (7/8) Epoch 10, validation: loss=0.1063, simple_loss=0.1905, pruned_loss=0.01103, over 914524.00 frames. 
+2022-05-06 23:57:03,965 INFO [train.py:715] (7/8) Epoch 10, batch 33050, loss[loss=0.1489, simple_loss=0.2185, pruned_loss=0.0397, over 4910.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03262, over 971711.13 frames.], batch size: 39, lr: 2.09e-04 +2022-05-06 23:57:43,742 INFO [train.py:715] (7/8) Epoch 10, batch 33100, loss[loss=0.1492, simple_loss=0.2223, pruned_loss=0.03808, over 4944.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.033, over 972306.98 frames.], batch size: 39, lr: 2.09e-04 +2022-05-06 23:58:21,692 INFO [train.py:715] (7/8) Epoch 10, batch 33150, loss[loss=0.1467, simple_loss=0.2242, pruned_loss=0.03461, over 4912.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03324, over 972551.42 frames.], batch size: 17, lr: 2.09e-04 +2022-05-06 23:59:00,824 INFO [train.py:715] (7/8) Epoch 10, batch 33200, loss[loss=0.1308, simple_loss=0.2159, pruned_loss=0.02281, over 4841.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.0332, over 973475.49 frames.], batch size: 13, lr: 2.09e-04 +2022-05-06 23:59:40,447 INFO [train.py:715] (7/8) Epoch 10, batch 33250, loss[loss=0.1101, simple_loss=0.1833, pruned_loss=0.01848, over 4830.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.03337, over 973437.43 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:00:18,361 INFO [train.py:715] (7/8) Epoch 10, batch 33300, loss[loss=0.1278, simple_loss=0.193, pruned_loss=0.03132, over 4803.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2119, pruned_loss=0.03343, over 973082.02 frames.], batch size: 25, lr: 2.09e-04 +2022-05-07 00:00:57,775 INFO [train.py:715] (7/8) Epoch 10, batch 33350, loss[loss=0.153, simple_loss=0.2205, pruned_loss=0.04273, over 4785.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.0333, over 973218.21 frames.], batch size: 18, lr: 2.09e-04 +2022-05-07 00:01:37,020 INFO [train.py:715] (7/8) Epoch 10, batch 33400, loss[loss=0.1197, simple_loss=0.203, pruned_loss=0.01818, over 4988.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2129, pruned_loss=0.0337, over 974211.22 frames.], batch size: 25, lr: 2.09e-04 +2022-05-07 00:02:16,545 INFO [train.py:715] (7/8) Epoch 10, batch 33450, loss[loss=0.1652, simple_loss=0.2404, pruned_loss=0.04499, over 4796.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03351, over 973850.21 frames.], batch size: 21, lr: 2.09e-04 +2022-05-07 00:02:54,368 INFO [train.py:715] (7/8) Epoch 10, batch 33500, loss[loss=0.1613, simple_loss=0.2302, pruned_loss=0.04621, over 4849.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03308, over 974054.91 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:03:33,960 INFO [train.py:715] (7/8) Epoch 10, batch 33550, loss[loss=0.1153, simple_loss=0.1833, pruned_loss=0.02365, over 4808.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03322, over 973972.70 frames.], batch size: 13, lr: 2.09e-04 +2022-05-07 00:04:13,564 INFO [train.py:715] (7/8) Epoch 10, batch 33600, loss[loss=0.1448, simple_loss=0.2212, pruned_loss=0.03425, over 4958.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03275, over 973349.52 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:04:52,115 INFO [train.py:715] (7/8) Epoch 10, batch 33650, loss[loss=0.1386, simple_loss=0.2129, pruned_loss=0.03215, over 4705.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03281, over 973126.36 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 
00:05:30,844 INFO [train.py:715] (7/8) Epoch 10, batch 33700, loss[loss=0.1391, simple_loss=0.2168, pruned_loss=0.03072, over 4922.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03292, over 973426.79 frames.], batch size: 21, lr: 2.09e-04 +2022-05-07 00:06:10,498 INFO [train.py:715] (7/8) Epoch 10, batch 33750, loss[loss=0.1344, simple_loss=0.2093, pruned_loss=0.02973, over 4947.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2113, pruned_loss=0.03262, over 973824.27 frames.], batch size: 21, lr: 2.09e-04 +2022-05-07 00:06:50,170 INFO [train.py:715] (7/8) Epoch 10, batch 33800, loss[loss=0.1792, simple_loss=0.2506, pruned_loss=0.05394, over 4903.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.0329, over 973923.16 frames.], batch size: 19, lr: 2.09e-04 +2022-05-07 00:07:29,176 INFO [train.py:715] (7/8) Epoch 10, batch 33850, loss[loss=0.1724, simple_loss=0.2543, pruned_loss=0.04527, over 4742.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03341, over 973023.31 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:08:08,842 INFO [train.py:715] (7/8) Epoch 10, batch 33900, loss[loss=0.1264, simple_loss=0.2021, pruned_loss=0.02536, over 4976.00 frames.], tot_loss[loss=0.1408, simple_loss=0.214, pruned_loss=0.03381, over 972711.10 frames.], batch size: 28, lr: 2.09e-04 +2022-05-07 00:08:48,749 INFO [train.py:715] (7/8) Epoch 10, batch 33950, loss[loss=0.1365, simple_loss=0.2157, pruned_loss=0.02867, over 4988.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.0335, over 972421.03 frames.], batch size: 28, lr: 2.09e-04 +2022-05-07 00:09:27,307 INFO [train.py:715] (7/8) Epoch 10, batch 34000, loss[loss=0.1413, simple_loss=0.2199, pruned_loss=0.03133, over 4943.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.03384, over 973375.24 frames.], batch size: 24, lr: 2.09e-04 +2022-05-07 00:10:06,615 INFO [train.py:715] (7/8) Epoch 10, batch 34050, loss[loss=0.1548, simple_loss=0.2275, pruned_loss=0.041, over 4840.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2131, pruned_loss=0.03365, over 972371.59 frames.], batch size: 30, lr: 2.09e-04 +2022-05-07 00:10:45,880 INFO [train.py:715] (7/8) Epoch 10, batch 34100, loss[loss=0.1347, simple_loss=0.2032, pruned_loss=0.0331, over 4758.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03318, over 973003.45 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:11:25,362 INFO [train.py:715] (7/8) Epoch 10, batch 34150, loss[loss=0.1486, simple_loss=0.2243, pruned_loss=0.03649, over 4967.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.03305, over 972424.11 frames.], batch size: 25, lr: 2.09e-04 +2022-05-07 00:12:04,923 INFO [train.py:715] (7/8) Epoch 10, batch 34200, loss[loss=0.1451, simple_loss=0.2223, pruned_loss=0.03394, over 4810.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03267, over 973391.01 frames.], batch size: 13, lr: 2.09e-04 +2022-05-07 00:12:44,143 INFO [train.py:715] (7/8) Epoch 10, batch 34250, loss[loss=0.1329, simple_loss=0.2054, pruned_loss=0.0302, over 4968.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03297, over 972981.71 frames.], batch size: 14, lr: 2.09e-04 +2022-05-07 00:13:23,648 INFO [train.py:715] (7/8) Epoch 10, batch 34300, loss[loss=0.1384, simple_loss=0.2088, pruned_loss=0.03399, over 4656.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03307, over 973109.94 frames.], batch size: 13, lr: 2.09e-04 +2022-05-07 00:14:03,553 
INFO [train.py:715] (7/8) Epoch 10, batch 34350, loss[loss=0.1415, simple_loss=0.2021, pruned_loss=0.04038, over 4911.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03323, over 971927.91 frames.], batch size: 18, lr: 2.09e-04 +2022-05-07 00:14:43,436 INFO [train.py:715] (7/8) Epoch 10, batch 34400, loss[loss=0.1528, simple_loss=0.2258, pruned_loss=0.03995, over 4808.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.03263, over 971860.87 frames.], batch size: 21, lr: 2.09e-04 +2022-05-07 00:15:23,584 INFO [train.py:715] (7/8) Epoch 10, batch 34450, loss[loss=0.1512, simple_loss=0.2209, pruned_loss=0.04078, over 4846.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2119, pruned_loss=0.03247, over 971956.87 frames.], batch size: 32, lr: 2.09e-04 +2022-05-07 00:16:03,653 INFO [train.py:715] (7/8) Epoch 10, batch 34500, loss[loss=0.1536, simple_loss=0.223, pruned_loss=0.04207, over 4939.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2125, pruned_loss=0.03278, over 972019.36 frames.], batch size: 23, lr: 2.09e-04 +2022-05-07 00:16:42,852 INFO [train.py:715] (7/8) Epoch 10, batch 34550, loss[loss=0.1295, simple_loss=0.1982, pruned_loss=0.03041, over 4877.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2127, pruned_loss=0.033, over 971688.54 frames.], batch size: 16, lr: 2.09e-04 +2022-05-07 00:17:23,154 INFO [train.py:715] (7/8) Epoch 10, batch 34600, loss[loss=0.1654, simple_loss=0.2229, pruned_loss=0.05394, over 4976.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2132, pruned_loss=0.0333, over 972307.85 frames.], batch size: 25, lr: 2.09e-04 +2022-05-07 00:18:03,613 INFO [train.py:715] (7/8) Epoch 10, batch 34650, loss[loss=0.1398, simple_loss=0.2061, pruned_loss=0.03677, over 4904.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2136, pruned_loss=0.03358, over 972393.92 frames.], batch size: 19, lr: 2.09e-04 +2022-05-07 00:18:42,651 INFO [train.py:715] (7/8) Epoch 10, batch 34700, loss[loss=0.1264, simple_loss=0.1998, pruned_loss=0.02647, over 4712.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2136, pruned_loss=0.03352, over 972324.51 frames.], batch size: 15, lr: 2.09e-04 +2022-05-07 00:19:21,233 INFO [train.py:715] (7/8) Epoch 10, batch 34750, loss[loss=0.1397, simple_loss=0.2162, pruned_loss=0.03163, over 4787.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2132, pruned_loss=0.03355, over 972406.32 frames.], batch size: 17, lr: 2.09e-04 +2022-05-07 00:19:57,688 INFO [train.py:715] (7/8) Epoch 10, batch 34800, loss[loss=0.1295, simple_loss=0.2014, pruned_loss=0.02882, over 4734.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03297, over 972349.35 frames.], batch size: 12, lr: 2.09e-04 +2022-05-07 00:20:47,595 INFO [train.py:715] (7/8) Epoch 11, batch 0, loss[loss=0.1455, simple_loss=0.2138, pruned_loss=0.03858, over 4877.00 frames.], tot_loss[loss=0.1455, simple_loss=0.2138, pruned_loss=0.03858, over 4877.00 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:21:26,501 INFO [train.py:715] (7/8) Epoch 11, batch 50, loss[loss=0.1566, simple_loss=0.2323, pruned_loss=0.04044, over 4975.00 frames.], tot_loss[loss=0.1458, simple_loss=0.2163, pruned_loss=0.03765, over 219681.66 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 00:22:06,400 INFO [train.py:715] (7/8) Epoch 11, batch 100, loss[loss=0.1242, simple_loss=0.1978, pruned_loss=0.0253, over 4976.00 frames.], tot_loss[loss=0.1427, simple_loss=0.2144, pruned_loss=0.03548, over 387466.06 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 00:22:46,277 INFO [train.py:715] 
(7/8) Epoch 11, batch 150, loss[loss=0.1428, simple_loss=0.2081, pruned_loss=0.03878, over 4859.00 frames.], tot_loss[loss=0.1416, simple_loss=0.2139, pruned_loss=0.03464, over 517974.20 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:23:26,828 INFO [train.py:715] (7/8) Epoch 11, batch 200, loss[loss=0.1544, simple_loss=0.2274, pruned_loss=0.04068, over 4851.00 frames.], tot_loss[loss=0.1411, simple_loss=0.2133, pruned_loss=0.03443, over 619198.25 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:24:06,701 INFO [train.py:715] (7/8) Epoch 11, batch 250, loss[loss=0.134, simple_loss=0.2105, pruned_loss=0.02875, over 4750.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2128, pruned_loss=0.03384, over 696749.12 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:24:45,520 INFO [train.py:715] (7/8) Epoch 11, batch 300, loss[loss=0.1494, simple_loss=0.2245, pruned_loss=0.03715, over 4893.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03338, over 757410.13 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:25:26,105 INFO [train.py:715] (7/8) Epoch 11, batch 350, loss[loss=0.1252, simple_loss=0.2007, pruned_loss=0.02481, over 4990.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.0334, over 805179.25 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:26:05,778 INFO [train.py:715] (7/8) Epoch 11, batch 400, loss[loss=0.1334, simple_loss=0.1977, pruned_loss=0.03456, over 4959.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03301, over 842385.53 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:26:46,461 INFO [train.py:715] (7/8) Epoch 11, batch 450, loss[loss=0.1211, simple_loss=0.1953, pruned_loss=0.02341, over 4864.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03319, over 870960.82 frames.], batch size: 32, lr: 2.00e-04 +2022-05-07 00:27:27,788 INFO [train.py:715] (7/8) Epoch 11, batch 500, loss[loss=0.1248, simple_loss=0.1983, pruned_loss=0.0256, over 4789.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2135, pruned_loss=0.03397, over 893310.12 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:28:09,386 INFO [train.py:715] (7/8) Epoch 11, batch 550, loss[loss=0.1582, simple_loss=0.2346, pruned_loss=0.04087, over 4960.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2136, pruned_loss=0.03385, over 911155.72 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:28:50,708 INFO [train.py:715] (7/8) Epoch 11, batch 600, loss[loss=0.1715, simple_loss=0.2393, pruned_loss=0.05189, over 4929.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.0338, over 925400.77 frames.], batch size: 39, lr: 2.00e-04 +2022-05-07 00:29:32,041 INFO [train.py:715] (7/8) Epoch 11, batch 650, loss[loss=0.1424, simple_loss=0.2191, pruned_loss=0.03292, over 4757.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03321, over 935894.29 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:30:13,303 INFO [train.py:715] (7/8) Epoch 11, batch 700, loss[loss=0.1378, simple_loss=0.2039, pruned_loss=0.03589, over 4761.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03311, over 944380.40 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:30:54,880 INFO [train.py:715] (7/8) Epoch 11, batch 750, loss[loss=0.1878, simple_loss=0.2446, pruned_loss=0.06548, over 4820.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2121, pruned_loss=0.03347, over 950422.61 frames.], batch size: 25, lr: 2.00e-04 +2022-05-07 00:31:36,034 INFO [train.py:715] (7/8) Epoch 11, batch 800, 
loss[loss=0.1509, simple_loss=0.2275, pruned_loss=0.03711, over 4772.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2118, pruned_loss=0.0333, over 955521.75 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:32:16,760 INFO [train.py:715] (7/8) Epoch 11, batch 850, loss[loss=0.1418, simple_loss=0.2214, pruned_loss=0.03112, over 4795.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2129, pruned_loss=0.03365, over 959652.08 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:32:58,361 INFO [train.py:715] (7/8) Epoch 11, batch 900, loss[loss=0.1478, simple_loss=0.2183, pruned_loss=0.03865, over 4898.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2123, pruned_loss=0.03372, over 962210.66 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:33:38,987 INFO [train.py:715] (7/8) Epoch 11, batch 950, loss[loss=0.1181, simple_loss=0.1923, pruned_loss=0.02194, over 4894.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2125, pruned_loss=0.03389, over 965090.70 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:34:19,481 INFO [train.py:715] (7/8) Epoch 11, batch 1000, loss[loss=0.1439, simple_loss=0.222, pruned_loss=0.03287, over 4976.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03338, over 966781.92 frames.], batch size: 35, lr: 2.00e-04 +2022-05-07 00:34:58,896 INFO [train.py:715] (7/8) Epoch 11, batch 1050, loss[loss=0.1278, simple_loss=0.2039, pruned_loss=0.02581, over 4891.00 frames.], tot_loss[loss=0.1403, simple_loss=0.213, pruned_loss=0.0338, over 968295.60 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:35:41,048 INFO [train.py:715] (7/8) Epoch 11, batch 1100, loss[loss=0.1378, simple_loss=0.2053, pruned_loss=0.03509, over 4974.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03328, over 969849.39 frames.], batch size: 28, lr: 2.00e-04 +2022-05-07 00:36:20,722 INFO [train.py:715] (7/8) Epoch 11, batch 1150, loss[loss=0.1519, simple_loss=0.2196, pruned_loss=0.04209, over 4864.00 frames.], tot_loss[loss=0.1403, simple_loss=0.2131, pruned_loss=0.03377, over 970871.40 frames.], batch size: 32, lr: 2.00e-04 +2022-05-07 00:37:00,330 INFO [train.py:715] (7/8) Epoch 11, batch 1200, loss[loss=0.1206, simple_loss=0.1943, pruned_loss=0.02345, over 4859.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2135, pruned_loss=0.0338, over 971309.46 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:37:39,162 INFO [train.py:715] (7/8) Epoch 11, batch 1250, loss[loss=0.1434, simple_loss=0.2066, pruned_loss=0.04012, over 4798.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2139, pruned_loss=0.03386, over 971555.79 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 00:38:18,013 INFO [train.py:715] (7/8) Epoch 11, batch 1300, loss[loss=0.1278, simple_loss=0.1877, pruned_loss=0.03394, over 4644.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03351, over 971763.29 frames.], batch size: 13, lr: 2.00e-04 +2022-05-07 00:38:56,863 INFO [train.py:715] (7/8) Epoch 11, batch 1350, loss[loss=0.1104, simple_loss=0.1875, pruned_loss=0.01667, over 4913.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2118, pruned_loss=0.03337, over 971812.49 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 00:39:35,884 INFO [train.py:715] (7/8) Epoch 11, batch 1400, loss[loss=0.1238, simple_loss=0.2054, pruned_loss=0.02103, over 4788.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.03339, over 972489.89 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:40:14,716 INFO [train.py:715] (7/8) Epoch 11, batch 1450, loss[loss=0.1259, 
simple_loss=0.1932, pruned_loss=0.02933, over 4791.00 frames.], tot_loss[loss=0.14, simple_loss=0.2123, pruned_loss=0.03383, over 973200.41 frames.], batch size: 12, lr: 2.00e-04 +2022-05-07 00:40:53,350 INFO [train.py:715] (7/8) Epoch 11, batch 1500, loss[loss=0.1154, simple_loss=0.1866, pruned_loss=0.02206, over 4906.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03333, over 972985.50 frames.], batch size: 18, lr: 2.00e-04 +2022-05-07 00:41:31,715 INFO [train.py:715] (7/8) Epoch 11, batch 1550, loss[loss=0.1452, simple_loss=0.2146, pruned_loss=0.0379, over 4859.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2121, pruned_loss=0.03353, over 972685.51 frames.], batch size: 30, lr: 2.00e-04 +2022-05-07 00:42:10,773 INFO [train.py:715] (7/8) Epoch 11, batch 1600, loss[loss=0.1289, simple_loss=0.2051, pruned_loss=0.02636, over 4828.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03327, over 973171.99 frames.], batch size: 26, lr: 2.00e-04 +2022-05-07 00:42:49,743 INFO [train.py:715] (7/8) Epoch 11, batch 1650, loss[loss=0.1421, simple_loss=0.216, pruned_loss=0.03408, over 4969.00 frames.], tot_loss[loss=0.1393, simple_loss=0.212, pruned_loss=0.03327, over 973509.32 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:43:28,109 INFO [train.py:715] (7/8) Epoch 11, batch 1700, loss[loss=0.1422, simple_loss=0.216, pruned_loss=0.03418, over 4873.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2122, pruned_loss=0.03363, over 972689.57 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:44:07,383 INFO [train.py:715] (7/8) Epoch 11, batch 1750, loss[loss=0.1271, simple_loss=0.2099, pruned_loss=0.02216, over 4937.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03326, over 972011.35 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 00:44:46,269 INFO [train.py:715] (7/8) Epoch 11, batch 1800, loss[loss=0.119, simple_loss=0.206, pruned_loss=0.01601, over 4947.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03267, over 972270.96 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:45:25,307 INFO [train.py:715] (7/8) Epoch 11, batch 1850, loss[loss=0.152, simple_loss=0.2305, pruned_loss=0.0368, over 4814.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03268, over 972711.07 frames.], batch size: 27, lr: 2.00e-04 +2022-05-07 00:46:04,488 INFO [train.py:715] (7/8) Epoch 11, batch 1900, loss[loss=0.1197, simple_loss=0.1994, pruned_loss=0.01997, over 4946.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.0324, over 972902.25 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 00:46:43,766 INFO [train.py:715] (7/8) Epoch 11, batch 1950, loss[loss=0.1245, simple_loss=0.1973, pruned_loss=0.02587, over 4796.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03183, over 972586.13 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:47:23,301 INFO [train.py:715] (7/8) Epoch 11, batch 2000, loss[loss=0.1269, simple_loss=0.2085, pruned_loss=0.02266, over 4934.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2113, pruned_loss=0.0319, over 971674.66 frames.], batch size: 29, lr: 2.00e-04 +2022-05-07 00:48:01,930 INFO [train.py:715] (7/8) Epoch 11, batch 2050, loss[loss=0.1596, simple_loss=0.2252, pruned_loss=0.04699, over 4775.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.03152, over 971194.68 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:48:41,076 INFO [train.py:715] (7/8) Epoch 11, batch 2100, loss[loss=0.1174, simple_loss=0.1913, 
pruned_loss=0.02179, over 4900.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03168, over 971846.14 frames.], batch size: 22, lr: 2.00e-04 +2022-05-07 00:49:20,364 INFO [train.py:715] (7/8) Epoch 11, batch 2150, loss[loss=0.1225, simple_loss=0.2041, pruned_loss=0.02045, over 4793.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03186, over 971886.35 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:49:59,564 INFO [train.py:715] (7/8) Epoch 11, batch 2200, loss[loss=0.1377, simple_loss=0.2046, pruned_loss=0.03541, over 4794.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.0317, over 971892.53 frames.], batch size: 18, lr: 2.00e-04 +2022-05-07 00:50:38,224 INFO [train.py:715] (7/8) Epoch 11, batch 2250, loss[loss=0.1379, simple_loss=0.2076, pruned_loss=0.03414, over 4848.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03218, over 972844.61 frames.], batch size: 32, lr: 2.00e-04 +2022-05-07 00:51:17,281 INFO [train.py:715] (7/8) Epoch 11, batch 2300, loss[loss=0.1332, simple_loss=0.2055, pruned_loss=0.03043, over 4777.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2124, pruned_loss=0.03264, over 972511.63 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:51:56,682 INFO [train.py:715] (7/8) Epoch 11, batch 2350, loss[loss=0.1454, simple_loss=0.2154, pruned_loss=0.03773, over 4932.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.0323, over 973323.46 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 00:52:35,086 INFO [train.py:715] (7/8) Epoch 11, batch 2400, loss[loss=0.1525, simple_loss=0.2225, pruned_loss=0.04127, over 4935.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03239, over 973020.08 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 00:53:14,459 INFO [train.py:715] (7/8) Epoch 11, batch 2450, loss[loss=0.1324, simple_loss=0.2088, pruned_loss=0.02801, over 4954.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03289, over 973078.12 frames.], batch size: 29, lr: 2.00e-04 +2022-05-07 00:53:54,032 INFO [train.py:715] (7/8) Epoch 11, batch 2500, loss[loss=0.1138, simple_loss=0.1889, pruned_loss=0.01933, over 4916.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03229, over 971959.40 frames.], batch size: 19, lr: 2.00e-04 +2022-05-07 00:54:33,184 INFO [train.py:715] (7/8) Epoch 11, batch 2550, loss[loss=0.1425, simple_loss=0.2164, pruned_loss=0.03433, over 4766.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2098, pruned_loss=0.03195, over 971744.95 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:55:12,422 INFO [train.py:715] (7/8) Epoch 11, batch 2600, loss[loss=0.1479, simple_loss=0.2229, pruned_loss=0.03641, over 4898.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03218, over 971853.82 frames.], batch size: 16, lr: 2.00e-04 +2022-05-07 00:55:51,263 INFO [train.py:715] (7/8) Epoch 11, batch 2650, loss[loss=0.1335, simple_loss=0.2097, pruned_loss=0.02865, over 4863.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03224, over 972303.40 frames.], batch size: 20, lr: 2.00e-04 +2022-05-07 00:56:30,349 INFO [train.py:715] (7/8) Epoch 11, batch 2700, loss[loss=0.1236, simple_loss=0.1987, pruned_loss=0.02426, over 4944.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03228, over 973320.94 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 00:57:09,094 INFO [train.py:715] (7/8) Epoch 11, batch 2750, loss[loss=0.1541, simple_loss=0.2312, pruned_loss=0.03845, over 
4886.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03275, over 973357.80 frames.], batch size: 39, lr: 2.00e-04 +2022-05-07 00:57:48,079 INFO [train.py:715] (7/8) Epoch 11, batch 2800, loss[loss=0.1797, simple_loss=0.2438, pruned_loss=0.05777, over 4894.00 frames.], tot_loss[loss=0.139, simple_loss=0.2115, pruned_loss=0.03323, over 973417.63 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 00:58:27,250 INFO [train.py:715] (7/8) Epoch 11, batch 2850, loss[loss=0.1272, simple_loss=0.2097, pruned_loss=0.02232, over 4971.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03276, over 973279.61 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 00:59:05,710 INFO [train.py:715] (7/8) Epoch 11, batch 2900, loss[loss=0.1291, simple_loss=0.2044, pruned_loss=0.02686, over 4786.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03295, over 972246.63 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 00:59:45,166 INFO [train.py:715] (7/8) Epoch 11, batch 2950, loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03101, over 4927.00 frames.], tot_loss[loss=0.139, simple_loss=0.2116, pruned_loss=0.0332, over 972238.34 frames.], batch size: 23, lr: 2.00e-04 +2022-05-07 01:00:25,029 INFO [train.py:715] (7/8) Epoch 11, batch 3000, loss[loss=0.1287, simple_loss=0.2106, pruned_loss=0.02335, over 4953.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2126, pruned_loss=0.03383, over 972145.90 frames.], batch size: 29, lr: 2.00e-04 +2022-05-07 01:00:25,030 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 01:00:34,771 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1902, pruned_loss=0.01097, over 914524.00 frames. +2022-05-07 01:01:14,750 INFO [train.py:715] (7/8) Epoch 11, batch 3050, loss[loss=0.1395, simple_loss=0.2074, pruned_loss=0.03581, over 4788.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2125, pruned_loss=0.0335, over 972432.06 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 01:01:54,018 INFO [train.py:715] (7/8) Epoch 11, batch 3100, loss[loss=0.1556, simple_loss=0.2258, pruned_loss=0.04272, over 4782.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2121, pruned_loss=0.03355, over 971528.52 frames.], batch size: 17, lr: 2.00e-04 +2022-05-07 01:02:34,091 INFO [train.py:715] (7/8) Epoch 11, batch 3150, loss[loss=0.1429, simple_loss=0.2143, pruned_loss=0.03574, over 4695.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2119, pruned_loss=0.03348, over 971184.17 frames.], batch size: 15, lr: 2.00e-04 +2022-05-07 01:03:13,127 INFO [train.py:715] (7/8) Epoch 11, batch 3200, loss[loss=0.2002, simple_loss=0.2778, pruned_loss=0.06131, over 4983.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03356, over 971651.56 frames.], batch size: 31, lr: 2.00e-04 +2022-05-07 01:03:52,802 INFO [train.py:715] (7/8) Epoch 11, batch 3250, loss[loss=0.1723, simple_loss=0.2475, pruned_loss=0.04853, over 4964.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03309, over 972818.88 frames.], batch size: 14, lr: 2.00e-04 +2022-05-07 01:04:31,532 INFO [train.py:715] (7/8) Epoch 11, batch 3300, loss[loss=0.1565, simple_loss=0.2248, pruned_loss=0.04406, over 4895.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03297, over 972663.11 frames.], batch size: 39, lr: 2.00e-04 +2022-05-07 01:05:10,791 INFO [train.py:715] (7/8) Epoch 11, batch 3350, loss[loss=0.119, simple_loss=0.1989, pruned_loss=0.01952, over 4949.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, 
pruned_loss=0.03286, over 972571.17 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 01:05:50,445 INFO [train.py:715] (7/8) Epoch 11, batch 3400, loss[loss=0.1086, simple_loss=0.1818, pruned_loss=0.01769, over 4974.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03264, over 972156.57 frames.], batch size: 24, lr: 2.00e-04 +2022-05-07 01:06:29,436 INFO [train.py:715] (7/8) Epoch 11, batch 3450, loss[loss=0.1396, simple_loss=0.2141, pruned_loss=0.03255, over 4949.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.0327, over 972481.59 frames.], batch size: 21, lr: 2.00e-04 +2022-05-07 01:07:08,299 INFO [train.py:715] (7/8) Epoch 11, batch 3500, loss[loss=0.1628, simple_loss=0.2346, pruned_loss=0.04552, over 4700.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03295, over 972606.45 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:07:47,579 INFO [train.py:715] (7/8) Epoch 11, batch 3550, loss[loss=0.1386, simple_loss=0.2163, pruned_loss=0.03049, over 4798.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03291, over 972437.99 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:08:27,195 INFO [train.py:715] (7/8) Epoch 11, batch 3600, loss[loss=0.1166, simple_loss=0.2009, pruned_loss=0.01622, over 4943.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03259, over 971935.99 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:09:05,515 INFO [train.py:715] (7/8) Epoch 11, batch 3650, loss[loss=0.1362, simple_loss=0.2169, pruned_loss=0.02771, over 4689.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2107, pruned_loss=0.03253, over 971773.65 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:09:45,169 INFO [train.py:715] (7/8) Epoch 11, batch 3700, loss[loss=0.1208, simple_loss=0.1962, pruned_loss=0.02267, over 4976.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03266, over 972096.35 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:10:24,605 INFO [train.py:715] (7/8) Epoch 11, batch 3750, loss[loss=0.1263, simple_loss=0.2051, pruned_loss=0.02374, over 4803.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03199, over 973268.39 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:11:03,051 INFO [train.py:715] (7/8) Epoch 11, batch 3800, loss[loss=0.1275, simple_loss=0.2089, pruned_loss=0.02309, over 4904.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03206, over 973034.14 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:11:42,116 INFO [train.py:715] (7/8) Epoch 11, batch 3850, loss[loss=0.1524, simple_loss=0.2235, pruned_loss=0.04068, over 4868.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03239, over 972854.81 frames.], batch size: 32, lr: 1.99e-04 +2022-05-07 01:12:21,425 INFO [train.py:715] (7/8) Epoch 11, batch 3900, loss[loss=0.1358, simple_loss=0.219, pruned_loss=0.02634, over 4913.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03229, over 972572.44 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:13:01,148 INFO [train.py:715] (7/8) Epoch 11, batch 3950, loss[loss=0.1372, simple_loss=0.2077, pruned_loss=0.03336, over 4791.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03223, over 972153.99 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:13:39,997 INFO [train.py:715] (7/8) Epoch 11, batch 4000, loss[loss=0.1274, simple_loss=0.206, pruned_loss=0.02443, over 4947.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03234, 
over 972646.93 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:14:19,838 INFO [train.py:715] (7/8) Epoch 11, batch 4050, loss[loss=0.1328, simple_loss=0.2101, pruned_loss=0.02781, over 4759.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03197, over 972102.01 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:14:59,480 INFO [train.py:715] (7/8) Epoch 11, batch 4100, loss[loss=0.1669, simple_loss=0.2334, pruned_loss=0.05019, over 4973.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03291, over 972072.71 frames.], batch size: 35, lr: 1.99e-04 +2022-05-07 01:15:38,033 INFO [train.py:715] (7/8) Epoch 11, batch 4150, loss[loss=0.1314, simple_loss=0.2005, pruned_loss=0.03114, over 4883.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03318, over 972298.41 frames.], batch size: 22, lr: 1.99e-04 +2022-05-07 01:16:16,418 INFO [train.py:715] (7/8) Epoch 11, batch 4200, loss[loss=0.1349, simple_loss=0.21, pruned_loss=0.02993, over 4882.00 frames.], tot_loss[loss=0.139, simple_loss=0.2118, pruned_loss=0.03312, over 972037.26 frames.], batch size: 22, lr: 1.99e-04 +2022-05-07 01:16:56,663 INFO [train.py:715] (7/8) Epoch 11, batch 4250, loss[loss=0.1781, simple_loss=0.2558, pruned_loss=0.05023, over 4723.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2119, pruned_loss=0.03339, over 971833.32 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:17:36,660 INFO [train.py:715] (7/8) Epoch 11, batch 4300, loss[loss=0.1292, simple_loss=0.2068, pruned_loss=0.02581, over 4789.00 frames.], tot_loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03408, over 972111.38 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:18:15,824 INFO [train.py:715] (7/8) Epoch 11, batch 4350, loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03053, over 4955.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03314, over 972300.40 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:18:56,181 INFO [train.py:715] (7/8) Epoch 11, batch 4400, loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03176, over 4632.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03317, over 972189.22 frames.], batch size: 13, lr: 1.99e-04 +2022-05-07 01:19:36,297 INFO [train.py:715] (7/8) Epoch 11, batch 4450, loss[loss=0.1369, simple_loss=0.2233, pruned_loss=0.02523, over 4805.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03296, over 973394.81 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:20:15,925 INFO [train.py:715] (7/8) Epoch 11, batch 4500, loss[loss=0.1315, simple_loss=0.2113, pruned_loss=0.02589, over 4984.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03267, over 973250.50 frames.], batch size: 25, lr: 1.99e-04 +2022-05-07 01:20:55,944 INFO [train.py:715] (7/8) Epoch 11, batch 4550, loss[loss=0.1594, simple_loss=0.22, pruned_loss=0.04939, over 4838.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03263, over 973219.20 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:21:35,996 INFO [train.py:715] (7/8) Epoch 11, batch 4600, loss[loss=0.1431, simple_loss=0.2064, pruned_loss=0.03986, over 4863.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03283, over 973050.26 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:22:15,465 INFO [train.py:715] (7/8) Epoch 11, batch 4650, loss[loss=0.128, simple_loss=0.1944, pruned_loss=0.03078, over 4844.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.03259, over 972997.19 frames.], 
batch size: 13, lr: 1.99e-04 +2022-05-07 01:22:55,182 INFO [train.py:715] (7/8) Epoch 11, batch 4700, loss[loss=0.1178, simple_loss=0.1981, pruned_loss=0.01877, over 4950.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03292, over 972654.18 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:23:35,352 INFO [train.py:715] (7/8) Epoch 11, batch 4750, loss[loss=0.1294, simple_loss=0.2019, pruned_loss=0.02844, over 4788.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03269, over 972900.77 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:24:15,522 INFO [train.py:715] (7/8) Epoch 11, batch 4800, loss[loss=0.1531, simple_loss=0.2308, pruned_loss=0.0377, over 4818.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03256, over 973769.88 frames.], batch size: 25, lr: 1.99e-04 +2022-05-07 01:24:55,134 INFO [train.py:715] (7/8) Epoch 11, batch 4850, loss[loss=0.1203, simple_loss=0.2042, pruned_loss=0.01822, over 4923.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03266, over 973631.75 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:25:34,923 INFO [train.py:715] (7/8) Epoch 11, batch 4900, loss[loss=0.1459, simple_loss=0.2109, pruned_loss=0.04041, over 4787.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03261, over 972812.57 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:26:14,641 INFO [train.py:715] (7/8) Epoch 11, batch 4950, loss[loss=0.1248, simple_loss=0.1923, pruned_loss=0.02865, over 4976.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03292, over 973697.08 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:26:53,443 INFO [train.py:715] (7/8) Epoch 11, batch 5000, loss[loss=0.1152, simple_loss=0.1867, pruned_loss=0.02188, over 4831.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03246, over 972838.01 frames.], batch size: 13, lr: 1.99e-04 +2022-05-07 01:27:31,884 INFO [train.py:715] (7/8) Epoch 11, batch 5050, loss[loss=0.1385, simple_loss=0.2131, pruned_loss=0.03196, over 4809.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03268, over 971719.63 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:28:11,145 INFO [train.py:715] (7/8) Epoch 11, batch 5100, loss[loss=0.164, simple_loss=0.2471, pruned_loss=0.04049, over 4839.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2114, pruned_loss=0.03279, over 972036.70 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:28:50,277 INFO [train.py:715] (7/8) Epoch 11, batch 5150, loss[loss=0.1421, simple_loss=0.2104, pruned_loss=0.03688, over 4944.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2126, pruned_loss=0.03347, over 973034.76 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:29:29,206 INFO [train.py:715] (7/8) Epoch 11, batch 5200, loss[loss=0.1738, simple_loss=0.2359, pruned_loss=0.05581, over 4841.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03293, over 972752.52 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:30:08,610 INFO [train.py:715] (7/8) Epoch 11, batch 5250, loss[loss=0.1187, simple_loss=0.1931, pruned_loss=0.02215, over 4975.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03268, over 973620.93 frames.], batch size: 28, lr: 1.99e-04 +2022-05-07 01:30:48,291 INFO [train.py:715] (7/8) Epoch 11, batch 5300, loss[loss=0.167, simple_loss=0.2334, pruned_loss=0.05031, over 4833.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03304, over 973473.66 frames.], batch size: 15, lr: 
1.99e-04 +2022-05-07 01:31:27,444 INFO [train.py:715] (7/8) Epoch 11, batch 5350, loss[loss=0.1584, simple_loss=0.2209, pruned_loss=0.04798, over 4942.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03316, over 973591.85 frames.], batch size: 39, lr: 1.99e-04 +2022-05-07 01:32:06,512 INFO [train.py:715] (7/8) Epoch 11, batch 5400, loss[loss=0.1404, simple_loss=0.212, pruned_loss=0.03442, over 4935.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.03256, over 974040.27 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:32:45,900 INFO [train.py:715] (7/8) Epoch 11, batch 5450, loss[loss=0.1223, simple_loss=0.2077, pruned_loss=0.01846, over 4942.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.0327, over 973322.74 frames.], batch size: 29, lr: 1.99e-04 +2022-05-07 01:33:25,400 INFO [train.py:715] (7/8) Epoch 11, batch 5500, loss[loss=0.1602, simple_loss=0.2328, pruned_loss=0.04386, over 4731.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03292, over 973163.30 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:34:04,253 INFO [train.py:715] (7/8) Epoch 11, batch 5550, loss[loss=0.1661, simple_loss=0.2263, pruned_loss=0.05295, over 4886.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2113, pruned_loss=0.0327, over 972833.56 frames.], batch size: 39, lr: 1.99e-04 +2022-05-07 01:34:42,709 INFO [train.py:715] (7/8) Epoch 11, batch 5600, loss[loss=0.1178, simple_loss=0.1852, pruned_loss=0.02516, over 4892.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03302, over 973472.46 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:35:22,174 INFO [train.py:715] (7/8) Epoch 11, batch 5650, loss[loss=0.1689, simple_loss=0.2361, pruned_loss=0.05089, over 4990.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2112, pruned_loss=0.03291, over 973458.93 frames.], batch size: 28, lr: 1.99e-04 +2022-05-07 01:36:01,615 INFO [train.py:715] (7/8) Epoch 11, batch 5700, loss[loss=0.14, simple_loss=0.2144, pruned_loss=0.03284, over 4771.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03254, over 972796.29 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:36:40,403 INFO [train.py:715] (7/8) Epoch 11, batch 5750, loss[loss=0.1551, simple_loss=0.2189, pruned_loss=0.04571, over 4855.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2102, pruned_loss=0.03208, over 972053.59 frames.], batch size: 32, lr: 1.99e-04 +2022-05-07 01:37:19,379 INFO [train.py:715] (7/8) Epoch 11, batch 5800, loss[loss=0.1272, simple_loss=0.2141, pruned_loss=0.0201, over 4933.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03218, over 972170.49 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:37:58,489 INFO [train.py:715] (7/8) Epoch 11, batch 5850, loss[loss=0.1636, simple_loss=0.2286, pruned_loss=0.04933, over 4839.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03226, over 972721.06 frames.], batch size: 30, lr: 1.99e-04 +2022-05-07 01:38:37,496 INFO [train.py:715] (7/8) Epoch 11, batch 5900, loss[loss=0.1275, simple_loss=0.2032, pruned_loss=0.02593, over 4821.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03252, over 971540.41 frames.], batch size: 26, lr: 1.99e-04 +2022-05-07 01:39:16,657 INFO [train.py:715] (7/8) Epoch 11, batch 5950, loss[loss=0.1122, simple_loss=0.1815, pruned_loss=0.02147, over 4754.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03295, over 971106.56 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 
01:39:56,448 INFO [train.py:715] (7/8) Epoch 11, batch 6000, loss[loss=0.1498, simple_loss=0.2239, pruned_loss=0.03781, over 4976.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03263, over 971474.60 frames.], batch size: 14, lr: 1.99e-04 +2022-05-07 01:39:56,449 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 01:40:06,015 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1059, simple_loss=0.1901, pruned_loss=0.01082, over 914524.00 frames. +2022-05-07 01:40:45,576 INFO [train.py:715] (7/8) Epoch 11, batch 6050, loss[loss=0.1161, simple_loss=0.2024, pruned_loss=0.01486, over 4944.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.0321, over 971523.37 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:41:24,990 INFO [train.py:715] (7/8) Epoch 11, batch 6100, loss[loss=0.1467, simple_loss=0.2158, pruned_loss=0.03881, over 4866.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03245, over 971892.16 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:42:03,738 INFO [train.py:715] (7/8) Epoch 11, batch 6150, loss[loss=0.1676, simple_loss=0.2262, pruned_loss=0.05445, over 4763.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03288, over 971650.09 frames.], batch size: 17, lr: 1.99e-04 +2022-05-07 01:42:43,200 INFO [train.py:715] (7/8) Epoch 11, batch 6200, loss[loss=0.1222, simple_loss=0.2081, pruned_loss=0.01817, over 4753.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03304, over 972152.22 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:43:22,230 INFO [train.py:715] (7/8) Epoch 11, batch 6250, loss[loss=0.1506, simple_loss=0.2181, pruned_loss=0.04156, over 4801.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.0328, over 972611.22 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:44:01,021 INFO [train.py:715] (7/8) Epoch 11, batch 6300, loss[loss=0.192, simple_loss=0.2686, pruned_loss=0.0577, over 4767.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03269, over 972330.34 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:44:39,693 INFO [train.py:715] (7/8) Epoch 11, batch 6350, loss[loss=0.145, simple_loss=0.212, pruned_loss=0.03896, over 4909.00 frames.], tot_loss[loss=0.1396, simple_loss=0.213, pruned_loss=0.03314, over 972177.05 frames.], batch size: 19, lr: 1.99e-04 +2022-05-07 01:45:20,272 INFO [train.py:715] (7/8) Epoch 11, batch 6400, loss[loss=0.1732, simple_loss=0.2476, pruned_loss=0.04943, over 4880.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2134, pruned_loss=0.03319, over 972055.91 frames.], batch size: 39, lr: 1.99e-04 +2022-05-07 01:45:59,615 INFO [train.py:715] (7/8) Epoch 11, batch 6450, loss[loss=0.1494, simple_loss=0.2129, pruned_loss=0.04296, over 4694.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2125, pruned_loss=0.03322, over 972688.65 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:46:38,693 INFO [train.py:715] (7/8) Epoch 11, batch 6500, loss[loss=0.1554, simple_loss=0.2047, pruned_loss=0.05306, over 4745.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2126, pruned_loss=0.03325, over 972286.56 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:47:18,038 INFO [train.py:715] (7/8) Epoch 11, batch 6550, loss[loss=0.1281, simple_loss=0.2109, pruned_loss=0.02268, over 4898.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03273, over 972460.79 frames.], batch size: 22, lr: 1.99e-04 +2022-05-07 01:47:58,218 INFO [train.py:715] (7/8) Epoch 11, batch 6600, 
loss[loss=0.1384, simple_loss=0.2231, pruned_loss=0.02681, over 4920.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03261, over 973545.07 frames.], batch size: 23, lr: 1.99e-04 +2022-05-07 01:48:38,345 INFO [train.py:715] (7/8) Epoch 11, batch 6650, loss[loss=0.1256, simple_loss=0.2012, pruned_loss=0.02496, over 4723.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03272, over 973525.69 frames.], batch size: 16, lr: 1.99e-04 +2022-05-07 01:49:17,553 INFO [train.py:715] (7/8) Epoch 11, batch 6700, loss[loss=0.1166, simple_loss=0.1901, pruned_loss=0.0215, over 4982.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03279, over 973732.54 frames.], batch size: 28, lr: 1.99e-04 +2022-05-07 01:49:57,812 INFO [train.py:715] (7/8) Epoch 11, batch 6750, loss[loss=0.1361, simple_loss=0.1977, pruned_loss=0.03719, over 4838.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03275, over 972890.49 frames.], batch size: 20, lr: 1.99e-04 +2022-05-07 01:50:37,609 INFO [train.py:715] (7/8) Epoch 11, batch 6800, loss[loss=0.1254, simple_loss=0.1981, pruned_loss=0.02638, over 4768.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.03278, over 973158.03 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:51:16,487 INFO [train.py:715] (7/8) Epoch 11, batch 6850, loss[loss=0.1537, simple_loss=0.2327, pruned_loss=0.03734, over 4917.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03292, over 972901.54 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:51:55,544 INFO [train.py:715] (7/8) Epoch 11, batch 6900, loss[loss=0.1474, simple_loss=0.2269, pruned_loss=0.03398, over 4928.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03286, over 974074.77 frames.], batch size: 39, lr: 1.99e-04 +2022-05-07 01:52:34,235 INFO [train.py:715] (7/8) Epoch 11, batch 6950, loss[loss=0.1251, simple_loss=0.2001, pruned_loss=0.02506, over 4869.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03232, over 973408.33 frames.], batch size: 22, lr: 1.99e-04 +2022-05-07 01:53:13,694 INFO [train.py:715] (7/8) Epoch 11, batch 7000, loss[loss=0.1321, simple_loss=0.2158, pruned_loss=0.02416, over 4789.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03231, over 973045.68 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:53:52,255 INFO [train.py:715] (7/8) Epoch 11, batch 7050, loss[loss=0.1507, simple_loss=0.2272, pruned_loss=0.03712, over 4940.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2126, pruned_loss=0.03312, over 973042.44 frames.], batch size: 21, lr: 1.99e-04 +2022-05-07 01:54:31,698 INFO [train.py:715] (7/8) Epoch 11, batch 7100, loss[loss=0.1783, simple_loss=0.2658, pruned_loss=0.04543, over 4917.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03316, over 972542.67 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:55:10,749 INFO [train.py:715] (7/8) Epoch 11, batch 7150, loss[loss=0.1249, simple_loss=0.2024, pruned_loss=0.02373, over 4864.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2125, pruned_loss=0.0326, over 972969.08 frames.], batch size: 20, lr: 1.99e-04 +2022-05-07 01:55:49,511 INFO [train.py:715] (7/8) Epoch 11, batch 7200, loss[loss=0.1772, simple_loss=0.2599, pruned_loss=0.04722, over 4833.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2129, pruned_loss=0.03284, over 973315.48 frames.], batch size: 15, lr: 1.99e-04 +2022-05-07 01:56:28,452 INFO [train.py:715] (7/8) Epoch 11, batch 7250, loss[loss=0.1085, 
simple_loss=0.1792, pruned_loss=0.01886, over 4838.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2126, pruned_loss=0.03279, over 972819.00 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:57:07,430 INFO [train.py:715] (7/8) Epoch 11, batch 7300, loss[loss=0.1445, simple_loss=0.1952, pruned_loss=0.04694, over 4781.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2123, pruned_loss=0.03278, over 973156.43 frames.], batch size: 12, lr: 1.99e-04 +2022-05-07 01:57:46,541 INFO [train.py:715] (7/8) Epoch 11, batch 7350, loss[loss=0.1349, simple_loss=0.208, pruned_loss=0.03087, over 4916.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.0331, over 973429.91 frames.], batch size: 18, lr: 1.99e-04 +2022-05-07 01:58:25,307 INFO [train.py:715] (7/8) Epoch 11, batch 7400, loss[loss=0.1739, simple_loss=0.2492, pruned_loss=0.04928, over 4809.00 frames.], tot_loss[loss=0.1395, simple_loss=0.213, pruned_loss=0.033, over 973163.16 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 01:59:04,701 INFO [train.py:715] (7/8) Epoch 11, batch 7450, loss[loss=0.1514, simple_loss=0.2192, pruned_loss=0.04177, over 4897.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2132, pruned_loss=0.03304, over 972353.32 frames.], batch size: 39, lr: 1.98e-04 +2022-05-07 01:59:43,843 INFO [train.py:715] (7/8) Epoch 11, batch 7500, loss[loss=0.1222, simple_loss=0.2028, pruned_loss=0.02077, over 4969.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2132, pruned_loss=0.03295, over 972695.29 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:00:23,094 INFO [train.py:715] (7/8) Epoch 11, batch 7550, loss[loss=0.1386, simple_loss=0.2131, pruned_loss=0.03207, over 4882.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03287, over 973553.36 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:01:02,845 INFO [train.py:715] (7/8) Epoch 11, batch 7600, loss[loss=0.1537, simple_loss=0.2317, pruned_loss=0.03787, over 4793.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03287, over 973462.14 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:01:42,516 INFO [train.py:715] (7/8) Epoch 11, batch 7650, loss[loss=0.1456, simple_loss=0.2141, pruned_loss=0.03849, over 4844.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03284, over 973048.26 frames.], batch size: 13, lr: 1.98e-04 +2022-05-07 02:02:22,056 INFO [train.py:715] (7/8) Epoch 11, batch 7700, loss[loss=0.1443, simple_loss=0.2255, pruned_loss=0.03154, over 4978.00 frames.], tot_loss[loss=0.1383, simple_loss=0.211, pruned_loss=0.03275, over 973254.25 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:03:01,233 INFO [train.py:715] (7/8) Epoch 11, batch 7750, loss[loss=0.132, simple_loss=0.2002, pruned_loss=0.03184, over 4965.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.0326, over 973322.35 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:03:40,567 INFO [train.py:715] (7/8) Epoch 11, batch 7800, loss[loss=0.165, simple_loss=0.2437, pruned_loss=0.04313, over 4929.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03339, over 973250.05 frames.], batch size: 39, lr: 1.98e-04 +2022-05-07 02:04:19,855 INFO [train.py:715] (7/8) Epoch 11, batch 7850, loss[loss=0.1177, simple_loss=0.188, pruned_loss=0.02371, over 4877.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03349, over 973554.23 frames.], batch size: 22, lr: 1.98e-04 +2022-05-07 02:04:58,994 INFO [train.py:715] (7/8) Epoch 11, batch 7900, loss[loss=0.1769, simple_loss=0.2422, 
pruned_loss=0.05581, over 4895.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2133, pruned_loss=0.03329, over 973739.21 frames.], batch size: 39, lr: 1.98e-04 +2022-05-07 02:05:37,734 INFO [train.py:715] (7/8) Epoch 11, batch 7950, loss[loss=0.1572, simple_loss=0.2329, pruned_loss=0.04078, over 4766.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2133, pruned_loss=0.03347, over 972757.07 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:06:18,362 INFO [train.py:715] (7/8) Epoch 11, batch 8000, loss[loss=0.1293, simple_loss=0.2125, pruned_loss=0.02303, over 4934.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.03295, over 972680.88 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:06:57,624 INFO [train.py:715] (7/8) Epoch 11, batch 8050, loss[loss=0.1399, simple_loss=0.216, pruned_loss=0.03195, over 4785.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03294, over 972948.78 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:07:37,876 INFO [train.py:715] (7/8) Epoch 11, batch 8100, loss[loss=0.1504, simple_loss=0.2193, pruned_loss=0.04076, over 4894.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2126, pruned_loss=0.03283, over 972682.14 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:08:17,872 INFO [train.py:715] (7/8) Epoch 11, batch 8150, loss[loss=0.1243, simple_loss=0.186, pruned_loss=0.03132, over 4785.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03301, over 973069.57 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 02:08:57,400 INFO [train.py:715] (7/8) Epoch 11, batch 8200, loss[loss=0.1461, simple_loss=0.2206, pruned_loss=0.03582, over 4985.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03252, over 973062.45 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:09:36,730 INFO [train.py:715] (7/8) Epoch 11, batch 8250, loss[loss=0.1372, simple_loss=0.2144, pruned_loss=0.02999, over 4786.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03305, over 972659.30 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:10:15,063 INFO [train.py:715] (7/8) Epoch 11, batch 8300, loss[loss=0.1345, simple_loss=0.2072, pruned_loss=0.03087, over 4855.00 frames.], tot_loss[loss=0.139, simple_loss=0.2118, pruned_loss=0.03306, over 972935.29 frames.], batch size: 34, lr: 1.98e-04 +2022-05-07 02:10:54,960 INFO [train.py:715] (7/8) Epoch 11, batch 8350, loss[loss=0.1188, simple_loss=0.2008, pruned_loss=0.01842, over 4823.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03288, over 972971.16 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:11:34,528 INFO [train.py:715] (7/8) Epoch 11, batch 8400, loss[loss=0.1352, simple_loss=0.2126, pruned_loss=0.02891, over 4946.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03267, over 973170.74 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:12:13,503 INFO [train.py:715] (7/8) Epoch 11, batch 8450, loss[loss=0.15, simple_loss=0.2102, pruned_loss=0.04493, over 4851.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03266, over 972864.74 frames.], batch size: 32, lr: 1.98e-04 +2022-05-07 02:12:52,199 INFO [train.py:715] (7/8) Epoch 11, batch 8500, loss[loss=0.1642, simple_loss=0.2309, pruned_loss=0.04876, over 4974.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.03311, over 973344.97 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:13:32,007 INFO [train.py:715] (7/8) Epoch 11, batch 8550, loss[loss=0.1464, simple_loss=0.2201, pruned_loss=0.03634, over 
4983.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2131, pruned_loss=0.03285, over 973224.77 frames.], batch size: 28, lr: 1.98e-04 +2022-05-07 02:14:11,213 INFO [train.py:715] (7/8) Epoch 11, batch 8600, loss[loss=0.1372, simple_loss=0.2083, pruned_loss=0.03305, over 4786.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2129, pruned_loss=0.03292, over 972757.67 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:14:49,544 INFO [train.py:715] (7/8) Epoch 11, batch 8650, loss[loss=0.1545, simple_loss=0.225, pruned_loss=0.04198, over 4920.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03344, over 973526.71 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:15:29,404 INFO [train.py:715] (7/8) Epoch 11, batch 8700, loss[loss=0.1064, simple_loss=0.1809, pruned_loss=0.01597, over 4804.00 frames.], tot_loss[loss=0.1408, simple_loss=0.2138, pruned_loss=0.03394, over 973105.69 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:16:08,723 INFO [train.py:715] (7/8) Epoch 11, batch 8750, loss[loss=0.1278, simple_loss=0.1985, pruned_loss=0.02856, over 4783.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03392, over 972360.13 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:16:47,705 INFO [train.py:715] (7/8) Epoch 11, batch 8800, loss[loss=0.1214, simple_loss=0.1968, pruned_loss=0.02306, over 4809.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2132, pruned_loss=0.03413, over 972517.40 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:17:26,839 INFO [train.py:715] (7/8) Epoch 11, batch 8850, loss[loss=0.1436, simple_loss=0.2142, pruned_loss=0.03646, over 4932.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.0333, over 972225.48 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:18:06,536 INFO [train.py:715] (7/8) Epoch 11, batch 8900, loss[loss=0.1454, simple_loss=0.2139, pruned_loss=0.03841, over 4844.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2123, pruned_loss=0.03327, over 971841.93 frames.], batch size: 30, lr: 1.98e-04 +2022-05-07 02:18:46,168 INFO [train.py:715] (7/8) Epoch 11, batch 8950, loss[loss=0.1258, simple_loss=0.1959, pruned_loss=0.0278, over 4781.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03369, over 972707.54 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:19:25,277 INFO [train.py:715] (7/8) Epoch 11, batch 9000, loss[loss=0.1435, simple_loss=0.2119, pruned_loss=0.03753, over 4726.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03338, over 972639.21 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:19:25,278 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 02:19:34,857 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1903, pruned_loss=0.011, over 914524.00 frames. 
+2022-05-07 02:20:13,752 INFO [train.py:715] (7/8) Epoch 11, batch 9050, loss[loss=0.1462, simple_loss=0.2156, pruned_loss=0.03834, over 4810.00 frames.], tot_loss[loss=0.1399, simple_loss=0.213, pruned_loss=0.03341, over 972318.26 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:20:55,924 INFO [train.py:715] (7/8) Epoch 11, batch 9100, loss[loss=0.181, simple_loss=0.2697, pruned_loss=0.04612, over 4777.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2125, pruned_loss=0.03302, over 972554.12 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:21:35,544 INFO [train.py:715] (7/8) Epoch 11, batch 9150, loss[loss=0.1876, simple_loss=0.2539, pruned_loss=0.06065, over 4917.00 frames.], tot_loss[loss=0.139, simple_loss=0.2116, pruned_loss=0.03315, over 972342.83 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:22:15,055 INFO [train.py:715] (7/8) Epoch 11, batch 9200, loss[loss=0.1218, simple_loss=0.1861, pruned_loss=0.02876, over 4763.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2106, pruned_loss=0.03289, over 971968.15 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 02:22:54,637 INFO [train.py:715] (7/8) Epoch 11, batch 9250, loss[loss=0.1459, simple_loss=0.2215, pruned_loss=0.03517, over 4814.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2107, pruned_loss=0.033, over 972078.42 frames.], batch size: 25, lr: 1.98e-04 +2022-05-07 02:23:33,874 INFO [train.py:715] (7/8) Epoch 11, batch 9300, loss[loss=0.1139, simple_loss=0.1893, pruned_loss=0.01921, over 4753.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2105, pruned_loss=0.03294, over 972562.08 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:24:12,711 INFO [train.py:715] (7/8) Epoch 11, batch 9350, loss[loss=0.125, simple_loss=0.1932, pruned_loss=0.02842, over 4903.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2108, pruned_loss=0.03267, over 971828.44 frames.], batch size: 19, lr: 1.98e-04 +2022-05-07 02:24:51,485 INFO [train.py:715] (7/8) Epoch 11, batch 9400, loss[loss=0.1288, simple_loss=0.2064, pruned_loss=0.02559, over 4896.00 frames.], tot_loss[loss=0.138, simple_loss=0.2109, pruned_loss=0.03258, over 971739.14 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:25:31,002 INFO [train.py:715] (7/8) Epoch 11, batch 9450, loss[loss=0.1217, simple_loss=0.1918, pruned_loss=0.02584, over 4746.00 frames.], tot_loss[loss=0.139, simple_loss=0.2116, pruned_loss=0.03315, over 971837.14 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 02:26:10,041 INFO [train.py:715] (7/8) Epoch 11, batch 9500, loss[loss=0.1546, simple_loss=0.2336, pruned_loss=0.0378, over 4788.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.0329, over 971693.88 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:26:48,575 INFO [train.py:715] (7/8) Epoch 11, batch 9550, loss[loss=0.1346, simple_loss=0.2059, pruned_loss=0.03163, over 4845.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03246, over 970895.48 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:27:28,238 INFO [train.py:715] (7/8) Epoch 11, batch 9600, loss[loss=0.1333, simple_loss=0.2143, pruned_loss=0.02618, over 4990.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03244, over 971585.08 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:28:07,061 INFO [train.py:715] (7/8) Epoch 11, batch 9650, loss[loss=0.1167, simple_loss=0.1887, pruned_loss=0.02231, over 4967.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03253, over 971741.97 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:28:45,590 INFO 
[train.py:715] (7/8) Epoch 11, batch 9700, loss[loss=0.1485, simple_loss=0.2256, pruned_loss=0.03568, over 4984.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03258, over 972280.31 frames.], batch size: 28, lr: 1.98e-04 +2022-05-07 02:29:24,589 INFO [train.py:715] (7/8) Epoch 11, batch 9750, loss[loss=0.1227, simple_loss=0.1912, pruned_loss=0.02713, over 4773.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03253, over 971391.51 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:30:03,698 INFO [train.py:715] (7/8) Epoch 11, batch 9800, loss[loss=0.1629, simple_loss=0.2266, pruned_loss=0.04958, over 4933.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2119, pruned_loss=0.03326, over 971867.35 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:30:43,327 INFO [train.py:715] (7/8) Epoch 11, batch 9850, loss[loss=0.1058, simple_loss=0.1667, pruned_loss=0.02248, over 4829.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03302, over 971770.30 frames.], batch size: 13, lr: 1.98e-04 +2022-05-07 02:31:22,287 INFO [train.py:715] (7/8) Epoch 11, batch 9900, loss[loss=0.1591, simple_loss=0.2274, pruned_loss=0.04538, over 4841.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03261, over 971221.44 frames.], batch size: 32, lr: 1.98e-04 +2022-05-07 02:32:02,526 INFO [train.py:715] (7/8) Epoch 11, batch 9950, loss[loss=0.1326, simple_loss=0.198, pruned_loss=0.0336, over 4796.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.0327, over 972051.85 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 02:32:41,857 INFO [train.py:715] (7/8) Epoch 11, batch 10000, loss[loss=0.1243, simple_loss=0.2056, pruned_loss=0.02152, over 4972.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03255, over 972361.02 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:33:21,584 INFO [train.py:715] (7/8) Epoch 11, batch 10050, loss[loss=0.1491, simple_loss=0.221, pruned_loss=0.0386, over 4907.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03281, over 972034.50 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:33:59,723 INFO [train.py:715] (7/8) Epoch 11, batch 10100, loss[loss=0.1208, simple_loss=0.1999, pruned_loss=0.02082, over 4975.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2113, pruned_loss=0.03282, over 972885.44 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:34:38,757 INFO [train.py:715] (7/8) Epoch 11, batch 10150, loss[loss=0.1447, simple_loss=0.2224, pruned_loss=0.03352, over 4799.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03235, over 972500.36 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:35:17,191 INFO [train.py:715] (7/8) Epoch 11, batch 10200, loss[loss=0.1593, simple_loss=0.2242, pruned_loss=0.0472, over 4816.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.03269, over 972011.84 frames.], batch size: 13, lr: 1.98e-04 +2022-05-07 02:35:55,358 INFO [train.py:715] (7/8) Epoch 11, batch 10250, loss[loss=0.1513, simple_loss=0.2209, pruned_loss=0.04089, over 4927.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.0332, over 971424.21 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:36:34,758 INFO [train.py:715] (7/8) Epoch 11, batch 10300, loss[loss=0.1425, simple_loss=0.2144, pruned_loss=0.03534, over 4752.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03297, over 971727.97 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:37:13,487 INFO [train.py:715] (7/8) Epoch 
11, batch 10350, loss[loss=0.154, simple_loss=0.2346, pruned_loss=0.03668, over 4850.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03288, over 971321.21 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:37:52,307 INFO [train.py:715] (7/8) Epoch 11, batch 10400, loss[loss=0.1272, simple_loss=0.2051, pruned_loss=0.02467, over 4790.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2111, pruned_loss=0.03292, over 971877.55 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:38:30,789 INFO [train.py:715] (7/8) Epoch 11, batch 10450, loss[loss=0.1276, simple_loss=0.203, pruned_loss=0.0261, over 4862.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2112, pruned_loss=0.03312, over 971617.10 frames.], batch size: 20, lr: 1.98e-04 +2022-05-07 02:39:09,428 INFO [train.py:715] (7/8) Epoch 11, batch 10500, loss[loss=0.1235, simple_loss=0.1959, pruned_loss=0.02561, over 4958.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03308, over 971715.02 frames.], batch size: 29, lr: 1.98e-04 +2022-05-07 02:39:48,489 INFO [train.py:715] (7/8) Epoch 11, batch 10550, loss[loss=0.1452, simple_loss=0.219, pruned_loss=0.03573, over 4780.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2113, pruned_loss=0.03277, over 972320.05 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:40:27,838 INFO [train.py:715] (7/8) Epoch 11, batch 10600, loss[loss=0.1098, simple_loss=0.1842, pruned_loss=0.01768, over 4787.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03257, over 971460.16 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:41:06,626 INFO [train.py:715] (7/8) Epoch 11, batch 10650, loss[loss=0.1183, simple_loss=0.19, pruned_loss=0.02326, over 4887.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03258, over 971865.44 frames.], batch size: 22, lr: 1.98e-04 +2022-05-07 02:41:45,853 INFO [train.py:715] (7/8) Epoch 11, batch 10700, loss[loss=0.1233, simple_loss=0.2075, pruned_loss=0.01952, over 4818.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03221, over 972078.75 frames.], batch size: 26, lr: 1.98e-04 +2022-05-07 02:42:25,056 INFO [train.py:715] (7/8) Epoch 11, batch 10750, loss[loss=0.1428, simple_loss=0.2153, pruned_loss=0.03521, over 4928.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03257, over 971872.62 frames.], batch size: 23, lr: 1.98e-04 +2022-05-07 02:43:03,974 INFO [train.py:715] (7/8) Epoch 11, batch 10800, loss[loss=0.1362, simple_loss=0.2109, pruned_loss=0.03082, over 4928.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03225, over 972043.81 frames.], batch size: 23, lr: 1.98e-04 +2022-05-07 02:43:43,680 INFO [train.py:715] (7/8) Epoch 11, batch 10850, loss[loss=0.1395, simple_loss=0.2104, pruned_loss=0.03435, over 4828.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03223, over 971685.12 frames.], batch size: 13, lr: 1.98e-04 +2022-05-07 02:44:23,476 INFO [train.py:715] (7/8) Epoch 11, batch 10900, loss[loss=0.129, simple_loss=0.2017, pruned_loss=0.02821, over 4958.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03299, over 971565.97 frames.], batch size: 24, lr: 1.98e-04 +2022-05-07 02:45:02,833 INFO [train.py:715] (7/8) Epoch 11, batch 10950, loss[loss=0.1432, simple_loss=0.2293, pruned_loss=0.02854, over 4760.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03292, over 972546.58 frames.], batch size: 17, lr: 1.98e-04 +2022-05-07 02:45:42,048 INFO [train.py:715] (7/8) Epoch 11, batch 
11000, loss[loss=0.1193, simple_loss=0.189, pruned_loss=0.02479, over 4940.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03244, over 972366.64 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:46:21,453 INFO [train.py:715] (7/8) Epoch 11, batch 11050, loss[loss=0.1276, simple_loss=0.1999, pruned_loss=0.02765, over 4876.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03156, over 972766.77 frames.], batch size: 16, lr: 1.98e-04 +2022-05-07 02:47:00,459 INFO [train.py:715] (7/8) Epoch 11, batch 11100, loss[loss=0.1372, simple_loss=0.2005, pruned_loss=0.03694, over 4797.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03242, over 971638.13 frames.], batch size: 12, lr: 1.98e-04 +2022-05-07 02:47:39,071 INFO [train.py:715] (7/8) Epoch 11, batch 11150, loss[loss=0.1372, simple_loss=0.2195, pruned_loss=0.02746, over 4967.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.033, over 971616.78 frames.], batch size: 14, lr: 1.98e-04 +2022-05-07 02:48:18,473 INFO [train.py:715] (7/8) Epoch 11, batch 11200, loss[loss=0.1526, simple_loss=0.2209, pruned_loss=0.0422, over 4780.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03325, over 972087.83 frames.], batch size: 18, lr: 1.98e-04 +2022-05-07 02:48:57,592 INFO [train.py:715] (7/8) Epoch 11, batch 11250, loss[loss=0.1367, simple_loss=0.2146, pruned_loss=0.02943, over 4690.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03312, over 971785.59 frames.], batch size: 15, lr: 1.98e-04 +2022-05-07 02:49:35,932 INFO [train.py:715] (7/8) Epoch 11, batch 11300, loss[loss=0.1633, simple_loss=0.2485, pruned_loss=0.03911, over 4933.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03303, over 972253.91 frames.], batch size: 21, lr: 1.98e-04 +2022-05-07 02:50:14,830 INFO [train.py:715] (7/8) Epoch 11, batch 11350, loss[loss=0.1455, simple_loss=0.2209, pruned_loss=0.03506, over 4753.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.0327, over 972219.90 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 02:50:54,375 INFO [train.py:715] (7/8) Epoch 11, batch 11400, loss[loss=0.1416, simple_loss=0.2208, pruned_loss=0.03118, over 4945.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03228, over 972837.47 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 02:51:32,953 INFO [train.py:715] (7/8) Epoch 11, batch 11450, loss[loss=0.153, simple_loss=0.2201, pruned_loss=0.0429, over 4859.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03238, over 973082.22 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 02:52:11,284 INFO [train.py:715] (7/8) Epoch 11, batch 11500, loss[loss=0.1938, simple_loss=0.2495, pruned_loss=0.06911, over 4985.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03207, over 973105.45 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 02:52:50,113 INFO [train.py:715] (7/8) Epoch 11, batch 11550, loss[loss=0.1363, simple_loss=0.2123, pruned_loss=0.03016, over 4918.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03239, over 972890.49 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 02:53:29,303 INFO [train.py:715] (7/8) Epoch 11, batch 11600, loss[loss=0.1415, simple_loss=0.2182, pruned_loss=0.03237, over 4848.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2101, pruned_loss=0.03228, over 973340.35 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 02:54:08,235 INFO [train.py:715] (7/8) Epoch 11, batch 11650, 
loss[loss=0.1479, simple_loss=0.2191, pruned_loss=0.03837, over 4750.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2104, pruned_loss=0.03245, over 973575.45 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 02:54:46,494 INFO [train.py:715] (7/8) Epoch 11, batch 11700, loss[loss=0.1432, simple_loss=0.219, pruned_loss=0.03373, over 4747.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2105, pruned_loss=0.03253, over 973475.57 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 02:55:25,411 INFO [train.py:715] (7/8) Epoch 11, batch 11750, loss[loss=0.1196, simple_loss=0.1964, pruned_loss=0.02141, over 4952.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2114, pruned_loss=0.03308, over 972735.24 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 02:56:04,628 INFO [train.py:715] (7/8) Epoch 11, batch 11800, loss[loss=0.1731, simple_loss=0.248, pruned_loss=0.04912, over 4786.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03265, over 973102.45 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 02:56:43,718 INFO [train.py:715] (7/8) Epoch 11, batch 11850, loss[loss=0.1325, simple_loss=0.2097, pruned_loss=0.02762, over 4988.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03245, over 973441.03 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 02:57:23,415 INFO [train.py:715] (7/8) Epoch 11, batch 11900, loss[loss=0.1627, simple_loss=0.2465, pruned_loss=0.03947, over 4900.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03247, over 972385.39 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 02:58:03,753 INFO [train.py:715] (7/8) Epoch 11, batch 11950, loss[loss=0.1604, simple_loss=0.2378, pruned_loss=0.04146, over 4886.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03266, over 972325.46 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 02:58:43,549 INFO [train.py:715] (7/8) Epoch 11, batch 12000, loss[loss=0.1352, simple_loss=0.2077, pruned_loss=0.03138, over 4794.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03307, over 972288.05 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 02:58:43,549 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 02:58:53,275 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1902, pruned_loss=0.01096, over 914524.00 frames. 
+2022-05-07 02:59:33,213 INFO [train.py:715] (7/8) Epoch 11, batch 12050, loss[loss=0.1285, simple_loss=0.2088, pruned_loss=0.02407, over 4835.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03284, over 972189.44 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:00:12,650 INFO [train.py:715] (7/8) Epoch 11, batch 12100, loss[loss=0.1201, simple_loss=0.1879, pruned_loss=0.02615, over 4821.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03311, over 971726.80 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:00:51,873 INFO [train.py:715] (7/8) Epoch 11, batch 12150, loss[loss=0.1391, simple_loss=0.2151, pruned_loss=0.03152, over 4925.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03296, over 972815.02 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:01:31,404 INFO [train.py:715] (7/8) Epoch 11, batch 12200, loss[loss=0.1264, simple_loss=0.2125, pruned_loss=0.02012, over 4856.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.03294, over 972895.90 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:02:09,903 INFO [train.py:715] (7/8) Epoch 11, batch 12250, loss[loss=0.1615, simple_loss=0.2323, pruned_loss=0.04539, over 4859.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03314, over 972712.34 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:02:49,516 INFO [train.py:715] (7/8) Epoch 11, batch 12300, loss[loss=0.1239, simple_loss=0.1834, pruned_loss=0.03224, over 4844.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2132, pruned_loss=0.03332, over 972258.93 frames.], batch size: 30, lr: 1.97e-04 +2022-05-07 03:03:29,335 INFO [train.py:715] (7/8) Epoch 11, batch 12350, loss[loss=0.1328, simple_loss=0.2155, pruned_loss=0.02508, over 4817.00 frames.], tot_loss[loss=0.139, simple_loss=0.2118, pruned_loss=0.03305, over 972561.87 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:04:08,695 INFO [train.py:715] (7/8) Epoch 11, batch 12400, loss[loss=0.145, simple_loss=0.2093, pruned_loss=0.04032, over 4815.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.0331, over 972006.63 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:04:46,933 INFO [train.py:715] (7/8) Epoch 11, batch 12450, loss[loss=0.1238, simple_loss=0.1956, pruned_loss=0.02605, over 4820.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03302, over 972167.34 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:05:26,167 INFO [train.py:715] (7/8) Epoch 11, batch 12500, loss[loss=0.1685, simple_loss=0.2355, pruned_loss=0.05072, over 4866.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2122, pruned_loss=0.03311, over 972229.49 frames.], batch size: 32, lr: 1.97e-04 +2022-05-07 03:06:05,436 INFO [train.py:715] (7/8) Epoch 11, batch 12550, loss[loss=0.1156, simple_loss=0.1865, pruned_loss=0.02235, over 4863.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2117, pruned_loss=0.03307, over 971889.45 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 03:06:44,092 INFO [train.py:715] (7/8) Epoch 11, batch 12600, loss[loss=0.09974, simple_loss=0.1657, pruned_loss=0.0169, over 4774.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2114, pruned_loss=0.03296, over 971361.57 frames.], batch size: 12, lr: 1.97e-04 +2022-05-07 03:07:23,081 INFO [train.py:715] (7/8) Epoch 11, batch 12650, loss[loss=0.1261, simple_loss=0.1897, pruned_loss=0.03119, over 4643.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03308, over 970720.84 frames.], batch size: 13, lr: 1.97e-04 
+2022-05-07 03:08:02,195 INFO [train.py:715] (7/8) Epoch 11, batch 12700, loss[loss=0.1207, simple_loss=0.1976, pruned_loss=0.02192, over 4948.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03343, over 970826.53 frames.], batch size: 24, lr: 1.97e-04 +2022-05-07 03:08:40,890 INFO [train.py:715] (7/8) Epoch 11, batch 12750, loss[loss=0.152, simple_loss=0.2244, pruned_loss=0.03981, over 4761.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03357, over 971690.96 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:09:19,302 INFO [train.py:715] (7/8) Epoch 11, batch 12800, loss[loss=0.12, simple_loss=0.2013, pruned_loss=0.01931, over 4875.00 frames.], tot_loss[loss=0.14, simple_loss=0.2131, pruned_loss=0.03348, over 971464.60 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 03:09:58,877 INFO [train.py:715] (7/8) Epoch 11, batch 12850, loss[loss=0.1403, simple_loss=0.2223, pruned_loss=0.02917, over 4935.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2132, pruned_loss=0.03352, over 971674.33 frames.], batch size: 23, lr: 1.97e-04 +2022-05-07 03:10:38,290 INFO [train.py:715] (7/8) Epoch 11, batch 12900, loss[loss=0.1295, simple_loss=0.2089, pruned_loss=0.02509, over 4877.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03356, over 972013.98 frames.], batch size: 32, lr: 1.97e-04 +2022-05-07 03:11:17,932 INFO [train.py:715] (7/8) Epoch 11, batch 12950, loss[loss=0.1221, simple_loss=0.2015, pruned_loss=0.02132, over 4688.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2131, pruned_loss=0.0331, over 972625.80 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:11:56,711 INFO [train.py:715] (7/8) Epoch 11, batch 13000, loss[loss=0.1257, simple_loss=0.1993, pruned_loss=0.02605, over 4817.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03312, over 972738.68 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:12:36,381 INFO [train.py:715] (7/8) Epoch 11, batch 13050, loss[loss=0.1423, simple_loss=0.2183, pruned_loss=0.03314, over 4801.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03299, over 972506.10 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:13:15,473 INFO [train.py:715] (7/8) Epoch 11, batch 13100, loss[loss=0.128, simple_loss=0.205, pruned_loss=0.02552, over 4993.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03301, over 972379.29 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:13:53,586 INFO [train.py:715] (7/8) Epoch 11, batch 13150, loss[loss=0.1456, simple_loss=0.2162, pruned_loss=0.03748, over 4753.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03292, over 971973.68 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:14:32,700 INFO [train.py:715] (7/8) Epoch 11, batch 13200, loss[loss=0.138, simple_loss=0.2176, pruned_loss=0.02922, over 4870.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03337, over 972355.16 frames.], batch size: 22, lr: 1.97e-04 +2022-05-07 03:15:11,058 INFO [train.py:715] (7/8) Epoch 11, batch 13250, loss[loss=0.1491, simple_loss=0.22, pruned_loss=0.03911, over 4915.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03333, over 972645.08 frames.], batch size: 39, lr: 1.97e-04 +2022-05-07 03:15:50,453 INFO [train.py:715] (7/8) Epoch 11, batch 13300, loss[loss=0.1528, simple_loss=0.2228, pruned_loss=0.0414, over 4780.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2111, pruned_loss=0.03317, over 972654.95 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 
03:16:29,353 INFO [train.py:715] (7/8) Epoch 11, batch 13350, loss[loss=0.1079, simple_loss=0.1822, pruned_loss=0.01675, over 4790.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2113, pruned_loss=0.03282, over 971844.97 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:17:08,600 INFO [train.py:715] (7/8) Epoch 11, batch 13400, loss[loss=0.1287, simple_loss=0.1989, pruned_loss=0.02928, over 4803.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2107, pruned_loss=0.03294, over 972909.21 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:17:47,309 INFO [train.py:715] (7/8) Epoch 11, batch 13450, loss[loss=0.1562, simple_loss=0.2233, pruned_loss=0.04456, over 4864.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2113, pruned_loss=0.03308, over 973017.75 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 03:18:26,310 INFO [train.py:715] (7/8) Epoch 11, batch 13500, loss[loss=0.1548, simple_loss=0.2218, pruned_loss=0.04387, over 4842.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03331, over 972702.26 frames.], batch size: 13, lr: 1.97e-04 +2022-05-07 03:19:05,026 INFO [train.py:715] (7/8) Epoch 11, batch 13550, loss[loss=0.129, simple_loss=0.2095, pruned_loss=0.02422, over 4788.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.03288, over 972099.95 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:19:44,148 INFO [train.py:715] (7/8) Epoch 11, batch 13600, loss[loss=0.1218, simple_loss=0.1925, pruned_loss=0.02555, over 4977.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03246, over 972695.72 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:20:22,538 INFO [train.py:715] (7/8) Epoch 11, batch 13650, loss[loss=0.1612, simple_loss=0.2285, pruned_loss=0.04694, over 4865.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03262, over 972973.08 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:21:00,720 INFO [train.py:715] (7/8) Epoch 11, batch 13700, loss[loss=0.1611, simple_loss=0.2253, pruned_loss=0.04847, over 4877.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03317, over 973162.86 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:21:39,799 INFO [train.py:715] (7/8) Epoch 11, batch 13750, loss[loss=0.1466, simple_loss=0.2247, pruned_loss=0.03421, over 4851.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03289, over 973114.82 frames.], batch size: 20, lr: 1.97e-04 +2022-05-07 03:22:19,175 INFO [train.py:715] (7/8) Epoch 11, batch 13800, loss[loss=0.1337, simple_loss=0.2102, pruned_loss=0.02861, over 4926.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03293, over 972099.49 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:22:57,645 INFO [train.py:715] (7/8) Epoch 11, batch 13850, loss[loss=0.1002, simple_loss=0.1754, pruned_loss=0.01249, over 4824.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03246, over 971941.66 frames.], batch size: 25, lr: 1.97e-04 +2022-05-07 03:23:37,058 INFO [train.py:715] (7/8) Epoch 11, batch 13900, loss[loss=0.1061, simple_loss=0.1748, pruned_loss=0.01866, over 4813.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03199, over 972256.52 frames.], batch size: 13, lr: 1.97e-04 +2022-05-07 03:24:15,994 INFO [train.py:715] (7/8) Epoch 11, batch 13950, loss[loss=0.155, simple_loss=0.2292, pruned_loss=0.04039, over 4798.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.03235, over 972470.69 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 
03:24:55,160 INFO [train.py:715] (7/8) Epoch 11, batch 14000, loss[loss=0.1146, simple_loss=0.1817, pruned_loss=0.0238, over 4983.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03261, over 972382.63 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:25:34,612 INFO [train.py:715] (7/8) Epoch 11, batch 14050, loss[loss=0.1437, simple_loss=0.224, pruned_loss=0.03172, over 4745.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2111, pruned_loss=0.03291, over 971929.28 frames.], batch size: 16, lr: 1.97e-04 +2022-05-07 03:26:14,332 INFO [train.py:715] (7/8) Epoch 11, batch 14100, loss[loss=0.1323, simple_loss=0.2057, pruned_loss=0.0294, over 4930.00 frames.], tot_loss[loss=0.14, simple_loss=0.2127, pruned_loss=0.03365, over 971355.29 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 03:26:53,603 INFO [train.py:715] (7/8) Epoch 11, batch 14150, loss[loss=0.142, simple_loss=0.2105, pruned_loss=0.03678, over 4803.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03358, over 970606.73 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:27:32,871 INFO [train.py:715] (7/8) Epoch 11, batch 14200, loss[loss=0.1399, simple_loss=0.2127, pruned_loss=0.03356, over 4801.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03325, over 970737.53 frames.], batch size: 24, lr: 1.97e-04 +2022-05-07 03:28:13,019 INFO [train.py:715] (7/8) Epoch 11, batch 14250, loss[loss=0.1305, simple_loss=0.2024, pruned_loss=0.02931, over 4912.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03264, over 970538.75 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:28:53,023 INFO [train.py:715] (7/8) Epoch 11, batch 14300, loss[loss=0.1578, simple_loss=0.2233, pruned_loss=0.04621, over 4766.00 frames.], tot_loss[loss=0.139, simple_loss=0.2118, pruned_loss=0.03309, over 971395.96 frames.], batch size: 19, lr: 1.97e-04 +2022-05-07 03:29:32,291 INFO [train.py:715] (7/8) Epoch 11, batch 14350, loss[loss=0.1518, simple_loss=0.2148, pruned_loss=0.04447, over 4961.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03286, over 971332.60 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:30:12,239 INFO [train.py:715] (7/8) Epoch 11, batch 14400, loss[loss=0.1376, simple_loss=0.2057, pruned_loss=0.03477, over 4937.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03248, over 971867.65 frames.], batch size: 29, lr: 1.97e-04 +2022-05-07 03:30:52,513 INFO [train.py:715] (7/8) Epoch 11, batch 14450, loss[loss=0.1469, simple_loss=0.2235, pruned_loss=0.03515, over 4822.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03243, over 971816.07 frames.], batch size: 26, lr: 1.97e-04 +2022-05-07 03:31:31,925 INFO [train.py:715] (7/8) Epoch 11, batch 14500, loss[loss=0.1652, simple_loss=0.2299, pruned_loss=0.05022, over 4909.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03259, over 972625.98 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 03:32:11,425 INFO [train.py:715] (7/8) Epoch 11, batch 14550, loss[loss=0.1748, simple_loss=0.2427, pruned_loss=0.05345, over 4865.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03223, over 972889.73 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:32:51,266 INFO [train.py:715] (7/8) Epoch 11, batch 14600, loss[loss=0.1593, simple_loss=0.2254, pruned_loss=0.04659, over 4787.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2102, pruned_loss=0.03211, over 973217.09 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:33:30,638 
INFO [train.py:715] (7/8) Epoch 11, batch 14650, loss[loss=0.1337, simple_loss=0.2108, pruned_loss=0.0283, over 4846.00 frames.], tot_loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.03185, over 972568.58 frames.], batch size: 34, lr: 1.97e-04 +2022-05-07 03:34:09,057 INFO [train.py:715] (7/8) Epoch 11, batch 14700, loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02859, over 4991.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03158, over 973294.09 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:34:48,552 INFO [train.py:715] (7/8) Epoch 11, batch 14750, loss[loss=0.1256, simple_loss=0.1866, pruned_loss=0.0323, over 4796.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.0319, over 972786.32 frames.], batch size: 12, lr: 1.97e-04 +2022-05-07 03:35:27,682 INFO [train.py:715] (7/8) Epoch 11, batch 14800, loss[loss=0.1208, simple_loss=0.1991, pruned_loss=0.02129, over 4802.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03177, over 972961.13 frames.], batch size: 24, lr: 1.97e-04 +2022-05-07 03:36:06,360 INFO [train.py:715] (7/8) Epoch 11, batch 14850, loss[loss=0.1735, simple_loss=0.2386, pruned_loss=0.05425, over 4879.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03158, over 972278.46 frames.], batch size: 32, lr: 1.97e-04 +2022-05-07 03:36:45,863 INFO [train.py:715] (7/8) Epoch 11, batch 14900, loss[loss=0.1395, simple_loss=0.2114, pruned_loss=0.0338, over 4956.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03196, over 971475.94 frames.], batch size: 24, lr: 1.97e-04 +2022-05-07 03:37:25,087 INFO [train.py:715] (7/8) Epoch 11, batch 14950, loss[loss=0.1319, simple_loss=0.2157, pruned_loss=0.02404, over 4785.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.0322, over 972033.51 frames.], batch size: 14, lr: 1.97e-04 +2022-05-07 03:38:03,592 INFO [train.py:715] (7/8) Epoch 11, batch 15000, loss[loss=0.1168, simple_loss=0.1962, pruned_loss=0.01873, over 4984.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03246, over 971841.13 frames.], batch size: 28, lr: 1.97e-04 +2022-05-07 03:38:03,592 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 03:38:13,229 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.106, simple_loss=0.1901, pruned_loss=0.01091, over 914524.00 frames. 
+2022-05-07 03:38:52,003 INFO [train.py:715] (7/8) Epoch 11, batch 15050, loss[loss=0.1293, simple_loss=0.2112, pruned_loss=0.02369, over 4912.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03276, over 970712.22 frames.], batch size: 17, lr: 1.97e-04 +2022-05-07 03:39:30,962 INFO [train.py:715] (7/8) Epoch 11, batch 15100, loss[loss=0.1306, simple_loss=0.2089, pruned_loss=0.02621, over 4776.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.03276, over 971720.71 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:40:10,672 INFO [train.py:715] (7/8) Epoch 11, batch 15150, loss[loss=0.1595, simple_loss=0.2301, pruned_loss=0.04448, over 4953.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.0328, over 971537.15 frames.], batch size: 21, lr: 1.97e-04 +2022-05-07 03:40:49,843 INFO [train.py:715] (7/8) Epoch 11, batch 15200, loss[loss=0.1425, simple_loss=0.2131, pruned_loss=0.03598, over 4849.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03238, over 971589.45 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:41:28,410 INFO [train.py:715] (7/8) Epoch 11, batch 15250, loss[loss=0.1695, simple_loss=0.2358, pruned_loss=0.05163, over 4971.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.03291, over 970746.00 frames.], batch size: 15, lr: 1.97e-04 +2022-05-07 03:42:07,672 INFO [train.py:715] (7/8) Epoch 11, batch 15300, loss[loss=0.1633, simple_loss=0.2424, pruned_loss=0.04216, over 4773.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03307, over 971402.48 frames.], batch size: 18, lr: 1.97e-04 +2022-05-07 03:42:46,995 INFO [train.py:715] (7/8) Epoch 11, batch 15350, loss[loss=0.1293, simple_loss=0.1989, pruned_loss=0.02988, over 4988.00 frames.], tot_loss[loss=0.139, simple_loss=0.2124, pruned_loss=0.0328, over 971416.87 frames.], batch size: 28, lr: 1.96e-04 +2022-05-07 03:43:25,865 INFO [train.py:715] (7/8) Epoch 11, batch 15400, loss[loss=0.1273, simple_loss=0.1998, pruned_loss=0.02745, over 4777.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03302, over 970966.11 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 03:44:04,608 INFO [train.py:715] (7/8) Epoch 11, batch 15450, loss[loss=0.1356, simple_loss=0.2027, pruned_loss=0.0342, over 4907.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03322, over 970554.82 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 03:44:44,028 INFO [train.py:715] (7/8) Epoch 11, batch 15500, loss[loss=0.1277, simple_loss=0.203, pruned_loss=0.0262, over 4932.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2116, pruned_loss=0.03347, over 971453.82 frames.], batch size: 23, lr: 1.96e-04 +2022-05-07 03:45:23,172 INFO [train.py:715] (7/8) Epoch 11, batch 15550, loss[loss=0.1163, simple_loss=0.1981, pruned_loss=0.01724, over 4810.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2122, pruned_loss=0.03333, over 971752.84 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 03:46:01,708 INFO [train.py:715] (7/8) Epoch 11, batch 15600, loss[loss=0.133, simple_loss=0.209, pruned_loss=0.02854, over 4779.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03294, over 972441.64 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 03:46:40,883 INFO [train.py:715] (7/8) Epoch 11, batch 15650, loss[loss=0.1137, simple_loss=0.192, pruned_loss=0.0177, over 4777.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.0325, over 972140.07 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 
03:47:19,845 INFO [train.py:715] (7/8) Epoch 11, batch 15700, loss[loss=0.1271, simple_loss=0.2025, pruned_loss=0.02586, over 4941.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03238, over 972623.39 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 03:47:58,649 INFO [train.py:715] (7/8) Epoch 11, batch 15750, loss[loss=0.1303, simple_loss=0.205, pruned_loss=0.02775, over 4931.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.0325, over 972334.37 frames.], batch size: 29, lr: 1.96e-04 +2022-05-07 03:48:37,396 INFO [train.py:715] (7/8) Epoch 11, batch 15800, loss[loss=0.1479, simple_loss=0.227, pruned_loss=0.03441, over 4845.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03255, over 972327.62 frames.], batch size: 20, lr: 1.96e-04 +2022-05-07 03:49:16,758 INFO [train.py:715] (7/8) Epoch 11, batch 15850, loss[loss=0.1546, simple_loss=0.2219, pruned_loss=0.0436, over 4994.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03225, over 973590.71 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 03:49:55,695 INFO [train.py:715] (7/8) Epoch 11, batch 15900, loss[loss=0.1622, simple_loss=0.2274, pruned_loss=0.04851, over 4867.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03233, over 972793.60 frames.], batch size: 20, lr: 1.96e-04 +2022-05-07 03:50:34,611 INFO [train.py:715] (7/8) Epoch 11, batch 15950, loss[loss=0.1252, simple_loss=0.1949, pruned_loss=0.02775, over 4765.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03242, over 972108.97 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 03:51:13,827 INFO [train.py:715] (7/8) Epoch 11, batch 16000, loss[loss=0.1453, simple_loss=0.2198, pruned_loss=0.03542, over 4706.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03257, over 971717.53 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 03:51:53,250 INFO [train.py:715] (7/8) Epoch 11, batch 16050, loss[loss=0.1449, simple_loss=0.2122, pruned_loss=0.03883, over 4840.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03247, over 971538.49 frames.], batch size: 32, lr: 1.96e-04 +2022-05-07 03:52:31,940 INFO [train.py:715] (7/8) Epoch 11, batch 16100, loss[loss=0.1391, simple_loss=0.2007, pruned_loss=0.03877, over 4879.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2103, pruned_loss=0.03234, over 970750.41 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 03:53:10,817 INFO [train.py:715] (7/8) Epoch 11, batch 16150, loss[loss=0.1286, simple_loss=0.2005, pruned_loss=0.02837, over 4964.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2106, pruned_loss=0.03238, over 972072.92 frames.], batch size: 24, lr: 1.96e-04 +2022-05-07 03:53:50,404 INFO [train.py:715] (7/8) Epoch 11, batch 16200, loss[loss=0.1029, simple_loss=0.1853, pruned_loss=0.01023, over 4813.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2106, pruned_loss=0.03239, over 970879.69 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 03:54:29,888 INFO [train.py:715] (7/8) Epoch 11, batch 16250, loss[loss=0.1338, simple_loss=0.2022, pruned_loss=0.03265, over 4881.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03257, over 970921.94 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 03:55:08,237 INFO [train.py:715] (7/8) Epoch 11, batch 16300, loss[loss=0.1123, simple_loss=0.1761, pruned_loss=0.02421, over 4821.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03224, over 970667.68 frames.], batch size: 27, lr: 1.96e-04 +2022-05-07 
03:55:47,435 INFO [train.py:715] (7/8) Epoch 11, batch 16350, loss[loss=0.1541, simple_loss=0.2348, pruned_loss=0.03669, over 4817.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2123, pruned_loss=0.03258, over 971238.81 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 03:56:26,683 INFO [train.py:715] (7/8) Epoch 11, batch 16400, loss[loss=0.1463, simple_loss=0.2115, pruned_loss=0.0406, over 4764.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.03275, over 971911.74 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 03:57:05,183 INFO [train.py:715] (7/8) Epoch 11, batch 16450, loss[loss=0.1346, simple_loss=0.2062, pruned_loss=0.0315, over 4869.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03297, over 972082.79 frames.], batch size: 20, lr: 1.96e-04 +2022-05-07 03:57:44,154 INFO [train.py:715] (7/8) Epoch 11, batch 16500, loss[loss=0.1424, simple_loss=0.218, pruned_loss=0.0334, over 4892.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03294, over 972463.15 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 03:58:23,672 INFO [train.py:715] (7/8) Epoch 11, batch 16550, loss[loss=0.1515, simple_loss=0.2303, pruned_loss=0.03639, over 4880.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03307, over 971918.87 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 03:59:02,828 INFO [train.py:715] (7/8) Epoch 11, batch 16600, loss[loss=0.1255, simple_loss=0.2077, pruned_loss=0.02167, over 4986.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03278, over 972758.88 frames.], batch size: 28, lr: 1.96e-04 +2022-05-07 03:59:41,211 INFO [train.py:715] (7/8) Epoch 11, batch 16650, loss[loss=0.1693, simple_loss=0.254, pruned_loss=0.04226, over 4822.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03244, over 972828.30 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:00:20,431 INFO [train.py:715] (7/8) Epoch 11, batch 16700, loss[loss=0.1394, simple_loss=0.2098, pruned_loss=0.03446, over 4754.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03215, over 972177.50 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:00:59,411 INFO [train.py:715] (7/8) Epoch 11, batch 16750, loss[loss=0.1394, simple_loss=0.2077, pruned_loss=0.03552, over 4858.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03267, over 972453.17 frames.], batch size: 20, lr: 1.96e-04 +2022-05-07 04:01:38,341 INFO [train.py:715] (7/8) Epoch 11, batch 16800, loss[loss=0.1462, simple_loss=0.2193, pruned_loss=0.03655, over 4697.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2125, pruned_loss=0.03281, over 972209.49 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:02:17,998 INFO [train.py:715] (7/8) Epoch 11, batch 16850, loss[loss=0.1177, simple_loss=0.1913, pruned_loss=0.02206, over 4982.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03278, over 971995.77 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:02:57,560 INFO [train.py:715] (7/8) Epoch 11, batch 16900, loss[loss=0.1411, simple_loss=0.2108, pruned_loss=0.03571, over 4979.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03302, over 972277.62 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:03:37,029 INFO [train.py:715] (7/8) Epoch 11, batch 16950, loss[loss=0.1165, simple_loss=0.1922, pruned_loss=0.0204, over 4856.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03262, over 972168.37 frames.], batch size: 20, lr: 1.96e-04 +2022-05-07 04:04:15,766 
INFO [train.py:715] (7/8) Epoch 11, batch 17000, loss[loss=0.1314, simple_loss=0.2069, pruned_loss=0.02799, over 4811.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03346, over 970312.67 frames.], batch size: 24, lr: 1.96e-04 +2022-05-07 04:04:55,495 INFO [train.py:715] (7/8) Epoch 11, batch 17050, loss[loss=0.1458, simple_loss=0.222, pruned_loss=0.03484, over 4983.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2134, pruned_loss=0.03394, over 971441.92 frames.], batch size: 28, lr: 1.96e-04 +2022-05-07 04:05:38,134 INFO [train.py:715] (7/8) Epoch 11, batch 17100, loss[loss=0.153, simple_loss=0.242, pruned_loss=0.03203, over 4777.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2131, pruned_loss=0.03358, over 972422.63 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:06:17,135 INFO [train.py:715] (7/8) Epoch 11, batch 17150, loss[loss=0.1442, simple_loss=0.2143, pruned_loss=0.03705, over 4772.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2124, pruned_loss=0.0333, over 972575.91 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:06:56,397 INFO [train.py:715] (7/8) Epoch 11, batch 17200, loss[loss=0.1297, simple_loss=0.2004, pruned_loss=0.02952, over 4953.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2115, pruned_loss=0.03298, over 972585.51 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:07:35,866 INFO [train.py:715] (7/8) Epoch 11, batch 17250, loss[loss=0.1357, simple_loss=0.1956, pruned_loss=0.03789, over 4902.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2107, pruned_loss=0.03281, over 972733.28 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 04:08:14,917 INFO [train.py:715] (7/8) Epoch 11, batch 17300, loss[loss=0.1577, simple_loss=0.2318, pruned_loss=0.04178, over 4863.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.03276, over 972331.50 frames.], batch size: 38, lr: 1.96e-04 +2022-05-07 04:08:53,648 INFO [train.py:715] (7/8) Epoch 11, batch 17350, loss[loss=0.1246, simple_loss=0.2043, pruned_loss=0.02242, over 4965.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03285, over 971658.43 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:09:33,977 INFO [train.py:715] (7/8) Epoch 11, batch 17400, loss[loss=0.1213, simple_loss=0.1908, pruned_loss=0.02597, over 4971.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03269, over 972185.95 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:10:14,473 INFO [train.py:715] (7/8) Epoch 11, batch 17450, loss[loss=0.1247, simple_loss=0.2009, pruned_loss=0.02427, over 4892.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2109, pruned_loss=0.03263, over 972338.34 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 04:10:53,786 INFO [train.py:715] (7/8) Epoch 11, batch 17500, loss[loss=0.1308, simple_loss=0.2101, pruned_loss=0.02575, over 4778.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.03274, over 971974.76 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:11:33,223 INFO [train.py:715] (7/8) Epoch 11, batch 17550, loss[loss=0.1366, simple_loss=0.2116, pruned_loss=0.03078, over 4856.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03263, over 972463.92 frames.], batch size: 20, lr: 1.96e-04 +2022-05-07 04:12:12,578 INFO [train.py:715] (7/8) Epoch 11, batch 17600, loss[loss=0.1257, simple_loss=0.2033, pruned_loss=0.0241, over 4747.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.03233, over 972995.92 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 04:12:51,731 INFO 
[train.py:715] (7/8) Epoch 11, batch 17650, loss[loss=0.1445, simple_loss=0.2176, pruned_loss=0.03575, over 4816.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2101, pruned_loss=0.03226, over 973304.35 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:13:29,969 INFO [train.py:715] (7/8) Epoch 11, batch 17700, loss[loss=0.1349, simple_loss=0.199, pruned_loss=0.03537, over 4982.00 frames.], tot_loss[loss=0.1364, simple_loss=0.209, pruned_loss=0.03191, over 973500.85 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:14:09,455 INFO [train.py:715] (7/8) Epoch 11, batch 17750, loss[loss=0.1455, simple_loss=0.2228, pruned_loss=0.03414, over 4823.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2095, pruned_loss=0.0319, over 973413.68 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:14:49,018 INFO [train.py:715] (7/8) Epoch 11, batch 17800, loss[loss=0.1747, simple_loss=0.2391, pruned_loss=0.05521, over 4833.00 frames.], tot_loss[loss=0.137, simple_loss=0.2096, pruned_loss=0.03224, over 972914.62 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:15:27,267 INFO [train.py:715] (7/8) Epoch 11, batch 17850, loss[loss=0.1266, simple_loss=0.2012, pruned_loss=0.02604, over 4827.00 frames.], tot_loss[loss=0.1373, simple_loss=0.21, pruned_loss=0.03231, over 971992.86 frames.], batch size: 27, lr: 1.96e-04 +2022-05-07 04:16:06,259 INFO [train.py:715] (7/8) Epoch 11, batch 17900, loss[loss=0.1546, simple_loss=0.2296, pruned_loss=0.03984, over 4785.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2104, pruned_loss=0.03245, over 972132.22 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 04:16:45,882 INFO [train.py:715] (7/8) Epoch 11, batch 17950, loss[loss=0.1307, simple_loss=0.2023, pruned_loss=0.02959, over 4889.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.03271, over 972718.37 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 04:17:24,876 INFO [train.py:715] (7/8) Epoch 11, batch 18000, loss[loss=0.1265, simple_loss=0.1938, pruned_loss=0.02961, over 4650.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2102, pruned_loss=0.03218, over 971237.67 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 04:17:24,876 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 04:17:34,462 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1061, simple_loss=0.1903, pruned_loss=0.01092, over 914524.00 frames. 
+2022-05-07 04:18:14,141 INFO [train.py:715] (7/8) Epoch 11, batch 18050, loss[loss=0.1432, simple_loss=0.2178, pruned_loss=0.03428, over 4720.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.0323, over 970496.00 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:18:53,410 INFO [train.py:715] (7/8) Epoch 11, batch 18100, loss[loss=0.1265, simple_loss=0.1949, pruned_loss=0.02909, over 4833.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03243, over 971190.01 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:19:32,618 INFO [train.py:715] (7/8) Epoch 11, batch 18150, loss[loss=0.1383, simple_loss=0.2156, pruned_loss=0.03046, over 4848.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03236, over 971020.65 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:20:12,195 INFO [train.py:715] (7/8) Epoch 11, batch 18200, loss[loss=0.1487, simple_loss=0.2274, pruned_loss=0.03495, over 4820.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03248, over 971160.14 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 04:20:50,628 INFO [train.py:715] (7/8) Epoch 11, batch 18250, loss[loss=0.1233, simple_loss=0.1963, pruned_loss=0.02517, over 4912.00 frames.], tot_loss[loss=0.138, simple_loss=0.2109, pruned_loss=0.03258, over 971377.73 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:21:29,929 INFO [train.py:715] (7/8) Epoch 11, batch 18300, loss[loss=0.1295, simple_loss=0.1994, pruned_loss=0.02979, over 4850.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03221, over 971126.62 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 04:22:09,176 INFO [train.py:715] (7/8) Epoch 11, batch 18350, loss[loss=0.1265, simple_loss=0.1953, pruned_loss=0.0288, over 4952.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03259, over 972118.21 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:22:47,572 INFO [train.py:715] (7/8) Epoch 11, batch 18400, loss[loss=0.1322, simple_loss=0.2054, pruned_loss=0.02945, over 4653.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2105, pruned_loss=0.03267, over 972223.36 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 04:23:25,989 INFO [train.py:715] (7/8) Epoch 11, batch 18450, loss[loss=0.1464, simple_loss=0.2173, pruned_loss=0.03776, over 4884.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03238, over 971945.12 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:24:05,023 INFO [train.py:715] (7/8) Epoch 11, batch 18500, loss[loss=0.127, simple_loss=0.2063, pruned_loss=0.0238, over 4881.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03254, over 972750.35 frames.], batch size: 19, lr: 1.96e-04 +2022-05-07 04:24:44,461 INFO [train.py:715] (7/8) Epoch 11, batch 18550, loss[loss=0.1286, simple_loss=0.208, pruned_loss=0.02466, over 4731.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03224, over 972248.72 frames.], batch size: 16, lr: 1.96e-04 +2022-05-07 04:25:22,565 INFO [train.py:715] (7/8) Epoch 11, batch 18600, loss[loss=0.1219, simple_loss=0.1972, pruned_loss=0.02328, over 4817.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.03217, over 971813.60 frames.], batch size: 25, lr: 1.96e-04 +2022-05-07 04:26:01,412 INFO [train.py:715] (7/8) Epoch 11, batch 18650, loss[loss=0.1061, simple_loss=0.1861, pruned_loss=0.01303, over 4840.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2117, pruned_loss=0.03212, over 971886.13 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 
04:26:40,669 INFO [train.py:715] (7/8) Epoch 11, batch 18700, loss[loss=0.1447, simple_loss=0.2254, pruned_loss=0.03204, over 4918.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03264, over 972516.58 frames.], batch size: 23, lr: 1.96e-04 +2022-05-07 04:27:18,911 INFO [train.py:715] (7/8) Epoch 11, batch 18750, loss[loss=0.1835, simple_loss=0.2562, pruned_loss=0.05541, over 4892.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03277, over 972566.65 frames.], batch size: 17, lr: 1.96e-04 +2022-05-07 04:27:57,978 INFO [train.py:715] (7/8) Epoch 11, batch 18800, loss[loss=0.1654, simple_loss=0.2315, pruned_loss=0.04963, over 4770.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03271, over 972919.80 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:28:36,594 INFO [train.py:715] (7/8) Epoch 11, batch 18850, loss[loss=0.1414, simple_loss=0.219, pruned_loss=0.03183, over 4989.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03255, over 973719.05 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:29:16,486 INFO [train.py:715] (7/8) Epoch 11, batch 18900, loss[loss=0.1404, simple_loss=0.2141, pruned_loss=0.03336, over 4887.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03282, over 972596.70 frames.], batch size: 22, lr: 1.96e-04 +2022-05-07 04:29:55,266 INFO [train.py:715] (7/8) Epoch 11, batch 18950, loss[loss=0.1546, simple_loss=0.2253, pruned_loss=0.04197, over 4690.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03311, over 971477.61 frames.], batch size: 15, lr: 1.96e-04 +2022-05-07 04:30:34,363 INFO [train.py:715] (7/8) Epoch 11, batch 19000, loss[loss=0.1712, simple_loss=0.2566, pruned_loss=0.04288, over 4922.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03242, over 972029.95 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:31:13,457 INFO [train.py:715] (7/8) Epoch 11, batch 19050, loss[loss=0.1321, simple_loss=0.2031, pruned_loss=0.0306, over 4834.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03237, over 972988.22 frames.], batch size: 13, lr: 1.96e-04 +2022-05-07 04:31:52,057 INFO [train.py:715] (7/8) Epoch 11, batch 19100, loss[loss=0.1436, simple_loss=0.2271, pruned_loss=0.03003, over 4792.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2129, pruned_loss=0.03302, over 972463.35 frames.], batch size: 18, lr: 1.96e-04 +2022-05-07 04:32:31,181 INFO [train.py:715] (7/8) Epoch 11, batch 19150, loss[loss=0.1525, simple_loss=0.2205, pruned_loss=0.04228, over 4808.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.0328, over 972507.58 frames.], batch size: 21, lr: 1.96e-04 +2022-05-07 04:33:10,078 INFO [train.py:715] (7/8) Epoch 11, batch 19200, loss[loss=0.133, simple_loss=0.206, pruned_loss=0.03001, over 4853.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03253, over 972594.66 frames.], batch size: 30, lr: 1.96e-04 +2022-05-07 04:33:49,485 INFO [train.py:715] (7/8) Epoch 11, batch 19250, loss[loss=0.1335, simple_loss=0.2019, pruned_loss=0.03255, over 4841.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03176, over 972097.13 frames.], batch size: 30, lr: 1.96e-04 +2022-05-07 04:34:27,829 INFO [train.py:715] (7/8) Epoch 11, batch 19300, loss[loss=0.1367, simple_loss=0.2084, pruned_loss=0.0325, over 4977.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03244, over 972316.39 frames.], batch size: 35, lr: 1.96e-04 +2022-05-07 04:35:06,981 
INFO [train.py:715] (7/8) Epoch 11, batch 19350, loss[loss=0.1519, simple_loss=0.2313, pruned_loss=0.03622, over 4859.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03245, over 972816.00 frames.], batch size: 34, lr: 1.96e-04 +2022-05-07 04:35:46,160 INFO [train.py:715] (7/8) Epoch 11, batch 19400, loss[loss=0.11, simple_loss=0.1846, pruned_loss=0.01766, over 4756.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2108, pruned_loss=0.03229, over 971718.28 frames.], batch size: 14, lr: 1.96e-04 +2022-05-07 04:36:24,110 INFO [train.py:715] (7/8) Epoch 11, batch 19450, loss[loss=0.1285, simple_loss=0.2041, pruned_loss=0.02644, over 4923.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2104, pruned_loss=0.03226, over 971922.83 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 04:37:03,251 INFO [train.py:715] (7/8) Epoch 11, batch 19500, loss[loss=0.1444, simple_loss=0.2208, pruned_loss=0.03403, over 4915.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03177, over 972355.63 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 04:37:42,218 INFO [train.py:715] (7/8) Epoch 11, batch 19550, loss[loss=0.1575, simple_loss=0.218, pruned_loss=0.04851, over 4982.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03239, over 973167.27 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 04:38:20,964 INFO [train.py:715] (7/8) Epoch 11, batch 19600, loss[loss=0.1266, simple_loss=0.1954, pruned_loss=0.02893, over 4697.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.03304, over 973210.05 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 04:38:59,547 INFO [train.py:715] (7/8) Epoch 11, batch 19650, loss[loss=0.147, simple_loss=0.2108, pruned_loss=0.04155, over 4837.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03308, over 972782.30 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 04:39:38,340 INFO [train.py:715] (7/8) Epoch 11, batch 19700, loss[loss=0.1499, simple_loss=0.2242, pruned_loss=0.03776, over 4813.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.0332, over 972093.89 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 04:40:17,420 INFO [train.py:715] (7/8) Epoch 11, batch 19750, loss[loss=0.1432, simple_loss=0.222, pruned_loss=0.03213, over 4970.00 frames.], tot_loss[loss=0.139, simple_loss=0.2122, pruned_loss=0.03291, over 971562.58 frames.], batch size: 24, lr: 1.95e-04 +2022-05-07 04:40:55,511 INFO [train.py:715] (7/8) Epoch 11, batch 19800, loss[loss=0.1216, simple_loss=0.2014, pruned_loss=0.02093, over 4846.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03216, over 972097.88 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 04:41:35,008 INFO [train.py:715] (7/8) Epoch 11, batch 19850, loss[loss=0.1532, simple_loss=0.2277, pruned_loss=0.03935, over 4778.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.0323, over 971769.63 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:42:14,375 INFO [train.py:715] (7/8) Epoch 11, batch 19900, loss[loss=0.1695, simple_loss=0.226, pruned_loss=0.05648, over 4863.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03246, over 971704.93 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 04:42:53,604 INFO [train.py:715] (7/8) Epoch 11, batch 19950, loss[loss=0.1238, simple_loss=0.2058, pruned_loss=0.02091, over 4985.00 frames.], tot_loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.03194, over 972127.38 frames.], batch size: 28, lr: 1.95e-04 +2022-05-07 04:43:32,804 INFO 
[train.py:715] (7/8) Epoch 11, batch 20000, loss[loss=0.1414, simple_loss=0.2166, pruned_loss=0.03305, over 4817.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03215, over 972573.94 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 04:44:11,787 INFO [train.py:715] (7/8) Epoch 11, batch 20050, loss[loss=0.1153, simple_loss=0.1951, pruned_loss=0.01779, over 4922.00 frames.], tot_loss[loss=0.1371, simple_loss=0.21, pruned_loss=0.03207, over 971670.09 frames.], batch size: 29, lr: 1.95e-04 +2022-05-07 04:44:51,034 INFO [train.py:715] (7/8) Epoch 11, batch 20100, loss[loss=0.1327, simple_loss=0.2159, pruned_loss=0.02473, over 4813.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03257, over 971531.72 frames.], batch size: 25, lr: 1.95e-04 +2022-05-07 04:45:29,366 INFO [train.py:715] (7/8) Epoch 11, batch 20150, loss[loss=0.15, simple_loss=0.2257, pruned_loss=0.03714, over 4875.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2109, pruned_loss=0.03278, over 972426.25 frames.], batch size: 22, lr: 1.95e-04 +2022-05-07 04:46:08,144 INFO [train.py:715] (7/8) Epoch 11, batch 20200, loss[loss=0.1182, simple_loss=0.1974, pruned_loss=0.01949, over 4936.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2101, pruned_loss=0.03222, over 972755.60 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 04:46:46,986 INFO [train.py:715] (7/8) Epoch 11, batch 20250, loss[loss=0.1308, simple_loss=0.1987, pruned_loss=0.03149, over 4836.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03259, over 973655.63 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 04:47:25,728 INFO [train.py:715] (7/8) Epoch 11, batch 20300, loss[loss=0.1492, simple_loss=0.2172, pruned_loss=0.04059, over 4850.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2114, pruned_loss=0.03274, over 973063.16 frames.], batch size: 34, lr: 1.95e-04 +2022-05-07 04:48:04,826 INFO [train.py:715] (7/8) Epoch 11, batch 20350, loss[loss=0.1533, simple_loss=0.2359, pruned_loss=0.03532, over 4802.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03246, over 972880.29 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 04:48:43,800 INFO [train.py:715] (7/8) Epoch 11, batch 20400, loss[loss=0.1152, simple_loss=0.1901, pruned_loss=0.02018, over 4854.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.0324, over 972717.51 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 04:49:23,229 INFO [train.py:715] (7/8) Epoch 11, batch 20450, loss[loss=0.1768, simple_loss=0.2486, pruned_loss=0.05254, over 4887.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03259, over 971474.44 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:50:01,762 INFO [train.py:715] (7/8) Epoch 11, batch 20500, loss[loss=0.1245, simple_loss=0.1951, pruned_loss=0.027, over 4792.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03247, over 971812.15 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 04:50:41,081 INFO [train.py:715] (7/8) Epoch 11, batch 20550, loss[loss=0.1261, simple_loss=0.1927, pruned_loss=0.02981, over 4742.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03237, over 971132.85 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 04:51:19,715 INFO [train.py:715] (7/8) Epoch 11, batch 20600, loss[loss=0.1164, simple_loss=0.1872, pruned_loss=0.02281, over 4814.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03203, over 970543.44 frames.], batch size: 27, lr: 1.95e-04 +2022-05-07 04:51:57,491 INFO [train.py:715] 
(7/8) Epoch 11, batch 20650, loss[loss=0.1381, simple_loss=0.2138, pruned_loss=0.03114, over 4869.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03208, over 970969.50 frames.], batch size: 20, lr: 1.95e-04 +2022-05-07 04:52:36,871 INFO [train.py:715] (7/8) Epoch 11, batch 20700, loss[loss=0.1205, simple_loss=0.2026, pruned_loss=0.0192, over 4800.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03228, over 971249.23 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 04:53:16,099 INFO [train.py:715] (7/8) Epoch 11, batch 20750, loss[loss=0.1642, simple_loss=0.2347, pruned_loss=0.04685, over 4850.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03211, over 971977.35 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 04:53:54,802 INFO [train.py:715] (7/8) Epoch 11, batch 20800, loss[loss=0.141, simple_loss=0.2197, pruned_loss=0.03116, over 4760.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03248, over 971144.45 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:54:33,172 INFO [train.py:715] (7/8) Epoch 11, batch 20850, loss[loss=0.1446, simple_loss=0.2157, pruned_loss=0.03671, over 4786.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03253, over 970300.61 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 04:55:12,421 INFO [train.py:715] (7/8) Epoch 11, batch 20900, loss[loss=0.1347, simple_loss=0.2182, pruned_loss=0.02556, over 4974.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03318, over 971618.65 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 04:55:52,031 INFO [train.py:715] (7/8) Epoch 11, batch 20950, loss[loss=0.1124, simple_loss=0.1863, pruned_loss=0.01925, over 4941.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2116, pruned_loss=0.03294, over 971986.83 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 04:56:30,994 INFO [train.py:715] (7/8) Epoch 11, batch 21000, loss[loss=0.1853, simple_loss=0.246, pruned_loss=0.06232, over 4943.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2109, pruned_loss=0.03276, over 972130.88 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 04:56:30,994 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 04:56:40,630 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.106, simple_loss=0.19, pruned_loss=0.01097, over 914524.00 frames. 
+2022-05-07 04:57:20,094 INFO [train.py:715] (7/8) Epoch 11, batch 21050, loss[loss=0.1474, simple_loss=0.2278, pruned_loss=0.03348, over 4819.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2111, pruned_loss=0.03282, over 971692.15 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 04:57:59,829 INFO [train.py:715] (7/8) Epoch 11, batch 21100, loss[loss=0.117, simple_loss=0.1998, pruned_loss=0.01708, over 4759.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2112, pruned_loss=0.03297, over 971718.28 frames.], batch size: 19, lr: 1.95e-04 +2022-05-07 04:58:38,865 INFO [train.py:715] (7/8) Epoch 11, batch 21150, loss[loss=0.1243, simple_loss=0.1997, pruned_loss=0.02445, over 4872.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2112, pruned_loss=0.03298, over 971373.41 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:59:18,201 INFO [train.py:715] (7/8) Epoch 11, batch 21200, loss[loss=0.1444, simple_loss=0.2214, pruned_loss=0.03365, over 4893.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03285, over 971429.01 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 04:59:56,326 INFO [train.py:715] (7/8) Epoch 11, batch 21250, loss[loss=0.1375, simple_loss=0.2122, pruned_loss=0.03144, over 4814.00 frames.], tot_loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03248, over 971371.62 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 05:00:35,639 INFO [train.py:715] (7/8) Epoch 11, batch 21300, loss[loss=0.1386, simple_loss=0.2128, pruned_loss=0.0322, over 4947.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03235, over 972314.88 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 05:01:15,030 INFO [train.py:715] (7/8) Epoch 11, batch 21350, loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03244, over 4933.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03233, over 971675.43 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 05:01:53,537 INFO [train.py:715] (7/8) Epoch 11, batch 21400, loss[loss=0.1482, simple_loss=0.2146, pruned_loss=0.04092, over 4909.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03255, over 971862.85 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 05:02:32,176 INFO [train.py:715] (7/8) Epoch 11, batch 21450, loss[loss=0.1243, simple_loss=0.1904, pruned_loss=0.02914, over 4883.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03264, over 971792.71 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:03:11,027 INFO [train.py:715] (7/8) Epoch 11, batch 21500, loss[loss=0.1497, simple_loss=0.2197, pruned_loss=0.03982, over 4897.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03295, over 972076.41 frames.], batch size: 39, lr: 1.95e-04 +2022-05-07 05:03:50,387 INFO [train.py:715] (7/8) Epoch 11, batch 21550, loss[loss=0.1397, simple_loss=0.2116, pruned_loss=0.03389, over 4851.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2118, pruned_loss=0.0329, over 972088.39 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:04:28,682 INFO [train.py:715] (7/8) Epoch 11, batch 21600, loss[loss=0.141, simple_loss=0.2108, pruned_loss=0.03562, over 4770.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.0329, over 971790.45 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 05:05:07,530 INFO [train.py:715] (7/8) Epoch 11, batch 21650, loss[loss=0.1539, simple_loss=0.2306, pruned_loss=0.03863, over 4887.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2133, pruned_loss=0.03312, over 971716.16 frames.], batch size: 39, lr: 1.95e-04 +2022-05-07 
05:05:47,581 INFO [train.py:715] (7/8) Epoch 11, batch 21700, loss[loss=0.1398, simple_loss=0.2113, pruned_loss=0.03418, over 4935.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2128, pruned_loss=0.03329, over 971673.58 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 05:06:26,874 INFO [train.py:715] (7/8) Epoch 11, batch 21750, loss[loss=0.1439, simple_loss=0.2294, pruned_loss=0.02917, over 4752.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2124, pruned_loss=0.03341, over 971488.89 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:07:07,060 INFO [train.py:715] (7/8) Epoch 11, batch 21800, loss[loss=0.1656, simple_loss=0.2265, pruned_loss=0.05237, over 4942.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2126, pruned_loss=0.03364, over 971116.92 frames.], batch size: 39, lr: 1.95e-04 +2022-05-07 05:07:46,735 INFO [train.py:715] (7/8) Epoch 11, batch 21850, loss[loss=0.1461, simple_loss=0.2119, pruned_loss=0.0401, over 4808.00 frames.], tot_loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03399, over 971716.71 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 05:08:27,227 INFO [train.py:715] (7/8) Epoch 11, batch 21900, loss[loss=0.155, simple_loss=0.2208, pruned_loss=0.04461, over 4811.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2127, pruned_loss=0.03374, over 972286.25 frames.], batch size: 13, lr: 1.95e-04 +2022-05-07 05:09:06,449 INFO [train.py:715] (7/8) Epoch 11, batch 21950, loss[loss=0.1298, simple_loss=0.2078, pruned_loss=0.02595, over 4749.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03324, over 972413.30 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:09:46,766 INFO [train.py:715] (7/8) Epoch 11, batch 22000, loss[loss=0.1133, simple_loss=0.1892, pruned_loss=0.01871, over 4780.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03265, over 972546.98 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 05:10:27,231 INFO [train.py:715] (7/8) Epoch 11, batch 22050, loss[loss=0.1194, simple_loss=0.1915, pruned_loss=0.02363, over 4977.00 frames.], tot_loss[loss=0.1392, simple_loss=0.212, pruned_loss=0.03314, over 973145.57 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:11:05,500 INFO [train.py:715] (7/8) Epoch 11, batch 22100, loss[loss=0.1267, simple_loss=0.2047, pruned_loss=0.02437, over 4834.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2126, pruned_loss=0.03335, over 973388.31 frames.], batch size: 26, lr: 1.95e-04 +2022-05-07 05:11:45,099 INFO [train.py:715] (7/8) Epoch 11, batch 22150, loss[loss=0.1087, simple_loss=0.1847, pruned_loss=0.01635, over 4869.00 frames.], tot_loss[loss=0.1389, simple_loss=0.212, pruned_loss=0.0329, over 973690.88 frames.], batch size: 16, lr: 1.95e-04 +2022-05-07 05:12:24,691 INFO [train.py:715] (7/8) Epoch 11, batch 22200, loss[loss=0.1406, simple_loss=0.2021, pruned_loss=0.03954, over 4957.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03285, over 973128.04 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:13:03,455 INFO [train.py:715] (7/8) Epoch 11, batch 22250, loss[loss=0.1155, simple_loss=0.1826, pruned_loss=0.02416, over 4786.00 frames.], tot_loss[loss=0.1381, simple_loss=0.211, pruned_loss=0.03263, over 972374.94 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 05:13:41,891 INFO [train.py:715] (7/8) Epoch 11, batch 22300, loss[loss=0.1666, simple_loss=0.2299, pruned_loss=0.05168, over 4839.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2111, pruned_loss=0.03291, over 972281.92 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 05:14:21,103 
INFO [train.py:715] (7/8) Epoch 11, batch 22350, loss[loss=0.1293, simple_loss=0.2027, pruned_loss=0.02799, over 4850.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2106, pruned_loss=0.03242, over 971624.70 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 05:15:00,557 INFO [train.py:715] (7/8) Epoch 11, batch 22400, loss[loss=0.1495, simple_loss=0.2211, pruned_loss=0.03897, over 4847.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2099, pruned_loss=0.03216, over 971939.45 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 05:15:38,492 INFO [train.py:715] (7/8) Epoch 11, batch 22450, loss[loss=0.1034, simple_loss=0.1753, pruned_loss=0.01575, over 4783.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2101, pruned_loss=0.03214, over 971546.17 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 05:16:18,411 INFO [train.py:715] (7/8) Epoch 11, batch 22500, loss[loss=0.141, simple_loss=0.2194, pruned_loss=0.03128, over 4864.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2105, pruned_loss=0.03241, over 970630.14 frames.], batch size: 20, lr: 1.95e-04 +2022-05-07 05:16:57,486 INFO [train.py:715] (7/8) Epoch 11, batch 22550, loss[loss=0.1585, simple_loss=0.243, pruned_loss=0.03697, over 4780.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03262, over 970816.36 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 05:17:36,657 INFO [train.py:715] (7/8) Epoch 11, batch 22600, loss[loss=0.1056, simple_loss=0.1736, pruned_loss=0.01875, over 4934.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03235, over 971555.41 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 05:18:15,029 INFO [train.py:715] (7/8) Epoch 11, batch 22650, loss[loss=0.1734, simple_loss=0.242, pruned_loss=0.05241, over 4964.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.0326, over 971638.75 frames.], batch size: 35, lr: 1.95e-04 +2022-05-07 05:18:54,215 INFO [train.py:715] (7/8) Epoch 11, batch 22700, loss[loss=0.1182, simple_loss=0.1994, pruned_loss=0.01856, over 4926.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03278, over 972283.32 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 05:19:34,077 INFO [train.py:715] (7/8) Epoch 11, batch 22750, loss[loss=0.1259, simple_loss=0.1973, pruned_loss=0.02725, over 4715.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03274, over 972583.20 frames.], batch size: 15, lr: 1.95e-04 +2022-05-07 05:20:12,498 INFO [train.py:715] (7/8) Epoch 11, batch 22800, loss[loss=0.1331, simple_loss=0.2028, pruned_loss=0.03172, over 4937.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2118, pruned_loss=0.03268, over 973291.32 frames.], batch size: 39, lr: 1.95e-04 +2022-05-07 05:20:52,299 INFO [train.py:715] (7/8) Epoch 11, batch 22850, loss[loss=0.1159, simple_loss=0.2015, pruned_loss=0.01509, over 4938.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2124, pruned_loss=0.03305, over 973304.48 frames.], batch size: 23, lr: 1.95e-04 +2022-05-07 05:21:31,222 INFO [train.py:715] (7/8) Epoch 11, batch 22900, loss[loss=0.1678, simple_loss=0.2384, pruned_loss=0.04856, over 4906.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.0332, over 973815.46 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 05:22:10,210 INFO [train.py:715] (7/8) Epoch 11, batch 22950, loss[loss=0.1403, simple_loss=0.2189, pruned_loss=0.03085, over 4792.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03254, over 973363.56 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 05:22:48,360 INFO 
[train.py:715] (7/8) Epoch 11, batch 23000, loss[loss=0.1312, simple_loss=0.2089, pruned_loss=0.02679, over 4792.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03278, over 972627.01 frames.], batch size: 21, lr: 1.95e-04 +2022-05-07 05:23:27,331 INFO [train.py:715] (7/8) Epoch 11, batch 23050, loss[loss=0.1717, simple_loss=0.2351, pruned_loss=0.05415, over 4934.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2112, pruned_loss=0.03262, over 972438.65 frames.], batch size: 35, lr: 1.95e-04 +2022-05-07 05:24:06,661 INFO [train.py:715] (7/8) Epoch 11, batch 23100, loss[loss=0.1302, simple_loss=0.2108, pruned_loss=0.02477, over 4872.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03255, over 972024.71 frames.], batch size: 22, lr: 1.95e-04 +2022-05-07 05:24:44,407 INFO [train.py:715] (7/8) Epoch 11, batch 23150, loss[loss=0.1292, simple_loss=0.2075, pruned_loss=0.02547, over 4910.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03302, over 971804.06 frames.], batch size: 18, lr: 1.95e-04 +2022-05-07 05:25:23,978 INFO [train.py:715] (7/8) Epoch 11, batch 23200, loss[loss=0.1445, simple_loss=0.2043, pruned_loss=0.04239, over 4838.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03282, over 971763.71 frames.], batch size: 12, lr: 1.95e-04 +2022-05-07 05:26:02,910 INFO [train.py:715] (7/8) Epoch 11, batch 23250, loss[loss=0.1387, simple_loss=0.2171, pruned_loss=0.03018, over 4887.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2124, pruned_loss=0.03321, over 971582.17 frames.], batch size: 22, lr: 1.95e-04 +2022-05-07 05:26:41,984 INFO [train.py:715] (7/8) Epoch 11, batch 23300, loss[loss=0.1459, simple_loss=0.2116, pruned_loss=0.04012, over 4853.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03263, over 972074.20 frames.], batch size: 30, lr: 1.95e-04 +2022-05-07 05:27:20,072 INFO [train.py:715] (7/8) Epoch 11, batch 23350, loss[loss=0.1519, simple_loss=0.2186, pruned_loss=0.04262, over 4850.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.0328, over 972677.09 frames.], batch size: 32, lr: 1.95e-04 +2022-05-07 05:27:59,123 INFO [train.py:715] (7/8) Epoch 11, batch 23400, loss[loss=0.1391, simple_loss=0.2105, pruned_loss=0.03382, over 4885.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03264, over 973222.02 frames.], batch size: 22, lr: 1.95e-04 +2022-05-07 05:28:38,746 INFO [train.py:715] (7/8) Epoch 11, batch 23450, loss[loss=0.142, simple_loss=0.2178, pruned_loss=0.03312, over 4852.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03254, over 971859.39 frames.], batch size: 38, lr: 1.95e-04 +2022-05-07 05:29:16,871 INFO [train.py:715] (7/8) Epoch 11, batch 23500, loss[loss=0.1521, simple_loss=0.2298, pruned_loss=0.03716, over 4782.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2115, pruned_loss=0.03199, over 971964.01 frames.], batch size: 17, lr: 1.95e-04 +2022-05-07 05:29:55,784 INFO [train.py:715] (7/8) Epoch 11, batch 23550, loss[loss=0.1266, simple_loss=0.2028, pruned_loss=0.02525, over 4785.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03205, over 971233.00 frames.], batch size: 14, lr: 1.95e-04 +2022-05-07 05:30:34,766 INFO [train.py:715] (7/8) Epoch 11, batch 23600, loss[loss=0.1384, simple_loss=0.215, pruned_loss=0.03089, over 4837.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03234, over 970826.15 frames.], batch size: 30, lr: 1.94e-04 +2022-05-07 05:31:14,123 INFO 
[train.py:715] (7/8) Epoch 11, batch 23650, loss[loss=0.1336, simple_loss=0.206, pruned_loss=0.03058, over 4831.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03282, over 970667.40 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:31:51,829 INFO [train.py:715] (7/8) Epoch 11, batch 23700, loss[loss=0.1309, simple_loss=0.205, pruned_loss=0.02838, over 4707.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2117, pruned_loss=0.03277, over 970043.97 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:32:30,818 INFO [train.py:715] (7/8) Epoch 11, batch 23750, loss[loss=0.1375, simple_loss=0.2147, pruned_loss=0.03018, over 4748.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03249, over 971524.36 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 05:33:09,309 INFO [train.py:715] (7/8) Epoch 11, batch 23800, loss[loss=0.1828, simple_loss=0.2605, pruned_loss=0.05257, over 4987.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03239, over 972061.20 frames.], batch size: 31, lr: 1.94e-04 +2022-05-07 05:33:46,737 INFO [train.py:715] (7/8) Epoch 11, batch 23850, loss[loss=0.1294, simple_loss=0.2026, pruned_loss=0.0281, over 4963.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03285, over 972471.54 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:34:24,311 INFO [train.py:715] (7/8) Epoch 11, batch 23900, loss[loss=0.1498, simple_loss=0.2195, pruned_loss=0.04006, over 4831.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03356, over 972512.39 frames.], batch size: 26, lr: 1.94e-04 +2022-05-07 05:35:01,655 INFO [train.py:715] (7/8) Epoch 11, batch 23950, loss[loss=0.1628, simple_loss=0.2454, pruned_loss=0.04016, over 4946.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2119, pruned_loss=0.03311, over 971365.75 frames.], batch size: 39, lr: 1.94e-04 +2022-05-07 05:35:39,342 INFO [train.py:715] (7/8) Epoch 11, batch 24000, loss[loss=0.1475, simple_loss=0.2143, pruned_loss=0.0403, over 4752.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.0332, over 972467.24 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 05:35:39,343 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 05:35:48,813 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1059, simple_loss=0.19, pruned_loss=0.01092, over 914524.00 frames. 
+2022-05-07 05:36:27,135 INFO [train.py:715] (7/8) Epoch 11, batch 24050, loss[loss=0.129, simple_loss=0.1966, pruned_loss=0.03065, over 4789.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2112, pruned_loss=0.03283, over 972354.19 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 05:37:04,266 INFO [train.py:715] (7/8) Epoch 11, batch 24100, loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03254, over 4963.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03301, over 972903.01 frames.], batch size: 35, lr: 1.94e-04 +2022-05-07 05:37:42,094 INFO [train.py:715] (7/8) Epoch 11, batch 24150, loss[loss=0.1427, simple_loss=0.2187, pruned_loss=0.03333, over 4854.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.0329, over 972796.23 frames.], batch size: 20, lr: 1.94e-04 +2022-05-07 05:38:20,368 INFO [train.py:715] (7/8) Epoch 11, batch 24200, loss[loss=0.1312, simple_loss=0.2129, pruned_loss=0.02477, over 4890.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03263, over 972058.45 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 05:38:57,453 INFO [train.py:715] (7/8) Epoch 11, batch 24250, loss[loss=0.1373, simple_loss=0.2155, pruned_loss=0.0296, over 4776.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03227, over 971588.95 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:39:35,484 INFO [train.py:715] (7/8) Epoch 11, batch 24300, loss[loss=0.1399, simple_loss=0.2153, pruned_loss=0.03227, over 4871.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03219, over 971713.48 frames.], batch size: 16, lr: 1.94e-04 +2022-05-07 05:40:13,072 INFO [train.py:715] (7/8) Epoch 11, batch 24350, loss[loss=0.1474, simple_loss=0.2212, pruned_loss=0.03681, over 4762.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03176, over 971536.26 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 05:40:50,677 INFO [train.py:715] (7/8) Epoch 11, batch 24400, loss[loss=0.1416, simple_loss=0.2133, pruned_loss=0.03498, over 4958.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03192, over 971240.74 frames.], batch size: 35, lr: 1.94e-04 +2022-05-07 05:41:28,272 INFO [train.py:715] (7/8) Epoch 11, batch 24450, loss[loss=0.1562, simple_loss=0.2394, pruned_loss=0.03646, over 4937.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03169, over 971861.35 frames.], batch size: 35, lr: 1.94e-04 +2022-05-07 05:42:06,386 INFO [train.py:715] (7/8) Epoch 11, batch 24500, loss[loss=0.1389, simple_loss=0.2209, pruned_loss=0.02842, over 4898.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03186, over 972557.35 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 05:42:45,027 INFO [train.py:715] (7/8) Epoch 11, batch 24550, loss[loss=0.1294, simple_loss=0.2123, pruned_loss=0.02323, over 4890.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03201, over 971499.46 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 05:43:23,051 INFO [train.py:715] (7/8) Epoch 11, batch 24600, loss[loss=0.1404, simple_loss=0.2177, pruned_loss=0.03153, over 4787.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03179, over 971533.29 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 05:44:01,527 INFO [train.py:715] (7/8) Epoch 11, batch 24650, loss[loss=0.152, simple_loss=0.2327, pruned_loss=0.03566, over 4918.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03181, over 971892.70 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 
05:44:39,862 INFO [train.py:715] (7/8) Epoch 11, batch 24700, loss[loss=0.1313, simple_loss=0.2152, pruned_loss=0.02377, over 4817.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.0321, over 971141.54 frames.], batch size: 25, lr: 1.94e-04 +2022-05-07 05:45:18,487 INFO [train.py:715] (7/8) Epoch 11, batch 24750, loss[loss=0.1426, simple_loss=0.2129, pruned_loss=0.03618, over 4657.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03214, over 971210.06 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 05:45:56,382 INFO [train.py:715] (7/8) Epoch 11, batch 24800, loss[loss=0.1313, simple_loss=0.1951, pruned_loss=0.03371, over 4951.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2099, pruned_loss=0.03184, over 971395.92 frames.], batch size: 23, lr: 1.94e-04 +2022-05-07 05:46:34,702 INFO [train.py:715] (7/8) Epoch 11, batch 24850, loss[loss=0.1182, simple_loss=0.1915, pruned_loss=0.02246, over 4981.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03199, over 972435.66 frames.], batch size: 35, lr: 1.94e-04 +2022-05-07 05:47:13,629 INFO [train.py:715] (7/8) Epoch 11, batch 24900, loss[loss=0.1158, simple_loss=0.1984, pruned_loss=0.01661, over 4906.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2095, pruned_loss=0.03182, over 973362.78 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 05:47:51,694 INFO [train.py:715] (7/8) Epoch 11, batch 24950, loss[loss=0.1502, simple_loss=0.2349, pruned_loss=0.0328, over 4952.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03184, over 973731.77 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 05:48:30,023 INFO [train.py:715] (7/8) Epoch 11, batch 25000, loss[loss=0.1647, simple_loss=0.2383, pruned_loss=0.0455, over 4793.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03191, over 973390.90 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:49:08,319 INFO [train.py:715] (7/8) Epoch 11, batch 25050, loss[loss=0.139, simple_loss=0.2232, pruned_loss=0.02741, over 4932.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03161, over 973975.67 frames.], batch size: 23, lr: 1.94e-04 +2022-05-07 05:49:49,678 INFO [train.py:715] (7/8) Epoch 11, batch 25100, loss[loss=0.132, simple_loss=0.1964, pruned_loss=0.03381, over 4963.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03218, over 973235.18 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 05:50:27,839 INFO [train.py:715] (7/8) Epoch 11, batch 25150, loss[loss=0.1326, simple_loss=0.2092, pruned_loss=0.02796, over 4822.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03222, over 973826.47 frames.], batch size: 12, lr: 1.94e-04 +2022-05-07 05:51:06,430 INFO [train.py:715] (7/8) Epoch 11, batch 25200, loss[loss=0.1197, simple_loss=0.1922, pruned_loss=0.02355, over 4953.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03228, over 973320.21 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:51:45,284 INFO [train.py:715] (7/8) Epoch 11, batch 25250, loss[loss=0.133, simple_loss=0.2007, pruned_loss=0.03266, over 4849.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03247, over 973224.74 frames.], batch size: 30, lr: 1.94e-04 +2022-05-07 05:52:23,561 INFO [train.py:715] (7/8) Epoch 11, batch 25300, loss[loss=0.1297, simple_loss=0.207, pruned_loss=0.02615, over 4989.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03246, over 973387.40 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 05:53:01,961 
INFO [train.py:715] (7/8) Epoch 11, batch 25350, loss[loss=0.1276, simple_loss=0.1984, pruned_loss=0.02839, over 4847.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03279, over 972976.97 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 05:53:40,600 INFO [train.py:715] (7/8) Epoch 11, batch 25400, loss[loss=0.1403, simple_loss=0.2096, pruned_loss=0.03552, over 4703.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03288, over 971449.59 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:54:19,417 INFO [train.py:715] (7/8) Epoch 11, batch 25450, loss[loss=0.1165, simple_loss=0.1945, pruned_loss=0.01923, over 4819.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03336, over 971701.26 frames.], batch size: 27, lr: 1.94e-04 +2022-05-07 05:54:57,469 INFO [train.py:715] (7/8) Epoch 11, batch 25500, loss[loss=0.1536, simple_loss=0.2303, pruned_loss=0.03839, over 4830.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03313, over 972238.09 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 05:55:36,080 INFO [train.py:715] (7/8) Epoch 11, batch 25550, loss[loss=0.1429, simple_loss=0.2262, pruned_loss=0.02985, over 4770.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03242, over 971252.59 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 05:56:15,314 INFO [train.py:715] (7/8) Epoch 11, batch 25600, loss[loss=0.1267, simple_loss=0.2028, pruned_loss=0.02534, over 4743.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03171, over 971781.70 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 05:56:53,593 INFO [train.py:715] (7/8) Epoch 11, batch 25650, loss[loss=0.1233, simple_loss=0.2005, pruned_loss=0.02301, over 4871.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03208, over 971457.20 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 05:57:31,749 INFO [train.py:715] (7/8) Epoch 11, batch 25700, loss[loss=0.1537, simple_loss=0.2228, pruned_loss=0.04227, over 4901.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03267, over 971745.17 frames.], batch size: 39, lr: 1.94e-04 +2022-05-07 05:58:10,579 INFO [train.py:715] (7/8) Epoch 11, batch 25750, loss[loss=0.1338, simple_loss=0.215, pruned_loss=0.0263, over 4879.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03255, over 970693.45 frames.], batch size: 16, lr: 1.94e-04 +2022-05-07 05:58:48,900 INFO [train.py:715] (7/8) Epoch 11, batch 25800, loss[loss=0.1655, simple_loss=0.2387, pruned_loss=0.04616, over 4799.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2122, pruned_loss=0.03299, over 971470.13 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 05:59:26,904 INFO [train.py:715] (7/8) Epoch 11, batch 25850, loss[loss=0.1374, simple_loss=0.2078, pruned_loss=0.03348, over 4782.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03295, over 971128.90 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:00:05,563 INFO [train.py:715] (7/8) Epoch 11, batch 25900, loss[loss=0.1234, simple_loss=0.1939, pruned_loss=0.02648, over 4800.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03309, over 971084.54 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:00:44,269 INFO [train.py:715] (7/8) Epoch 11, batch 25950, loss[loss=0.1652, simple_loss=0.2298, pruned_loss=0.05035, over 4874.00 frames.], tot_loss[loss=0.14, simple_loss=0.213, pruned_loss=0.03347, over 971222.15 frames.], batch size: 39, lr: 1.94e-04 +2022-05-07 06:01:22,319 INFO 
[train.py:715] (7/8) Epoch 11, batch 26000, loss[loss=0.1253, simple_loss=0.2039, pruned_loss=0.02334, over 4878.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2134, pruned_loss=0.03342, over 971649.76 frames.], batch size: 16, lr: 1.94e-04 +2022-05-07 06:02:00,402 INFO [train.py:715] (7/8) Epoch 11, batch 26050, loss[loss=0.1359, simple_loss=0.2151, pruned_loss=0.02837, over 4871.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2128, pruned_loss=0.03284, over 971494.40 frames.], batch size: 20, lr: 1.94e-04 +2022-05-07 06:02:38,955 INFO [train.py:715] (7/8) Epoch 11, batch 26100, loss[loss=0.1736, simple_loss=0.2308, pruned_loss=0.05816, over 4848.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03223, over 972422.37 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 06:03:17,345 INFO [train.py:715] (7/8) Epoch 11, batch 26150, loss[loss=0.1441, simple_loss=0.2273, pruned_loss=0.03042, over 4811.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2117, pruned_loss=0.03204, over 972404.01 frames.], batch size: 26, lr: 1.94e-04 +2022-05-07 06:03:55,314 INFO [train.py:715] (7/8) Epoch 11, batch 26200, loss[loss=0.1337, simple_loss=0.2032, pruned_loss=0.0321, over 4830.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03226, over 972172.44 frames.], batch size: 30, lr: 1.94e-04 +2022-05-07 06:04:32,930 INFO [train.py:715] (7/8) Epoch 11, batch 26250, loss[loss=0.123, simple_loss=0.1928, pruned_loss=0.02654, over 4921.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.0325, over 972585.44 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:05:10,976 INFO [train.py:715] (7/8) Epoch 11, batch 26300, loss[loss=0.1276, simple_loss=0.2014, pruned_loss=0.02687, over 4921.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2119, pruned_loss=0.0323, over 972338.47 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:05:48,409 INFO [train.py:715] (7/8) Epoch 11, batch 26350, loss[loss=0.1405, simple_loss=0.2191, pruned_loss=0.03093, over 4981.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03224, over 971993.48 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 06:06:25,428 INFO [train.py:715] (7/8) Epoch 11, batch 26400, loss[loss=0.133, simple_loss=0.2097, pruned_loss=0.02818, over 4926.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2118, pruned_loss=0.03293, over 972431.31 frames.], batch size: 21, lr: 1.94e-04 +2022-05-07 06:07:03,856 INFO [train.py:715] (7/8) Epoch 11, batch 26450, loss[loss=0.1506, simple_loss=0.2182, pruned_loss=0.04146, over 4772.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03246, over 972078.72 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 06:07:41,340 INFO [train.py:715] (7/8) Epoch 11, batch 26500, loss[loss=0.1263, simple_loss=0.2026, pruned_loss=0.02506, over 4864.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03231, over 971728.72 frames.], batch size: 20, lr: 1.94e-04 +2022-05-07 06:08:19,082 INFO [train.py:715] (7/8) Epoch 11, batch 26550, loss[loss=0.1328, simple_loss=0.2081, pruned_loss=0.02876, over 4842.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03231, over 970130.92 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 06:08:56,820 INFO [train.py:715] (7/8) Epoch 11, batch 26600, loss[loss=0.1341, simple_loss=0.2096, pruned_loss=0.02927, over 4974.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03264, over 971231.39 frames.], batch size: 28, lr: 1.94e-04 +2022-05-07 06:09:34,844 INFO 
[train.py:715] (7/8) Epoch 11, batch 26650, loss[loss=0.1375, simple_loss=0.2045, pruned_loss=0.03525, over 4702.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03165, over 971263.01 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 06:10:12,909 INFO [train.py:715] (7/8) Epoch 11, batch 26700, loss[loss=0.1344, simple_loss=0.2061, pruned_loss=0.03138, over 4716.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03168, over 970299.25 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 06:10:49,897 INFO [train.py:715] (7/8) Epoch 11, batch 26750, loss[loss=0.1188, simple_loss=0.2002, pruned_loss=0.0187, over 4909.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03179, over 971001.84 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:11:28,540 INFO [train.py:715] (7/8) Epoch 11, batch 26800, loss[loss=0.1207, simple_loss=0.1945, pruned_loss=0.02342, over 4850.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03102, over 971004.28 frames.], batch size: 34, lr: 1.94e-04 +2022-05-07 06:12:06,143 INFO [train.py:715] (7/8) Epoch 11, batch 26850, loss[loss=0.1459, simple_loss=0.2181, pruned_loss=0.03685, over 4921.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2081, pruned_loss=0.03088, over 970657.06 frames.], batch size: 17, lr: 1.94e-04 +2022-05-07 06:12:43,630 INFO [train.py:715] (7/8) Epoch 11, batch 26900, loss[loss=0.1574, simple_loss=0.2373, pruned_loss=0.03871, over 4762.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2081, pruned_loss=0.03092, over 970583.60 frames.], batch size: 19, lr: 1.94e-04 +2022-05-07 06:13:21,269 INFO [train.py:715] (7/8) Epoch 11, batch 26950, loss[loss=0.1854, simple_loss=0.2413, pruned_loss=0.06474, over 4776.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03207, over 971826.53 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:13:59,655 INFO [train.py:715] (7/8) Epoch 11, batch 27000, loss[loss=0.1377, simple_loss=0.217, pruned_loss=0.02916, over 4771.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03236, over 971771.06 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:13:59,655 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 06:14:09,117 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1059, simple_loss=0.19, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 06:14:47,547 INFO [train.py:715] (7/8) Epoch 11, batch 27050, loss[loss=0.1287, simple_loss=0.2076, pruned_loss=0.0249, over 4932.00 frames.], tot_loss[loss=0.1383, simple_loss=0.211, pruned_loss=0.03273, over 972872.69 frames.], batch size: 39, lr: 1.94e-04 +2022-05-07 06:15:25,148 INFO [train.py:715] (7/8) Epoch 11, batch 27100, loss[loss=0.1312, simple_loss=0.2006, pruned_loss=0.03092, over 4803.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03293, over 973174.72 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 06:16:02,382 INFO [train.py:715] (7/8) Epoch 11, batch 27150, loss[loss=0.1473, simple_loss=0.2184, pruned_loss=0.03806, over 4836.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.0329, over 973093.18 frames.], batch size: 15, lr: 1.94e-04 +2022-05-07 06:16:41,019 INFO [train.py:715] (7/8) Epoch 11, batch 27200, loss[loss=0.1338, simple_loss=0.2142, pruned_loss=0.02667, over 4862.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2126, pruned_loss=0.03331, over 973871.26 frames.], batch size: 22, lr: 1.94e-04 +2022-05-07 06:17:18,728 INFO [train.py:715] (7/8) Epoch 11, batch 27250, loss[loss=0.14, simple_loss=0.2223, pruned_loss=0.02881, over 4960.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2126, pruned_loss=0.03379, over 973873.90 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 06:17:56,617 INFO [train.py:715] (7/8) Epoch 11, batch 27300, loss[loss=0.139, simple_loss=0.2043, pruned_loss=0.03683, over 4945.00 frames.], tot_loss[loss=0.1401, simple_loss=0.213, pruned_loss=0.03365, over 974112.11 frames.], batch size: 23, lr: 1.94e-04 +2022-05-07 06:18:34,288 INFO [train.py:715] (7/8) Epoch 11, batch 27350, loss[loss=0.1111, simple_loss=0.19, pruned_loss=0.01611, over 4821.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2122, pruned_loss=0.03341, over 973913.46 frames.], batch size: 13, lr: 1.94e-04 +2022-05-07 06:19:13,063 INFO [train.py:715] (7/8) Epoch 11, batch 27400, loss[loss=0.147, simple_loss=0.2228, pruned_loss=0.03561, over 4877.00 frames.], tot_loss[loss=0.1395, simple_loss=0.212, pruned_loss=0.03354, over 972993.81 frames.], batch size: 20, lr: 1.94e-04 +2022-05-07 06:19:50,855 INFO [train.py:715] (7/8) Epoch 11, batch 27450, loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03282, over 4794.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2115, pruned_loss=0.03303, over 973198.36 frames.], batch size: 18, lr: 1.94e-04 +2022-05-07 06:20:28,119 INFO [train.py:715] (7/8) Epoch 11, batch 27500, loss[loss=0.1111, simple_loss=0.187, pruned_loss=0.01765, over 4826.00 frames.], tot_loss[loss=0.139, simple_loss=0.2119, pruned_loss=0.03299, over 972641.86 frames.], batch size: 26, lr: 1.94e-04 +2022-05-07 06:21:07,285 INFO [train.py:715] (7/8) Epoch 11, batch 27550, loss[loss=0.1474, simple_loss=0.2216, pruned_loss=0.03664, over 4968.00 frames.], tot_loss[loss=0.1402, simple_loss=0.213, pruned_loss=0.03371, over 972144.47 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 06:21:45,752 INFO [train.py:715] (7/8) Epoch 11, batch 27600, loss[loss=0.1435, simple_loss=0.2108, pruned_loss=0.03813, over 4813.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2125, pruned_loss=0.0336, over 972323.07 frames.], batch size: 27, lr: 1.94e-04 +2022-05-07 06:22:23,476 INFO [train.py:715] (7/8) Epoch 11, batch 27650, loss[loss=0.1593, simple_loss=0.228, pruned_loss=0.04533, over 4964.00 frames.], tot_loss[loss=0.1391, simple_loss=0.212, pruned_loss=0.03315, over 972807.77 frames.], batch size: 14, lr: 1.94e-04 +2022-05-07 
06:23:01,299 INFO [train.py:715] (7/8) Epoch 11, batch 27700, loss[loss=0.138, simple_loss=0.213, pruned_loss=0.03146, over 4957.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2117, pruned_loss=0.03309, over 973997.45 frames.], batch size: 28, lr: 1.94e-04 +2022-05-07 06:23:39,626 INFO [train.py:715] (7/8) Epoch 11, batch 27750, loss[loss=0.1314, simple_loss=0.1991, pruned_loss=0.03185, over 4960.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03279, over 973778.40 frames.], batch size: 24, lr: 1.94e-04 +2022-05-07 06:24:17,573 INFO [train.py:715] (7/8) Epoch 11, batch 27800, loss[loss=0.147, simple_loss=0.2081, pruned_loss=0.04291, over 4795.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2116, pruned_loss=0.03263, over 973479.49 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:24:54,557 INFO [train.py:715] (7/8) Epoch 11, batch 27850, loss[loss=0.1409, simple_loss=0.2094, pruned_loss=0.03617, over 4735.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03277, over 973312.03 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 06:25:32,918 INFO [train.py:715] (7/8) Epoch 11, batch 27900, loss[loss=0.1325, simple_loss=0.2041, pruned_loss=0.03041, over 4870.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2127, pruned_loss=0.03342, over 973804.85 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:26:10,973 INFO [train.py:715] (7/8) Epoch 11, batch 27950, loss[loss=0.1236, simple_loss=0.1893, pruned_loss=0.02899, over 4940.00 frames.], tot_loss[loss=0.1397, simple_loss=0.2124, pruned_loss=0.03349, over 973326.24 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:26:48,583 INFO [train.py:715] (7/8) Epoch 11, batch 28000, loss[loss=0.1133, simple_loss=0.1816, pruned_loss=0.02254, over 4870.00 frames.], tot_loss[loss=0.1395, simple_loss=0.212, pruned_loss=0.03346, over 972948.87 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:27:26,139 INFO [train.py:715] (7/8) Epoch 11, batch 28050, loss[loss=0.1257, simple_loss=0.2058, pruned_loss=0.02277, over 4820.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2121, pruned_loss=0.0333, over 973062.44 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 06:28:04,142 INFO [train.py:715] (7/8) Epoch 11, batch 28100, loss[loss=0.1315, simple_loss=0.214, pruned_loss=0.0245, over 4983.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03341, over 973178.77 frames.], batch size: 35, lr: 1.93e-04 +2022-05-07 06:28:41,423 INFO [train.py:715] (7/8) Epoch 11, batch 28150, loss[loss=0.1176, simple_loss=0.1879, pruned_loss=0.02368, over 4844.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2116, pruned_loss=0.03288, over 973159.13 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 06:29:18,868 INFO [train.py:715] (7/8) Epoch 11, batch 28200, loss[loss=0.1431, simple_loss=0.2225, pruned_loss=0.03189, over 4888.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2115, pruned_loss=0.03279, over 972069.80 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:29:57,423 INFO [train.py:715] (7/8) Epoch 11, batch 28250, loss[loss=0.1123, simple_loss=0.1904, pruned_loss=0.01708, over 4813.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2116, pruned_loss=0.03286, over 972121.60 frames.], batch size: 26, lr: 1.93e-04 +2022-05-07 06:30:34,926 INFO [train.py:715] (7/8) Epoch 11, batch 28300, loss[loss=0.1279, simple_loss=0.195, pruned_loss=0.03045, over 4870.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03265, over 971824.00 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 06:31:12,830 
INFO [train.py:715] (7/8) Epoch 11, batch 28350, loss[loss=0.1787, simple_loss=0.2531, pruned_loss=0.0522, over 4942.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2117, pruned_loss=0.03322, over 972007.13 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:31:50,525 INFO [train.py:715] (7/8) Epoch 11, batch 28400, loss[loss=0.1425, simple_loss=0.2061, pruned_loss=0.03945, over 4832.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2119, pruned_loss=0.03301, over 971454.58 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:32:28,898 INFO [train.py:715] (7/8) Epoch 11, batch 28450, loss[loss=0.1546, simple_loss=0.2292, pruned_loss=0.03994, over 4780.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2123, pruned_loss=0.03307, over 972156.55 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 06:33:06,936 INFO [train.py:715] (7/8) Epoch 11, batch 28500, loss[loss=0.1514, simple_loss=0.2256, pruned_loss=0.03858, over 4884.00 frames.], tot_loss[loss=0.1402, simple_loss=0.2136, pruned_loss=0.03339, over 972172.17 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:33:44,629 INFO [train.py:715] (7/8) Epoch 11, batch 28550, loss[loss=0.1352, simple_loss=0.2173, pruned_loss=0.02656, over 4973.00 frames.], tot_loss[loss=0.14, simple_loss=0.2134, pruned_loss=0.03326, over 972801.15 frames.], batch size: 28, lr: 1.93e-04 +2022-05-07 06:34:23,469 INFO [train.py:715] (7/8) Epoch 11, batch 28600, loss[loss=0.1439, simple_loss=0.2198, pruned_loss=0.03402, over 4695.00 frames.], tot_loss[loss=0.1407, simple_loss=0.214, pruned_loss=0.03374, over 973205.92 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:35:01,436 INFO [train.py:715] (7/8) Epoch 11, batch 28650, loss[loss=0.1589, simple_loss=0.2146, pruned_loss=0.0516, over 4768.00 frames.], tot_loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03367, over 972278.29 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:35:39,422 INFO [train.py:715] (7/8) Epoch 11, batch 28700, loss[loss=0.1343, simple_loss=0.1922, pruned_loss=0.03823, over 4981.00 frames.], tot_loss[loss=0.1407, simple_loss=0.2137, pruned_loss=0.03388, over 972796.46 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 06:36:17,177 INFO [train.py:715] (7/8) Epoch 11, batch 28750, loss[loss=0.1352, simple_loss=0.1981, pruned_loss=0.03616, over 4916.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03337, over 972035.01 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:36:55,925 INFO [train.py:715] (7/8) Epoch 11, batch 28800, loss[loss=0.1378, simple_loss=0.2027, pruned_loss=0.03643, over 4911.00 frames.], tot_loss[loss=0.1398, simple_loss=0.2128, pruned_loss=0.03345, over 972109.95 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 06:37:33,392 INFO [train.py:715] (7/8) Epoch 11, batch 28850, loss[loss=0.1429, simple_loss=0.2233, pruned_loss=0.03125, over 4736.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03292, over 972029.81 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 06:38:10,808 INFO [train.py:715] (7/8) Epoch 11, batch 28900, loss[loss=0.1473, simple_loss=0.2238, pruned_loss=0.03543, over 4884.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2121, pruned_loss=0.03302, over 972083.81 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:38:49,569 INFO [train.py:715] (7/8) Epoch 11, batch 28950, loss[loss=0.1327, simple_loss=0.2003, pruned_loss=0.03261, over 4874.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2125, pruned_loss=0.03309, over 971677.47 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 06:39:27,039 INFO 
[train.py:715] (7/8) Epoch 11, batch 29000, loss[loss=0.1313, simple_loss=0.2151, pruned_loss=0.02375, over 4954.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03261, over 971670.52 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:40:04,959 INFO [train.py:715] (7/8) Epoch 11, batch 29050, loss[loss=0.1613, simple_loss=0.2432, pruned_loss=0.03966, over 4949.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2128, pruned_loss=0.03288, over 971552.17 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:40:42,752 INFO [train.py:715] (7/8) Epoch 11, batch 29100, loss[loss=0.1482, simple_loss=0.2209, pruned_loss=0.03775, over 4933.00 frames.], tot_loss[loss=0.139, simple_loss=0.2126, pruned_loss=0.03275, over 971678.99 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:41:21,084 INFO [train.py:715] (7/8) Epoch 11, batch 29150, loss[loss=0.1268, simple_loss=0.1981, pruned_loss=0.02776, over 4840.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2122, pruned_loss=0.03253, over 972114.01 frames.], batch size: 32, lr: 1.93e-04 +2022-05-07 06:41:58,820 INFO [train.py:715] (7/8) Epoch 11, batch 29200, loss[loss=0.138, simple_loss=0.221, pruned_loss=0.0275, over 4986.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2119, pruned_loss=0.03244, over 972260.41 frames.], batch size: 28, lr: 1.93e-04 +2022-05-07 06:42:36,371 INFO [train.py:715] (7/8) Epoch 11, batch 29250, loss[loss=0.1241, simple_loss=0.1997, pruned_loss=0.02427, over 4859.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03246, over 972028.65 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 06:43:15,063 INFO [train.py:715] (7/8) Epoch 11, batch 29300, loss[loss=0.1244, simple_loss=0.2087, pruned_loss=0.01998, over 4783.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03186, over 971821.18 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:43:53,140 INFO [train.py:715] (7/8) Epoch 11, batch 29350, loss[loss=0.1292, simple_loss=0.2041, pruned_loss=0.02715, over 4777.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03131, over 972241.68 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 06:44:30,901 INFO [train.py:715] (7/8) Epoch 11, batch 29400, loss[loss=0.1214, simple_loss=0.2023, pruned_loss=0.02027, over 4919.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03177, over 972411.59 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:45:08,812 INFO [train.py:715] (7/8) Epoch 11, batch 29450, loss[loss=0.124, simple_loss=0.199, pruned_loss=0.02448, over 4983.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03147, over 972483.26 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 06:45:46,711 INFO [train.py:715] (7/8) Epoch 11, batch 29500, loss[loss=0.1181, simple_loss=0.1945, pruned_loss=0.02082, over 4962.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.0315, over 973449.45 frames.], batch size: 24, lr: 1.93e-04 +2022-05-07 06:46:25,302 INFO [train.py:715] (7/8) Epoch 11, batch 29550, loss[loss=0.1607, simple_loss=0.2271, pruned_loss=0.04713, over 4893.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03132, over 974060.74 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 06:47:02,903 INFO [train.py:715] (7/8) Epoch 11, batch 29600, loss[loss=0.1161, simple_loss=0.1878, pruned_loss=0.02217, over 4940.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03151, over 974413.02 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:47:41,472 INFO [train.py:715] 
(7/8) Epoch 11, batch 29650, loss[loss=0.1259, simple_loss=0.1991, pruned_loss=0.02631, over 4805.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03172, over 973913.41 frames.], batch size: 24, lr: 1.93e-04 +2022-05-07 06:48:19,465 INFO [train.py:715] (7/8) Epoch 11, batch 29700, loss[loss=0.1222, simple_loss=0.2001, pruned_loss=0.0221, over 4895.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03186, over 974339.76 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 06:48:57,624 INFO [train.py:715] (7/8) Epoch 11, batch 29750, loss[loss=0.1604, simple_loss=0.2342, pruned_loss=0.04332, over 4943.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03213, over 973744.17 frames.], batch size: 29, lr: 1.93e-04 +2022-05-07 06:49:35,435 INFO [train.py:715] (7/8) Epoch 11, batch 29800, loss[loss=0.1487, simple_loss=0.2299, pruned_loss=0.03378, over 4811.00 frames.], tot_loss[loss=0.1387, simple_loss=0.212, pruned_loss=0.03272, over 972957.07 frames.], batch size: 27, lr: 1.93e-04 +2022-05-07 06:50:13,827 INFO [train.py:715] (7/8) Epoch 11, batch 29850, loss[loss=0.1094, simple_loss=0.1868, pruned_loss=0.01603, over 4772.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03239, over 972514.87 frames.], batch size: 12, lr: 1.93e-04 +2022-05-07 06:50:52,363 INFO [train.py:715] (7/8) Epoch 11, batch 29900, loss[loss=0.1291, simple_loss=0.2005, pruned_loss=0.02884, over 4906.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03224, over 972520.21 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 06:51:29,991 INFO [train.py:715] (7/8) Epoch 11, batch 29950, loss[loss=0.1485, simple_loss=0.222, pruned_loss=0.03753, over 4846.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03218, over 972713.53 frames.], batch size: 34, lr: 1.93e-04 +2022-05-07 06:52:08,178 INFO [train.py:715] (7/8) Epoch 11, batch 30000, loss[loss=0.132, simple_loss=0.191, pruned_loss=0.03648, over 4795.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03203, over 972235.31 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 06:52:08,179 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 06:52:17,626 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.106, simple_loss=0.19, pruned_loss=0.01095, over 914524.00 frames. 
+2022-05-07 06:52:56,514 INFO [train.py:715] (7/8) Epoch 11, batch 30050, loss[loss=0.1335, simple_loss=0.2032, pruned_loss=0.03185, over 4811.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03201, over 972151.13 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:53:34,388 INFO [train.py:715] (7/8) Epoch 11, batch 30100, loss[loss=0.1437, simple_loss=0.2119, pruned_loss=0.03773, over 4950.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03176, over 972601.08 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 06:54:13,052 INFO [train.py:715] (7/8) Epoch 11, batch 30150, loss[loss=0.1253, simple_loss=0.2067, pruned_loss=0.02197, over 4938.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03193, over 972959.78 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 06:54:50,401 INFO [train.py:715] (7/8) Epoch 11, batch 30200, loss[loss=0.1171, simple_loss=0.1856, pruned_loss=0.02429, over 4828.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03203, over 973240.62 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:55:29,244 INFO [train.py:715] (7/8) Epoch 11, batch 30250, loss[loss=0.1552, simple_loss=0.2279, pruned_loss=0.04128, over 4972.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03209, over 974687.88 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 06:56:07,229 INFO [train.py:715] (7/8) Epoch 11, batch 30300, loss[loss=0.1476, simple_loss=0.2196, pruned_loss=0.03777, over 4869.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03255, over 974649.54 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:56:45,178 INFO [train.py:715] (7/8) Epoch 11, batch 30350, loss[loss=0.1225, simple_loss=0.2009, pruned_loss=0.02202, over 4887.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03238, over 973882.77 frames.], batch size: 22, lr: 1.93e-04 +2022-05-07 06:57:23,262 INFO [train.py:715] (7/8) Epoch 11, batch 30400, loss[loss=0.1585, simple_loss=0.2279, pruned_loss=0.04452, over 4978.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03246, over 974003.99 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:58:01,500 INFO [train.py:715] (7/8) Epoch 11, batch 30450, loss[loss=0.1291, simple_loss=0.1954, pruned_loss=0.03143, over 4978.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03163, over 973422.84 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 06:58:39,331 INFO [train.py:715] (7/8) Epoch 11, batch 30500, loss[loss=0.1183, simple_loss=0.1869, pruned_loss=0.02482, over 4848.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2117, pruned_loss=0.03222, over 972891.94 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 06:59:17,144 INFO [train.py:715] (7/8) Epoch 11, batch 30550, loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03023, over 4925.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2122, pruned_loss=0.03238, over 972663.85 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 06:59:56,405 INFO [train.py:715] (7/8) Epoch 11, batch 30600, loss[loss=0.1327, simple_loss=0.2103, pruned_loss=0.02758, over 4993.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03247, over 972337.00 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 07:00:35,034 INFO [train.py:715] (7/8) Epoch 11, batch 30650, loss[loss=0.1542, simple_loss=0.2232, pruned_loss=0.0426, over 4820.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2113, pruned_loss=0.03264, over 972356.34 frames.], batch size: 13, lr: 1.93e-04 
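Every training entry in this log follows one fixed pattern: a per-batch loss[...] group, a running tot_loss[...] group, the batch size and the current learning rate. Plotting the curves therefore only needs a small parser along the lines sketched below; the regular expression is written against the entries shown here and is not part of the training scripts themselves.

    # Hypothetical helper for pulling the logged values out of lines shaped like
    #   "Epoch E, batch B, loss[...], tot_loss[...], batch size: S, lr: L".
    import re

    ENTRY = re.compile(
        r"Epoch (\d+), batch (\d+), "
        r"loss\[loss=([\d.]+), simple_loss=([\d.]+), pruned_loss=([\d.]+), over ([\d.]+) frames\.\], "
        r"tot_loss\[loss=([\d.]+), simple_loss=([\d.]+), pruned_loss=([\d.]+), over ([\d.]+) frames\.\], "
        r"batch size: (\d+), lr: ([\d.e-]+)"
    )

    def parse_entry(line: str):
        m = ENTRY.search(line)
        if m is None:
            return None  # validation lines and other INFO messages
        epoch, batch = int(m.group(1)), int(m.group(2))
        tot_loss, lr = float(m.group(7)), float(m.group(12))
        return epoch, batch, tot_loss, lr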
+2022-05-07 07:01:13,829 INFO [train.py:715] (7/8) Epoch 11, batch 30700, loss[loss=0.1231, simple_loss=0.1927, pruned_loss=0.02676, over 4703.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03213, over 972376.06 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:01:52,335 INFO [train.py:715] (7/8) Epoch 11, batch 30750, loss[loss=0.1432, simple_loss=0.2216, pruned_loss=0.03237, over 4871.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03236, over 973101.76 frames.], batch size: 16, lr: 1.93e-04 +2022-05-07 07:02:30,947 INFO [train.py:715] (7/8) Epoch 11, batch 30800, loss[loss=0.1471, simple_loss=0.2191, pruned_loss=0.03759, over 4892.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03221, over 972854.45 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 07:03:09,709 INFO [train.py:715] (7/8) Epoch 11, batch 30850, loss[loss=0.1673, simple_loss=0.2423, pruned_loss=0.04611, over 4900.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2121, pruned_loss=0.03213, over 973525.14 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 07:03:48,267 INFO [train.py:715] (7/8) Epoch 11, batch 30900, loss[loss=0.1619, simple_loss=0.2312, pruned_loss=0.04631, over 4989.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2125, pruned_loss=0.03244, over 972845.68 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:04:27,074 INFO [train.py:715] (7/8) Epoch 11, batch 30950, loss[loss=0.1164, simple_loss=0.1942, pruned_loss=0.01928, over 4850.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2129, pruned_loss=0.03277, over 972746.69 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 07:05:06,006 INFO [train.py:715] (7/8) Epoch 11, batch 31000, loss[loss=0.1196, simple_loss=0.1956, pruned_loss=0.02185, over 4929.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03258, over 973042.00 frames.], batch size: 23, lr: 1.93e-04 +2022-05-07 07:05:44,522 INFO [train.py:715] (7/8) Epoch 11, batch 31050, loss[loss=0.1612, simple_loss=0.2284, pruned_loss=0.04697, over 4988.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2123, pruned_loss=0.0327, over 972723.62 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 07:06:23,343 INFO [train.py:715] (7/8) Epoch 11, batch 31100, loss[loss=0.1628, simple_loss=0.2418, pruned_loss=0.04194, over 4869.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03213, over 973279.14 frames.], batch size: 20, lr: 1.93e-04 +2022-05-07 07:07:01,741 INFO [train.py:715] (7/8) Epoch 11, batch 31150, loss[loss=0.1218, simple_loss=0.195, pruned_loss=0.0243, over 4993.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2114, pruned_loss=0.03192, over 973144.92 frames.], batch size: 14, lr: 1.93e-04 +2022-05-07 07:07:39,379 INFO [train.py:715] (7/8) Epoch 11, batch 31200, loss[loss=0.1336, simple_loss=0.2164, pruned_loss=0.02541, over 4986.00 frames.], tot_loss[loss=0.1383, simple_loss=0.212, pruned_loss=0.03226, over 973505.31 frames.], batch size: 28, lr: 1.93e-04 +2022-05-07 07:08:17,483 INFO [train.py:715] (7/8) Epoch 11, batch 31250, loss[loss=0.1491, simple_loss=0.2116, pruned_loss=0.04332, over 4824.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2125, pruned_loss=0.03234, over 972685.50 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:08:55,764 INFO [train.py:715] (7/8) Epoch 11, batch 31300, loss[loss=0.1082, simple_loss=0.1728, pruned_loss=0.02177, over 4784.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2117, pruned_loss=0.03193, over 971986.25 frames.], batch size: 12, lr: 1.93e-04 
+2022-05-07 07:09:33,561 INFO [train.py:715] (7/8) Epoch 11, batch 31350, loss[loss=0.186, simple_loss=0.2591, pruned_loss=0.05641, over 4910.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2121, pruned_loss=0.03236, over 972007.58 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 07:10:10,909 INFO [train.py:715] (7/8) Epoch 11, batch 31400, loss[loss=0.1548, simple_loss=0.2264, pruned_loss=0.04159, over 4896.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03211, over 972540.41 frames.], batch size: 39, lr: 1.93e-04 +2022-05-07 07:10:48,405 INFO [train.py:715] (7/8) Epoch 11, batch 31450, loss[loss=0.1259, simple_loss=0.2002, pruned_loss=0.02575, over 4826.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03191, over 972342.80 frames.], batch size: 15, lr: 1.93e-04 +2022-05-07 07:11:26,018 INFO [train.py:715] (7/8) Epoch 11, batch 31500, loss[loss=0.1451, simple_loss=0.2298, pruned_loss=0.03016, over 4765.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03255, over 971976.82 frames.], batch size: 18, lr: 1.93e-04 +2022-05-07 07:12:03,665 INFO [train.py:715] (7/8) Epoch 11, batch 31550, loss[loss=0.1038, simple_loss=0.1776, pruned_loss=0.01496, over 4840.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03215, over 972123.03 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 07:12:41,668 INFO [train.py:715] (7/8) Epoch 11, batch 31600, loss[loss=0.1495, simple_loss=0.2233, pruned_loss=0.03786, over 4792.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03191, over 972590.01 frames.], batch size: 17, lr: 1.93e-04 +2022-05-07 07:13:19,756 INFO [train.py:715] (7/8) Epoch 11, batch 31650, loss[loss=0.1391, simple_loss=0.2083, pruned_loss=0.03496, over 4815.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2123, pruned_loss=0.03315, over 972470.74 frames.], batch size: 26, lr: 1.93e-04 +2022-05-07 07:13:57,688 INFO [train.py:715] (7/8) Epoch 11, batch 31700, loss[loss=0.1569, simple_loss=0.2322, pruned_loss=0.04075, over 4961.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03277, over 972318.83 frames.], batch size: 24, lr: 1.93e-04 +2022-05-07 07:14:35,211 INFO [train.py:715] (7/8) Epoch 11, batch 31750, loss[loss=0.125, simple_loss=0.2084, pruned_loss=0.02082, over 4948.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2118, pruned_loss=0.03276, over 972158.03 frames.], batch size: 21, lr: 1.93e-04 +2022-05-07 07:15:14,060 INFO [train.py:715] (7/8) Epoch 11, batch 31800, loss[loss=0.1491, simple_loss=0.2235, pruned_loss=0.03733, over 4929.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2119, pruned_loss=0.0328, over 971934.35 frames.], batch size: 29, lr: 1.93e-04 +2022-05-07 07:15:52,638 INFO [train.py:715] (7/8) Epoch 11, batch 31850, loss[loss=0.1479, simple_loss=0.2225, pruned_loss=0.03666, over 4840.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2112, pruned_loss=0.03234, over 972651.38 frames.], batch size: 13, lr: 1.93e-04 +2022-05-07 07:16:30,871 INFO [train.py:715] (7/8) Epoch 11, batch 31900, loss[loss=0.1213, simple_loss=0.1976, pruned_loss=0.02252, over 4804.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.0326, over 972395.82 frames.], batch size: 25, lr: 1.93e-04 +2022-05-07 07:17:09,162 INFO [train.py:715] (7/8) Epoch 11, batch 31950, loss[loss=0.1512, simple_loss=0.2285, pruned_loss=0.03692, over 4904.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2123, pruned_loss=0.0333, over 971889.67 frames.], batch size: 19, lr: 1.93e-04 
+2022-05-07 07:17:47,943 INFO [train.py:715] (7/8) Epoch 11, batch 32000, loss[loss=0.1458, simple_loss=0.2294, pruned_loss=0.03106, over 4755.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03319, over 972584.80 frames.], batch size: 19, lr: 1.93e-04 +2022-05-07 07:18:26,169 INFO [train.py:715] (7/8) Epoch 11, batch 32050, loss[loss=0.1474, simple_loss=0.2221, pruned_loss=0.03632, over 4932.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2121, pruned_loss=0.03265, over 972932.47 frames.], batch size: 40, lr: 1.93e-04 +2022-05-07 07:19:04,553 INFO [train.py:715] (7/8) Epoch 11, batch 32100, loss[loss=0.1269, simple_loss=0.2086, pruned_loss=0.02262, over 4964.00 frames.], tot_loss[loss=0.1382, simple_loss=0.212, pruned_loss=0.03224, over 972963.70 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 07:19:42,571 INFO [train.py:715] (7/8) Epoch 11, batch 32150, loss[loss=0.1182, simple_loss=0.1928, pruned_loss=0.02179, over 4924.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.0318, over 973014.45 frames.], batch size: 23, lr: 1.92e-04 +2022-05-07 07:20:19,991 INFO [train.py:715] (7/8) Epoch 11, batch 32200, loss[loss=0.1718, simple_loss=0.233, pruned_loss=0.05524, over 4972.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03192, over 972761.13 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:20:57,515 INFO [train.py:715] (7/8) Epoch 11, batch 32250, loss[loss=0.164, simple_loss=0.2322, pruned_loss=0.04793, over 4928.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03165, over 972491.98 frames.], batch size: 23, lr: 1.92e-04 +2022-05-07 07:21:35,347 INFO [train.py:715] (7/8) Epoch 11, batch 32300, loss[loss=0.1313, simple_loss=0.2065, pruned_loss=0.02804, over 4851.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03151, over 971453.43 frames.], batch size: 32, lr: 1.92e-04 +2022-05-07 07:22:13,997 INFO [train.py:715] (7/8) Epoch 11, batch 32350, loss[loss=0.1358, simple_loss=0.2056, pruned_loss=0.03296, over 4766.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03097, over 972474.55 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:22:51,417 INFO [train.py:715] (7/8) Epoch 11, batch 32400, loss[loss=0.1418, simple_loss=0.2081, pruned_loss=0.03773, over 4874.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03117, over 971695.17 frames.], batch size: 32, lr: 1.92e-04 +2022-05-07 07:23:29,418 INFO [train.py:715] (7/8) Epoch 11, batch 32450, loss[loss=0.1502, simple_loss=0.2254, pruned_loss=0.03752, over 4923.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03133, over 971666.51 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:24:07,457 INFO [train.py:715] (7/8) Epoch 11, batch 32500, loss[loss=0.1232, simple_loss=0.1941, pruned_loss=0.02615, over 4832.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03182, over 971970.38 frames.], batch size: 26, lr: 1.92e-04 +2022-05-07 07:24:45,516 INFO [train.py:715] (7/8) Epoch 11, batch 32550, loss[loss=0.201, simple_loss=0.2775, pruned_loss=0.06226, over 4863.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03201, over 971451.80 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:25:23,172 INFO [train.py:715] (7/8) Epoch 11, batch 32600, loss[loss=0.1323, simple_loss=0.2092, pruned_loss=0.02767, over 4796.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03246, over 972239.24 frames.], batch size: 21, lr: 1.92e-04 +2022-05-07 
07:26:01,257 INFO [train.py:715] (7/8) Epoch 11, batch 32650, loss[loss=0.1258, simple_loss=0.2035, pruned_loss=0.02403, over 4909.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03245, over 971787.93 frames.], batch size: 29, lr: 1.92e-04 +2022-05-07 07:26:39,443 INFO [train.py:715] (7/8) Epoch 11, batch 32700, loss[loss=0.1903, simple_loss=0.2521, pruned_loss=0.06425, over 4812.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.0324, over 971358.46 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:27:16,890 INFO [train.py:715] (7/8) Epoch 11, batch 32750, loss[loss=0.1433, simple_loss=0.2165, pruned_loss=0.03503, over 4963.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2107, pruned_loss=0.03222, over 971805.77 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 07:27:55,657 INFO [train.py:715] (7/8) Epoch 11, batch 32800, loss[loss=0.1344, simple_loss=0.2094, pruned_loss=0.02969, over 4961.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03194, over 971970.75 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:28:35,368 INFO [train.py:715] (7/8) Epoch 11, batch 32850, loss[loss=0.1278, simple_loss=0.2052, pruned_loss=0.02517, over 4808.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2103, pruned_loss=0.03221, over 972128.60 frames.], batch size: 25, lr: 1.92e-04 +2022-05-07 07:29:13,921 INFO [train.py:715] (7/8) Epoch 11, batch 32900, loss[loss=0.1458, simple_loss=0.2153, pruned_loss=0.03813, over 4921.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2101, pruned_loss=0.03212, over 973515.59 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:29:52,133 INFO [train.py:715] (7/8) Epoch 11, batch 32950, loss[loss=0.1608, simple_loss=0.2249, pruned_loss=0.04835, over 4838.00 frames.], tot_loss[loss=0.138, simple_loss=0.2108, pruned_loss=0.03259, over 972848.36 frames.], batch size: 32, lr: 1.92e-04 +2022-05-07 07:30:31,048 INFO [train.py:715] (7/8) Epoch 11, batch 33000, loss[loss=0.1203, simple_loss=0.1979, pruned_loss=0.02138, over 4871.00 frames.], tot_loss[loss=0.137, simple_loss=0.2099, pruned_loss=0.03199, over 972309.71 frames.], batch size: 22, lr: 1.92e-04 +2022-05-07 07:30:31,049 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 07:30:40,493 INFO [train.py:742] (7/8) Epoch 11, validation: loss=0.1059, simple_loss=0.1899, pruned_loss=0.0109, over 914524.00 frames. 
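The learning rate printed with each entry drifts down very slowly inside epoch 11 (1.94e-04 at the top of this excerpt, 1.93e-04 from around batch 27800, 1.92e-04 from around batch 32100) and then steps down more visibly when epoch 12 starts further below (1.85e-04 at batch 0). That behaviour is consistent with a schedule that decays with both the global batch count and the epoch count; the sketch below has that general shape, but the constants and the base rate are assumptions, not values read from this run's code.

    # Hypothetical learning-rate schedule decaying in both the batch index and the
    # epoch index; the constants here (5000 batches, 4 epochs, 3e-03 base rate in
    # the comment below) are illustrative assumptions only.
    def scheduled_lr(base_lr: float, batch_idx: int, epoch: int,
                     lr_batches: float = 5000.0, lr_epochs: float = 4.0) -> float:
        batch_factor = ((batch_idx ** 2 + lr_batches ** 2) / lr_batches ** 2) ** -0.25
        epoch_factor = ((epoch ** 2 + lr_epochs ** 2) / lr_epochs ** 2) ** -0.25
        return base_lr * batch_factor * epoch_factor

    # With an assumed base rate of 3e-03 and a cumulative batch index in the
    # hundreds of thousands, scheduled_lr(...) lands in the 1.8e-04 to 1.9e-04
    # range around epochs 11-12, the same order as the values logged above.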
+2022-05-07 07:31:19,415 INFO [train.py:715] (7/8) Epoch 11, batch 33050, loss[loss=0.1252, simple_loss=0.2107, pruned_loss=0.0199, over 4744.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2103, pruned_loss=0.03216, over 971777.14 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:32:00,922 INFO [train.py:715] (7/8) Epoch 11, batch 33100, loss[loss=0.1453, simple_loss=0.2138, pruned_loss=0.03845, over 4780.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.032, over 971792.77 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:32:38,878 INFO [train.py:715] (7/8) Epoch 11, batch 33150, loss[loss=0.1132, simple_loss=0.1858, pruned_loss=0.0203, over 4945.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03202, over 971746.60 frames.], batch size: 21, lr: 1.92e-04 +2022-05-07 07:33:17,492 INFO [train.py:715] (7/8) Epoch 11, batch 33200, loss[loss=0.1291, simple_loss=0.2069, pruned_loss=0.02563, over 4728.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03202, over 972020.04 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:33:56,617 INFO [train.py:715] (7/8) Epoch 11, batch 33250, loss[loss=0.136, simple_loss=0.2068, pruned_loss=0.03267, over 4919.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03236, over 972771.37 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:34:35,410 INFO [train.py:715] (7/8) Epoch 11, batch 33300, loss[loss=0.1237, simple_loss=0.1886, pruned_loss=0.02945, over 4790.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03263, over 971737.67 frames.], batch size: 12, lr: 1.92e-04 +2022-05-07 07:35:13,283 INFO [train.py:715] (7/8) Epoch 11, batch 33350, loss[loss=0.1609, simple_loss=0.2333, pruned_loss=0.04425, over 4947.00 frames.], tot_loss[loss=0.1379, simple_loss=0.211, pruned_loss=0.03243, over 971622.76 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:35:51,703 INFO [train.py:715] (7/8) Epoch 11, batch 33400, loss[loss=0.1216, simple_loss=0.2005, pruned_loss=0.02135, over 4952.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03224, over 972448.52 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:36:30,392 INFO [train.py:715] (7/8) Epoch 11, batch 33450, loss[loss=0.1474, simple_loss=0.2183, pruned_loss=0.03823, over 4979.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03197, over 972026.96 frames.], batch size: 25, lr: 1.92e-04 +2022-05-07 07:37:08,730 INFO [train.py:715] (7/8) Epoch 11, batch 33500, loss[loss=0.1356, simple_loss=0.2179, pruned_loss=0.02664, over 4807.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2118, pruned_loss=0.03205, over 972434.30 frames.], batch size: 21, lr: 1.92e-04 +2022-05-07 07:37:47,174 INFO [train.py:715] (7/8) Epoch 11, batch 33550, loss[loss=0.1366, simple_loss=0.2122, pruned_loss=0.03044, over 4962.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2124, pruned_loss=0.03269, over 972329.47 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:38:25,761 INFO [train.py:715] (7/8) Epoch 11, batch 33600, loss[loss=0.1466, simple_loss=0.2175, pruned_loss=0.03785, over 4933.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.03292, over 972266.89 frames.], batch size: 21, lr: 1.92e-04 +2022-05-07 07:39:04,165 INFO [train.py:715] (7/8) Epoch 11, batch 33650, loss[loss=0.1263, simple_loss=0.2142, pruned_loss=0.01915, over 4974.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03276, over 971931.36 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 
07:39:42,291 INFO [train.py:715] (7/8) Epoch 11, batch 33700, loss[loss=0.1357, simple_loss=0.2104, pruned_loss=0.03043, over 4952.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.03269, over 972565.93 frames.], batch size: 29, lr: 1.92e-04 +2022-05-07 07:40:20,578 INFO [train.py:715] (7/8) Epoch 11, batch 33750, loss[loss=0.1428, simple_loss=0.2287, pruned_loss=0.02851, over 4799.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03222, over 972184.51 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 07:40:59,159 INFO [train.py:715] (7/8) Epoch 11, batch 33800, loss[loss=0.2249, simple_loss=0.2894, pruned_loss=0.08023, over 4783.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2119, pruned_loss=0.03233, over 972584.19 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:41:37,146 INFO [train.py:715] (7/8) Epoch 11, batch 33850, loss[loss=0.1317, simple_loss=0.2003, pruned_loss=0.03156, over 4822.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03186, over 972387.70 frames.], batch size: 12, lr: 1.92e-04 +2022-05-07 07:42:15,180 INFO [train.py:715] (7/8) Epoch 11, batch 33900, loss[loss=0.148, simple_loss=0.2166, pruned_loss=0.03968, over 4931.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03208, over 972540.65 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:42:53,933 INFO [train.py:715] (7/8) Epoch 11, batch 33950, loss[loss=0.1072, simple_loss=0.1795, pruned_loss=0.01745, over 4823.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2121, pruned_loss=0.03249, over 972088.05 frames.], batch size: 12, lr: 1.92e-04 +2022-05-07 07:43:32,250 INFO [train.py:715] (7/8) Epoch 11, batch 34000, loss[loss=0.1345, simple_loss=0.2108, pruned_loss=0.0291, over 4768.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2125, pruned_loss=0.03283, over 971984.72 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:44:10,350 INFO [train.py:715] (7/8) Epoch 11, batch 34050, loss[loss=0.1407, simple_loss=0.2147, pruned_loss=0.03332, over 4796.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2122, pruned_loss=0.0327, over 971657.97 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 07:44:48,874 INFO [train.py:715] (7/8) Epoch 11, batch 34100, loss[loss=0.128, simple_loss=0.1939, pruned_loss=0.03105, over 4763.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03292, over 971174.57 frames.], batch size: 12, lr: 1.92e-04 +2022-05-07 07:45:27,612 INFO [train.py:715] (7/8) Epoch 11, batch 34150, loss[loss=0.1096, simple_loss=0.1793, pruned_loss=0.01992, over 4771.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2111, pruned_loss=0.03223, over 971596.49 frames.], batch size: 12, lr: 1.92e-04 +2022-05-07 07:46:05,699 INFO [train.py:715] (7/8) Epoch 11, batch 34200, loss[loss=0.1308, simple_loss=0.2005, pruned_loss=0.03049, over 4776.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03242, over 971896.37 frames.], batch size: 17, lr: 1.92e-04 +2022-05-07 07:46:44,125 INFO [train.py:715] (7/8) Epoch 11, batch 34250, loss[loss=0.1481, simple_loss=0.2238, pruned_loss=0.03618, over 4761.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03279, over 971775.62 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:47:23,287 INFO [train.py:715] (7/8) Epoch 11, batch 34300, loss[loss=0.1468, simple_loss=0.2153, pruned_loss=0.0392, over 4764.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.03262, over 972318.26 frames.], batch size: 16, lr: 1.92e-04 +2022-05-07 07:48:01,580 
INFO [train.py:715] (7/8) Epoch 11, batch 34350, loss[loss=0.1247, simple_loss=0.1933, pruned_loss=0.02803, over 4773.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2121, pruned_loss=0.03237, over 972057.31 frames.], batch size: 17, lr: 1.92e-04 +2022-05-07 07:48:40,022 INFO [train.py:715] (7/8) Epoch 11, batch 34400, loss[loss=0.1272, simple_loss=0.1944, pruned_loss=0.03002, over 4974.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2115, pruned_loss=0.03187, over 972249.63 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:49:18,673 INFO [train.py:715] (7/8) Epoch 11, batch 34450, loss[loss=0.1581, simple_loss=0.2295, pruned_loss=0.04335, over 4850.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2124, pruned_loss=0.03241, over 971994.25 frames.], batch size: 13, lr: 1.92e-04 +2022-05-07 07:49:57,850 INFO [train.py:715] (7/8) Epoch 11, batch 34500, loss[loss=0.1458, simple_loss=0.2307, pruned_loss=0.03051, over 4918.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03247, over 971763.01 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:50:35,957 INFO [train.py:715] (7/8) Epoch 11, batch 34550, loss[loss=0.1204, simple_loss=0.208, pruned_loss=0.01637, over 4692.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03215, over 971182.90 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:51:12,742 INFO [train.py:715] (7/8) Epoch 11, batch 34600, loss[loss=0.1598, simple_loss=0.2308, pruned_loss=0.04437, over 4903.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03263, over 970909.96 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:51:50,531 INFO [train.py:715] (7/8) Epoch 11, batch 34650, loss[loss=0.1449, simple_loss=0.218, pruned_loss=0.03586, over 4704.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03237, over 971156.73 frames.], batch size: 15, lr: 1.92e-04 +2022-05-07 07:52:27,797 INFO [train.py:715] (7/8) Epoch 11, batch 34700, loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03146, over 4794.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03239, over 971831.78 frames.], batch size: 24, lr: 1.92e-04 +2022-05-07 07:53:04,318 INFO [train.py:715] (7/8) Epoch 11, batch 34750, loss[loss=0.144, simple_loss=0.2145, pruned_loss=0.03669, over 4785.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03244, over 971850.34 frames.], batch size: 14, lr: 1.92e-04 +2022-05-07 07:53:39,313 INFO [train.py:715] (7/8) Epoch 11, batch 34800, loss[loss=0.1544, simple_loss=0.2184, pruned_loss=0.04514, over 4918.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03225, over 972361.67 frames.], batch size: 18, lr: 1.92e-04 +2022-05-07 07:54:26,266 INFO [train.py:715] (7/8) Epoch 12, batch 0, loss[loss=0.1118, simple_loss=0.1791, pruned_loss=0.02221, over 4795.00 frames.], tot_loss[loss=0.1118, simple_loss=0.1791, pruned_loss=0.02221, over 4795.00 frames.], batch size: 21, lr: 1.85e-04 +2022-05-07 07:55:04,630 INFO [train.py:715] (7/8) Epoch 12, batch 50, loss[loss=0.1629, simple_loss=0.2289, pruned_loss=0.04843, over 4822.00 frames.], tot_loss[loss=0.138, simple_loss=0.2112, pruned_loss=0.03243, over 218728.84 frames.], batch size: 15, lr: 1.85e-04 +2022-05-07 07:55:42,694 INFO [train.py:715] (7/8) Epoch 12, batch 100, loss[loss=0.1412, simple_loss=0.216, pruned_loss=0.03323, over 4880.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03224, over 386375.11 frames.], batch size: 16, lr: 1.85e-04 +2022-05-07 07:56:21,320 INFO [train.py:715] 
(7/8) Epoch 12, batch 150, loss[loss=0.1369, simple_loss=0.221, pruned_loss=0.02638, over 4860.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.0315, over 516270.73 frames.], batch size: 20, lr: 1.85e-04 +2022-05-07 07:56:59,066 INFO [train.py:715] (7/8) Epoch 12, batch 200, loss[loss=0.1418, simple_loss=0.2091, pruned_loss=0.0373, over 4735.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03179, over 617412.25 frames.], batch size: 16, lr: 1.85e-04 +2022-05-07 07:57:38,283 INFO [train.py:715] (7/8) Epoch 12, batch 250, loss[loss=0.1283, simple_loss=0.2038, pruned_loss=0.02635, over 4789.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03153, over 696108.59 frames.], batch size: 14, lr: 1.85e-04 +2022-05-07 07:58:16,541 INFO [train.py:715] (7/8) Epoch 12, batch 300, loss[loss=0.1288, simple_loss=0.2048, pruned_loss=0.02641, over 4798.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03219, over 758105.09 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 07:58:54,471 INFO [train.py:715] (7/8) Epoch 12, batch 350, loss[loss=0.1236, simple_loss=0.1969, pruned_loss=0.02518, over 4910.00 frames.], tot_loss[loss=0.138, simple_loss=0.2118, pruned_loss=0.03206, over 806545.95 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 07:59:32,945 INFO [train.py:715] (7/8) Epoch 12, batch 400, loss[loss=0.1278, simple_loss=0.2028, pruned_loss=0.02642, over 4806.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2123, pruned_loss=0.03236, over 842818.30 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:00:10,577 INFO [train.py:715] (7/8) Epoch 12, batch 450, loss[loss=0.1169, simple_loss=0.1894, pruned_loss=0.02223, over 4841.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03197, over 871773.50 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:00:48,788 INFO [train.py:715] (7/8) Epoch 12, batch 500, loss[loss=0.1399, simple_loss=0.2073, pruned_loss=0.03631, over 4820.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03192, over 893266.71 frames.], batch size: 26, lr: 1.84e-04 +2022-05-07 08:01:26,233 INFO [train.py:715] (7/8) Epoch 12, batch 550, loss[loss=0.127, simple_loss=0.2012, pruned_loss=0.02646, over 4829.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.0324, over 911311.03 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:02:04,575 INFO [train.py:715] (7/8) Epoch 12, batch 600, loss[loss=0.1364, simple_loss=0.2203, pruned_loss=0.02626, over 4962.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03263, over 925661.59 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:02:41,618 INFO [train.py:715] (7/8) Epoch 12, batch 650, loss[loss=0.1518, simple_loss=0.2121, pruned_loss=0.04574, over 4967.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03278, over 936415.86 frames.], batch size: 35, lr: 1.84e-04 +2022-05-07 08:03:20,186 INFO [train.py:715] (7/8) Epoch 12, batch 700, loss[loss=0.1347, simple_loss=0.2112, pruned_loss=0.02911, over 4945.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2123, pruned_loss=0.03289, over 944792.85 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:03:58,807 INFO [train.py:715] (7/8) Epoch 12, batch 750, loss[loss=0.1532, simple_loss=0.2204, pruned_loss=0.04301, over 4883.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03233, over 950562.94 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:04:37,571 INFO [train.py:715] (7/8) Epoch 12, batch 800, 
loss[loss=0.1519, simple_loss=0.2192, pruned_loss=0.04236, over 4803.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03232, over 955751.35 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:05:16,050 INFO [train.py:715] (7/8) Epoch 12, batch 850, loss[loss=0.1272, simple_loss=0.1973, pruned_loss=0.02853, over 4816.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03224, over 958396.95 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:05:54,151 INFO [train.py:715] (7/8) Epoch 12, batch 900, loss[loss=0.1335, simple_loss=0.2059, pruned_loss=0.03053, over 4911.00 frames.], tot_loss[loss=0.138, simple_loss=0.2111, pruned_loss=0.03246, over 961001.93 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:06:32,491 INFO [train.py:715] (7/8) Epoch 12, batch 950, loss[loss=0.1432, simple_loss=0.2124, pruned_loss=0.03705, over 4942.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2098, pruned_loss=0.03219, over 963537.11 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:07:09,850 INFO [train.py:715] (7/8) Epoch 12, batch 1000, loss[loss=0.1267, simple_loss=0.2044, pruned_loss=0.02447, over 4819.00 frames.], tot_loss[loss=0.1381, simple_loss=0.211, pruned_loss=0.03262, over 966142.50 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:07:47,342 INFO [train.py:715] (7/8) Epoch 12, batch 1050, loss[loss=0.1578, simple_loss=0.2244, pruned_loss=0.04565, over 4956.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03267, over 967197.35 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:08:25,194 INFO [train.py:715] (7/8) Epoch 12, batch 1100, loss[loss=0.1243, simple_loss=0.1954, pruned_loss=0.02659, over 4898.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03252, over 968043.04 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:09:03,092 INFO [train.py:715] (7/8) Epoch 12, batch 1150, loss[loss=0.1382, simple_loss=0.2172, pruned_loss=0.02962, over 4933.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2129, pruned_loss=0.0332, over 969872.97 frames.], batch size: 29, lr: 1.84e-04 +2022-05-07 08:09:41,431 INFO [train.py:715] (7/8) Epoch 12, batch 1200, loss[loss=0.1586, simple_loss=0.2404, pruned_loss=0.03841, over 4881.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2117, pruned_loss=0.03257, over 970386.71 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:10:18,738 INFO [train.py:715] (7/8) Epoch 12, batch 1250, loss[loss=0.1301, simple_loss=0.2074, pruned_loss=0.0264, over 4780.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03213, over 970902.30 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:10:56,844 INFO [train.py:715] (7/8) Epoch 12, batch 1300, loss[loss=0.1637, simple_loss=0.2289, pruned_loss=0.04926, over 4976.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03227, over 971176.79 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:11:33,988 INFO [train.py:715] (7/8) Epoch 12, batch 1350, loss[loss=0.1294, simple_loss=0.2036, pruned_loss=0.02761, over 4972.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03218, over 971958.70 frames.], batch size: 35, lr: 1.84e-04 +2022-05-07 08:12:12,113 INFO [train.py:715] (7/8) Epoch 12, batch 1400, loss[loss=0.1505, simple_loss=0.2127, pruned_loss=0.04417, over 4685.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2105, pruned_loss=0.03202, over 972615.05 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:12:49,762 INFO [train.py:715] (7/8) Epoch 12, batch 1450, loss[loss=0.1267, 
simple_loss=0.199, pruned_loss=0.02717, over 4975.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2097, pruned_loss=0.03201, over 973026.28 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:13:27,680 INFO [train.py:715] (7/8) Epoch 12, batch 1500, loss[loss=0.1231, simple_loss=0.1976, pruned_loss=0.02431, over 4880.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03221, over 972844.14 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:14:05,277 INFO [train.py:715] (7/8) Epoch 12, batch 1550, loss[loss=0.1377, simple_loss=0.2025, pruned_loss=0.03647, over 4977.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03252, over 973006.35 frames.], batch size: 35, lr: 1.84e-04 +2022-05-07 08:14:42,471 INFO [train.py:715] (7/8) Epoch 12, batch 1600, loss[loss=0.1265, simple_loss=0.2025, pruned_loss=0.02523, over 4707.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03208, over 972932.30 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:15:20,489 INFO [train.py:715] (7/8) Epoch 12, batch 1650, loss[loss=0.1376, simple_loss=0.2209, pruned_loss=0.02719, over 4843.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03207, over 972534.86 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:15:57,867 INFO [train.py:715] (7/8) Epoch 12, batch 1700, loss[loss=0.1361, simple_loss=0.2165, pruned_loss=0.0278, over 4975.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2104, pruned_loss=0.03224, over 972544.05 frames.], batch size: 28, lr: 1.84e-04 +2022-05-07 08:16:35,313 INFO [train.py:715] (7/8) Epoch 12, batch 1750, loss[loss=0.1568, simple_loss=0.2232, pruned_loss=0.04516, over 4825.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03254, over 972402.07 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:17:12,472 INFO [train.py:715] (7/8) Epoch 12, batch 1800, loss[loss=0.1353, simple_loss=0.2077, pruned_loss=0.03143, over 4900.00 frames.], tot_loss[loss=0.138, simple_loss=0.2109, pruned_loss=0.03254, over 972947.15 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:17:50,168 INFO [train.py:715] (7/8) Epoch 12, batch 1850, loss[loss=0.131, simple_loss=0.1916, pruned_loss=0.03521, over 4853.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03259, over 972772.47 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:18:27,658 INFO [train.py:715] (7/8) Epoch 12, batch 1900, loss[loss=0.1473, simple_loss=0.214, pruned_loss=0.04028, over 4982.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2104, pruned_loss=0.03245, over 972546.70 frames.], batch size: 35, lr: 1.84e-04 +2022-05-07 08:19:05,284 INFO [train.py:715] (7/8) Epoch 12, batch 1950, loss[loss=0.09451, simple_loss=0.1646, pruned_loss=0.01223, over 4803.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2099, pruned_loss=0.03237, over 971527.56 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:19:43,102 INFO [train.py:715] (7/8) Epoch 12, batch 2000, loss[loss=0.1391, simple_loss=0.2112, pruned_loss=0.0335, over 4860.00 frames.], tot_loss[loss=0.137, simple_loss=0.2098, pruned_loss=0.03217, over 972022.96 frames.], batch size: 32, lr: 1.84e-04 +2022-05-07 08:20:21,263 INFO [train.py:715] (7/8) Epoch 12, batch 2050, loss[loss=0.1635, simple_loss=0.2323, pruned_loss=0.04738, over 4875.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03215, over 972515.44 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:20:59,320 INFO [train.py:715] (7/8) Epoch 12, batch 2100, loss[loss=0.1387, simple_loss=0.2048, 
pruned_loss=0.03627, over 4645.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.0317, over 972078.66 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:21:36,624 INFO [train.py:715] (7/8) Epoch 12, batch 2150, loss[loss=0.1188, simple_loss=0.1984, pruned_loss=0.0196, over 4804.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.03162, over 972347.74 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:22:14,597 INFO [train.py:715] (7/8) Epoch 12, batch 2200, loss[loss=0.1261, simple_loss=0.1964, pruned_loss=0.02794, over 4851.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03171, over 972351.31 frames.], batch size: 13, lr: 1.84e-04 +2022-05-07 08:22:52,522 INFO [train.py:715] (7/8) Epoch 12, batch 2250, loss[loss=0.109, simple_loss=0.1792, pruned_loss=0.01934, over 4809.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03193, over 972967.85 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:23:30,601 INFO [train.py:715] (7/8) Epoch 12, batch 2300, loss[loss=0.1226, simple_loss=0.19, pruned_loss=0.02763, over 4979.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03212, over 972710.05 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:24:07,787 INFO [train.py:715] (7/8) Epoch 12, batch 2350, loss[loss=0.1937, simple_loss=0.2488, pruned_loss=0.06934, over 4902.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03234, over 972211.32 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:24:45,330 INFO [train.py:715] (7/8) Epoch 12, batch 2400, loss[loss=0.1395, simple_loss=0.2086, pruned_loss=0.03521, over 4845.00 frames.], tot_loss[loss=0.1382, simple_loss=0.211, pruned_loss=0.03274, over 971723.45 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:25:23,251 INFO [train.py:715] (7/8) Epoch 12, batch 2450, loss[loss=0.1374, simple_loss=0.212, pruned_loss=0.03141, over 4984.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2107, pruned_loss=0.0324, over 971930.12 frames.], batch size: 31, lr: 1.84e-04 +2022-05-07 08:26:00,043 INFO [train.py:715] (7/8) Epoch 12, batch 2500, loss[loss=0.1376, simple_loss=0.2231, pruned_loss=0.02609, over 4943.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03251, over 972257.53 frames.], batch size: 35, lr: 1.84e-04 +2022-05-07 08:26:38,142 INFO [train.py:715] (7/8) Epoch 12, batch 2550, loss[loss=0.1214, simple_loss=0.1921, pruned_loss=0.02535, over 4790.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2102, pruned_loss=0.03227, over 972291.67 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:27:15,553 INFO [train.py:715] (7/8) Epoch 12, batch 2600, loss[loss=0.1533, simple_loss=0.2315, pruned_loss=0.03758, over 4789.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2104, pruned_loss=0.03231, over 972011.07 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:27:54,371 INFO [train.py:715] (7/8) Epoch 12, batch 2650, loss[loss=0.09957, simple_loss=0.1715, pruned_loss=0.01384, over 4772.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2095, pruned_loss=0.03167, over 972122.19 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:28:32,743 INFO [train.py:715] (7/8) Epoch 12, batch 2700, loss[loss=0.123, simple_loss=0.1991, pruned_loss=0.02348, over 4760.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03203, over 972287.79 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:29:11,534 INFO [train.py:715] (7/8) Epoch 12, batch 2750, loss[loss=0.1588, simple_loss=0.2315, pruned_loss=0.043, over 
4741.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03183, over 972508.74 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:29:50,412 INFO [train.py:715] (7/8) Epoch 12, batch 2800, loss[loss=0.1626, simple_loss=0.2451, pruned_loss=0.04004, over 4926.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03162, over 971146.21 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:30:28,422 INFO [train.py:715] (7/8) Epoch 12, batch 2850, loss[loss=0.1266, simple_loss=0.2041, pruned_loss=0.02457, over 4987.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03148, over 971214.98 frames.], batch size: 31, lr: 1.84e-04 +2022-05-07 08:31:07,089 INFO [train.py:715] (7/8) Epoch 12, batch 2900, loss[loss=0.1362, simple_loss=0.209, pruned_loss=0.03171, over 4842.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03177, over 972752.72 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:31:45,562 INFO [train.py:715] (7/8) Epoch 12, batch 2950, loss[loss=0.1315, simple_loss=0.197, pruned_loss=0.03302, over 4937.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03165, over 973314.55 frames.], batch size: 29, lr: 1.84e-04 +2022-05-07 08:32:24,277 INFO [train.py:715] (7/8) Epoch 12, batch 3000, loss[loss=0.1602, simple_loss=0.2157, pruned_loss=0.0524, over 4753.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03189, over 973376.37 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:32:24,278 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 08:32:33,757 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1056, simple_loss=0.1896, pruned_loss=0.01082, over 914524.00 frames. +2022-05-07 08:33:11,807 INFO [train.py:715] (7/8) Epoch 12, batch 3050, loss[loss=0.1386, simple_loss=0.2157, pruned_loss=0.03082, over 4802.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03182, over 972638.37 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:33:49,492 INFO [train.py:715] (7/8) Epoch 12, batch 3100, loss[loss=0.1688, simple_loss=0.2305, pruned_loss=0.05355, over 4903.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03233, over 972016.55 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:34:27,407 INFO [train.py:715] (7/8) Epoch 12, batch 3150, loss[loss=0.1812, simple_loss=0.2679, pruned_loss=0.04728, over 4877.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.0323, over 973270.41 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:35:05,544 INFO [train.py:715] (7/8) Epoch 12, batch 3200, loss[loss=0.1384, simple_loss=0.1964, pruned_loss=0.04022, over 4973.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.0327, over 973721.57 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:35:43,248 INFO [train.py:715] (7/8) Epoch 12, batch 3250, loss[loss=0.1327, simple_loss=0.2143, pruned_loss=0.02562, over 4936.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03258, over 973519.96 frames.], batch size: 23, lr: 1.84e-04 +2022-05-07 08:36:21,484 INFO [train.py:715] (7/8) Epoch 12, batch 3300, loss[loss=0.184, simple_loss=0.2463, pruned_loss=0.0608, over 4902.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03269, over 973380.11 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:36:59,236 INFO [train.py:715] (7/8) Epoch 12, batch 3350, loss[loss=0.1185, simple_loss=0.2018, pruned_loss=0.01762, over 4740.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, 
pruned_loss=0.03242, over 973025.37 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:37:37,375 INFO [train.py:715] (7/8) Epoch 12, batch 3400, loss[loss=0.1575, simple_loss=0.229, pruned_loss=0.04303, over 4922.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.0328, over 973590.18 frames.], batch size: 23, lr: 1.84e-04 +2022-05-07 08:38:14,960 INFO [train.py:715] (7/8) Epoch 12, batch 3450, loss[loss=0.1433, simple_loss=0.2067, pruned_loss=0.03995, over 4966.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03257, over 972839.38 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:38:52,884 INFO [train.py:715] (7/8) Epoch 12, batch 3500, loss[loss=0.116, simple_loss=0.1909, pruned_loss=0.02053, over 4975.00 frames.], tot_loss[loss=0.1383, simple_loss=0.212, pruned_loss=0.03236, over 971675.96 frames.], batch size: 28, lr: 1.84e-04 +2022-05-07 08:39:31,085 INFO [train.py:715] (7/8) Epoch 12, batch 3550, loss[loss=0.1271, simple_loss=0.2012, pruned_loss=0.02647, over 4953.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03217, over 971998.94 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:40:08,795 INFO [train.py:715] (7/8) Epoch 12, batch 3600, loss[loss=0.1282, simple_loss=0.1881, pruned_loss=0.03414, over 4890.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2106, pruned_loss=0.03235, over 972340.38 frames.], batch size: 16, lr: 1.84e-04 +2022-05-07 08:40:46,533 INFO [train.py:715] (7/8) Epoch 12, batch 3650, loss[loss=0.1362, simple_loss=0.2065, pruned_loss=0.03296, over 4819.00 frames.], tot_loss[loss=0.137, simple_loss=0.21, pruned_loss=0.03201, over 972482.17 frames.], batch size: 25, lr: 1.84e-04 +2022-05-07 08:41:24,468 INFO [train.py:715] (7/8) Epoch 12, batch 3700, loss[loss=0.1624, simple_loss=0.2322, pruned_loss=0.04636, over 4694.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2102, pruned_loss=0.03222, over 971526.51 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:42:02,374 INFO [train.py:715] (7/8) Epoch 12, batch 3750, loss[loss=0.1239, simple_loss=0.198, pruned_loss=0.02488, over 4817.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03185, over 972129.15 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:42:40,469 INFO [train.py:715] (7/8) Epoch 12, batch 3800, loss[loss=0.1419, simple_loss=0.2164, pruned_loss=0.03367, over 4854.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03248, over 972544.72 frames.], batch size: 30, lr: 1.84e-04 +2022-05-07 08:43:18,088 INFO [train.py:715] (7/8) Epoch 12, batch 3850, loss[loss=0.1424, simple_loss=0.2178, pruned_loss=0.0335, over 4968.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2114, pruned_loss=0.03264, over 972527.53 frames.], batch size: 35, lr: 1.84e-04 +2022-05-07 08:43:55,564 INFO [train.py:715] (7/8) Epoch 12, batch 3900, loss[loss=0.1088, simple_loss=0.1795, pruned_loss=0.01906, over 4860.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2107, pruned_loss=0.03246, over 972493.30 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:44:33,441 INFO [train.py:715] (7/8) Epoch 12, batch 3950, loss[loss=0.1136, simple_loss=0.18, pruned_loss=0.02366, over 4808.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03219, over 972646.80 frames.], batch size: 12, lr: 1.84e-04 +2022-05-07 08:45:11,211 INFO [train.py:715] (7/8) Epoch 12, batch 4000, loss[loss=0.1476, simple_loss=0.2235, pruned_loss=0.0358, over 4968.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.03235, over 
972836.64 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:45:49,153 INFO [train.py:715] (7/8) Epoch 12, batch 4050, loss[loss=0.1495, simple_loss=0.2162, pruned_loss=0.04138, over 4927.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03187, over 972465.49 frames.], batch size: 23, lr: 1.84e-04 +2022-05-07 08:46:27,044 INFO [train.py:715] (7/8) Epoch 12, batch 4100, loss[loss=0.114, simple_loss=0.184, pruned_loss=0.02203, over 4913.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2105, pruned_loss=0.03199, over 972063.92 frames.], batch size: 18, lr: 1.84e-04 +2022-05-07 08:47:05,071 INFO [train.py:715] (7/8) Epoch 12, batch 4150, loss[loss=0.1564, simple_loss=0.2279, pruned_loss=0.0424, over 4807.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03224, over 972444.27 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:47:43,032 INFO [train.py:715] (7/8) Epoch 12, batch 4200, loss[loss=0.1443, simple_loss=0.2067, pruned_loss=0.04098, over 4966.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03206, over 973161.42 frames.], batch size: 15, lr: 1.84e-04 +2022-05-07 08:48:20,659 INFO [train.py:715] (7/8) Epoch 12, batch 4250, loss[loss=0.1376, simple_loss=0.2166, pruned_loss=0.0293, over 4943.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03196, over 973084.74 frames.], batch size: 21, lr: 1.84e-04 +2022-05-07 08:48:58,347 INFO [train.py:715] (7/8) Epoch 12, batch 4300, loss[loss=0.153, simple_loss=0.2399, pruned_loss=0.03307, over 4860.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03142, over 972951.96 frames.], batch size: 20, lr: 1.84e-04 +2022-05-07 08:49:37,514 INFO [train.py:715] (7/8) Epoch 12, batch 4350, loss[loss=0.1323, simple_loss=0.2189, pruned_loss=0.02282, over 4768.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03147, over 972361.39 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:50:16,264 INFO [train.py:715] (7/8) Epoch 12, batch 4400, loss[loss=0.1269, simple_loss=0.2095, pruned_loss=0.02216, over 4983.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.032, over 971560.79 frames.], batch size: 28, lr: 1.84e-04 +2022-05-07 08:50:54,765 INFO [train.py:715] (7/8) Epoch 12, batch 4450, loss[loss=0.1296, simple_loss=0.1993, pruned_loss=0.02998, over 4824.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2105, pruned_loss=0.03224, over 970601.71 frames.], batch size: 27, lr: 1.84e-04 +2022-05-07 08:51:33,202 INFO [train.py:715] (7/8) Epoch 12, batch 4500, loss[loss=0.1247, simple_loss=0.1956, pruned_loss=0.02683, over 4919.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2112, pruned_loss=0.03266, over 970896.05 frames.], batch size: 23, lr: 1.84e-04 +2022-05-07 08:52:12,281 INFO [train.py:715] (7/8) Epoch 12, batch 4550, loss[loss=0.1307, simple_loss=0.2041, pruned_loss=0.02859, over 4901.00 frames.], tot_loss[loss=0.1391, simple_loss=0.2124, pruned_loss=0.0329, over 971928.91 frames.], batch size: 17, lr: 1.84e-04 +2022-05-07 08:52:50,490 INFO [train.py:715] (7/8) Epoch 12, batch 4600, loss[loss=0.1473, simple_loss=0.2203, pruned_loss=0.03708, over 4865.00 frames.], tot_loss[loss=0.1394, simple_loss=0.2129, pruned_loss=0.03296, over 971560.04 frames.], batch size: 32, lr: 1.84e-04 +2022-05-07 08:53:29,039 INFO [train.py:715] (7/8) Epoch 12, batch 4650, loss[loss=0.1352, simple_loss=0.2046, pruned_loss=0.0329, over 4871.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03286, over 971372.12 frames.], batch size: 
32, lr: 1.84e-04 +2022-05-07 08:54:07,728 INFO [train.py:715] (7/8) Epoch 12, batch 4700, loss[loss=0.1129, simple_loss=0.1822, pruned_loss=0.02186, over 4898.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2117, pruned_loss=0.03267, over 971377.69 frames.], batch size: 22, lr: 1.84e-04 +2022-05-07 08:54:46,298 INFO [train.py:715] (7/8) Epoch 12, batch 4750, loss[loss=0.1302, simple_loss=0.1984, pruned_loss=0.03095, over 4970.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03237, over 972456.41 frames.], batch size: 24, lr: 1.84e-04 +2022-05-07 08:55:25,000 INFO [train.py:715] (7/8) Epoch 12, batch 4800, loss[loss=0.1356, simple_loss=0.2101, pruned_loss=0.03054, over 4959.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03215, over 973032.94 frames.], batch size: 14, lr: 1.84e-04 +2022-05-07 08:56:03,562 INFO [train.py:715] (7/8) Epoch 12, batch 4850, loss[loss=0.164, simple_loss=0.2381, pruned_loss=0.04495, over 4911.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.03256, over 973129.19 frames.], batch size: 19, lr: 1.84e-04 +2022-05-07 08:56:42,593 INFO [train.py:715] (7/8) Epoch 12, batch 4900, loss[loss=0.1189, simple_loss=0.1945, pruned_loss=0.02165, over 4984.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03237, over 973322.37 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 08:57:20,602 INFO [train.py:715] (7/8) Epoch 12, batch 4950, loss[loss=0.1464, simple_loss=0.2139, pruned_loss=0.03949, over 4840.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03219, over 973616.82 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 08:57:58,209 INFO [train.py:715] (7/8) Epoch 12, batch 5000, loss[loss=0.1314, simple_loss=0.2033, pruned_loss=0.0298, over 4817.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03254, over 973361.95 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 08:58:36,392 INFO [train.py:715] (7/8) Epoch 12, batch 5050, loss[loss=0.1399, simple_loss=0.2199, pruned_loss=0.02993, over 4791.00 frames.], tot_loss[loss=0.139, simple_loss=0.2121, pruned_loss=0.03292, over 972411.44 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 08:59:13,984 INFO [train.py:715] (7/8) Epoch 12, batch 5100, loss[loss=0.1273, simple_loss=0.2037, pruned_loss=0.02542, over 4876.00 frames.], tot_loss[loss=0.1399, simple_loss=0.2128, pruned_loss=0.03355, over 972983.07 frames.], batch size: 22, lr: 1.83e-04 +2022-05-07 08:59:52,112 INFO [train.py:715] (7/8) Epoch 12, batch 5150, loss[loss=0.1259, simple_loss=0.203, pruned_loss=0.02437, over 4829.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03289, over 971878.02 frames.], batch size: 26, lr: 1.83e-04 +2022-05-07 09:00:30,012 INFO [train.py:715] (7/8) Epoch 12, batch 5200, loss[loss=0.1174, simple_loss=0.1955, pruned_loss=0.01965, over 4802.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03279, over 971374.02 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:01:08,125 INFO [train.py:715] (7/8) Epoch 12, batch 5250, loss[loss=0.1416, simple_loss=0.2085, pruned_loss=0.03738, over 4910.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2125, pruned_loss=0.03337, over 971781.71 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:01:45,992 INFO [train.py:715] (7/8) Epoch 12, batch 5300, loss[loss=0.1514, simple_loss=0.2213, pruned_loss=0.04072, over 4888.00 frames.], tot_loss[loss=0.139, simple_loss=0.212, pruned_loss=0.03297, over 971772.67 frames.], batch size: 22, lr: 1.83e-04 
+2022-05-07 09:02:24,110 INFO [train.py:715] (7/8) Epoch 12, batch 5350, loss[loss=0.1315, simple_loss=0.1975, pruned_loss=0.03275, over 4761.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03214, over 970650.60 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:03:02,671 INFO [train.py:715] (7/8) Epoch 12, batch 5400, loss[loss=0.144, simple_loss=0.2143, pruned_loss=0.03684, over 4750.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03205, over 970568.25 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:03:40,514 INFO [train.py:715] (7/8) Epoch 12, batch 5450, loss[loss=0.1571, simple_loss=0.2244, pruned_loss=0.04487, over 4819.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03186, over 970201.81 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:04:18,718 INFO [train.py:715] (7/8) Epoch 12, batch 5500, loss[loss=0.1344, simple_loss=0.1996, pruned_loss=0.03459, over 4957.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2115, pruned_loss=0.03271, over 970651.08 frames.], batch size: 14, lr: 1.83e-04 +2022-05-07 09:04:56,507 INFO [train.py:715] (7/8) Epoch 12, batch 5550, loss[loss=0.1737, simple_loss=0.2413, pruned_loss=0.05309, over 4890.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2116, pruned_loss=0.03289, over 971221.91 frames.], batch size: 38, lr: 1.83e-04 +2022-05-07 09:05:35,153 INFO [train.py:715] (7/8) Epoch 12, batch 5600, loss[loss=0.1125, simple_loss=0.186, pruned_loss=0.01953, over 4791.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03287, over 970992.24 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:06:12,948 INFO [train.py:715] (7/8) Epoch 12, batch 5650, loss[loss=0.1323, simple_loss=0.1986, pruned_loss=0.03296, over 4820.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2116, pruned_loss=0.03274, over 970625.43 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:06:50,900 INFO [train.py:715] (7/8) Epoch 12, batch 5700, loss[loss=0.1415, simple_loss=0.2256, pruned_loss=0.02869, over 4849.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03283, over 970956.85 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:07:29,807 INFO [train.py:715] (7/8) Epoch 12, batch 5750, loss[loss=0.1511, simple_loss=0.2205, pruned_loss=0.0408, over 4959.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2125, pruned_loss=0.03302, over 971413.62 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:08:07,980 INFO [train.py:715] (7/8) Epoch 12, batch 5800, loss[loss=0.1113, simple_loss=0.1831, pruned_loss=0.01973, over 4906.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03247, over 971354.60 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:08:46,180 INFO [train.py:715] (7/8) Epoch 12, batch 5850, loss[loss=0.1448, simple_loss=0.2211, pruned_loss=0.0342, over 4810.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03251, over 971973.78 frames.], batch size: 27, lr: 1.83e-04 +2022-05-07 09:09:24,396 INFO [train.py:715] (7/8) Epoch 12, batch 5900, loss[loss=0.1326, simple_loss=0.2136, pruned_loss=0.02585, over 4812.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2119, pruned_loss=0.03285, over 972149.90 frames.], batch size: 27, lr: 1.83e-04 +2022-05-07 09:10:02,493 INFO [train.py:715] (7/8) Epoch 12, batch 5950, loss[loss=0.144, simple_loss=0.2163, pruned_loss=0.03588, over 4868.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03237, over 972564.14 frames.], batch size: 32, lr: 1.83e-04 +2022-05-07 09:10:40,378 
INFO [train.py:715] (7/8) Epoch 12, batch 6000, loss[loss=0.1628, simple_loss=0.2391, pruned_loss=0.04322, over 4967.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03225, over 972557.21 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:10:40,379 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 09:10:49,853 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1897, pruned_loss=0.01086, over 914524.00 frames. +2022-05-07 09:11:28,466 INFO [train.py:715] (7/8) Epoch 12, batch 6050, loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02977, over 4925.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03193, over 972208.41 frames.], batch size: 29, lr: 1.83e-04 +2022-05-07 09:12:07,171 INFO [train.py:715] (7/8) Epoch 12, batch 6100, loss[loss=0.1315, simple_loss=0.2006, pruned_loss=0.03118, over 4809.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03192, over 971533.95 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:12:46,249 INFO [train.py:715] (7/8) Epoch 12, batch 6150, loss[loss=0.1611, simple_loss=0.2345, pruned_loss=0.04385, over 4805.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2116, pruned_loss=0.03208, over 972016.30 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:13:24,047 INFO [train.py:715] (7/8) Epoch 12, batch 6200, loss[loss=0.1243, simple_loss=0.2044, pruned_loss=0.02208, over 4789.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03226, over 972409.17 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:14:02,110 INFO [train.py:715] (7/8) Epoch 12, batch 6250, loss[loss=0.1367, simple_loss=0.213, pruned_loss=0.03019, over 4751.00 frames.], tot_loss[loss=0.1383, simple_loss=0.212, pruned_loss=0.03236, over 972875.41 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:14:42,630 INFO [train.py:715] (7/8) Epoch 12, batch 6300, loss[loss=0.1485, simple_loss=0.2254, pruned_loss=0.0358, over 4706.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.0321, over 972599.57 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:15:20,410 INFO [train.py:715] (7/8) Epoch 12, batch 6350, loss[loss=0.1455, simple_loss=0.2183, pruned_loss=0.03639, over 4788.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03204, over 972271.16 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:15:58,259 INFO [train.py:715] (7/8) Epoch 12, batch 6400, loss[loss=0.144, simple_loss=0.2138, pruned_loss=0.03713, over 4955.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2112, pruned_loss=0.03189, over 972324.69 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:16:36,188 INFO [train.py:715] (7/8) Epoch 12, batch 6450, loss[loss=0.1735, simple_loss=0.2391, pruned_loss=0.05399, over 4993.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03225, over 972132.08 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:17:14,181 INFO [train.py:715] (7/8) Epoch 12, batch 6500, loss[loss=0.1622, simple_loss=0.2328, pruned_loss=0.0458, over 4691.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03219, over 972526.61 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:17:51,825 INFO [train.py:715] (7/8) Epoch 12, batch 6550, loss[loss=0.121, simple_loss=0.1988, pruned_loss=0.0216, over 4870.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.03262, over 972810.16 frames.], batch size: 30, lr: 1.83e-04 +2022-05-07 09:18:29,930 INFO [train.py:715] (7/8) Epoch 12, batch 6600, loss[loss=0.1279, 
simple_loss=0.1975, pruned_loss=0.02914, over 4927.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2114, pruned_loss=0.03284, over 972162.10 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:19:08,078 INFO [train.py:715] (7/8) Epoch 12, batch 6650, loss[loss=0.1184, simple_loss=0.184, pruned_loss=0.02641, over 4833.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03244, over 972209.86 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:19:46,571 INFO [train.py:715] (7/8) Epoch 12, batch 6700, loss[loss=0.1507, simple_loss=0.2122, pruned_loss=0.04465, over 4927.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2102, pruned_loss=0.03231, over 972554.26 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:20:24,046 INFO [train.py:715] (7/8) Epoch 12, batch 6750, loss[loss=0.1238, simple_loss=0.1988, pruned_loss=0.02442, over 4848.00 frames.], tot_loss[loss=0.1368, simple_loss=0.21, pruned_loss=0.03185, over 972181.91 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:21:02,174 INFO [train.py:715] (7/8) Epoch 12, batch 6800, loss[loss=0.1138, simple_loss=0.1857, pruned_loss=0.02093, over 4945.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03171, over 971823.85 frames.], batch size: 14, lr: 1.83e-04 +2022-05-07 09:21:40,238 INFO [train.py:715] (7/8) Epoch 12, batch 6850, loss[loss=0.1382, simple_loss=0.2102, pruned_loss=0.03311, over 4848.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03157, over 972579.90 frames.], batch size: 34, lr: 1.83e-04 +2022-05-07 09:22:18,034 INFO [train.py:715] (7/8) Epoch 12, batch 6900, loss[loss=0.1467, simple_loss=0.2197, pruned_loss=0.0369, over 4808.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03134, over 972605.96 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:22:56,143 INFO [train.py:715] (7/8) Epoch 12, batch 6950, loss[loss=0.1416, simple_loss=0.2237, pruned_loss=0.0297, over 4944.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03118, over 972242.69 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:23:34,138 INFO [train.py:715] (7/8) Epoch 12, batch 7000, loss[loss=0.1157, simple_loss=0.1782, pruned_loss=0.02655, over 4748.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03122, over 972022.84 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:24:12,556 INFO [train.py:715] (7/8) Epoch 12, batch 7050, loss[loss=0.149, simple_loss=0.2268, pruned_loss=0.0356, over 4785.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.0317, over 972178.71 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:24:50,037 INFO [train.py:715] (7/8) Epoch 12, batch 7100, loss[loss=0.1158, simple_loss=0.1838, pruned_loss=0.02397, over 4776.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.0318, over 971489.20 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:25:28,613 INFO [train.py:715] (7/8) Epoch 12, batch 7150, loss[loss=0.1348, simple_loss=0.2104, pruned_loss=0.02963, over 4807.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03159, over 971285.21 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:26:06,441 INFO [train.py:715] (7/8) Epoch 12, batch 7200, loss[loss=0.1429, simple_loss=0.2171, pruned_loss=0.03435, over 4820.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03185, over 970894.60 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:26:44,290 INFO [train.py:715] (7/8) Epoch 12, batch 7250, loss[loss=0.1567, simple_loss=0.2351, 
pruned_loss=0.03914, over 4933.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2105, pruned_loss=0.03223, over 971053.35 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:27:22,552 INFO [train.py:715] (7/8) Epoch 12, batch 7300, loss[loss=0.1536, simple_loss=0.2305, pruned_loss=0.03837, over 4962.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2105, pruned_loss=0.03238, over 971180.06 frames.], batch size: 39, lr: 1.83e-04 +2022-05-07 09:28:00,297 INFO [train.py:715] (7/8) Epoch 12, batch 7350, loss[loss=0.1389, simple_loss=0.2182, pruned_loss=0.02979, over 4981.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03193, over 971307.94 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:28:38,318 INFO [train.py:715] (7/8) Epoch 12, batch 7400, loss[loss=0.1116, simple_loss=0.1802, pruned_loss=0.02145, over 4779.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03194, over 972557.80 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:29:16,070 INFO [train.py:715] (7/8) Epoch 12, batch 7450, loss[loss=0.1091, simple_loss=0.1809, pruned_loss=0.01862, over 4821.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03203, over 972510.04 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:29:54,161 INFO [train.py:715] (7/8) Epoch 12, batch 7500, loss[loss=0.1191, simple_loss=0.1959, pruned_loss=0.02113, over 4893.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03184, over 971386.71 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:30:32,181 INFO [train.py:715] (7/8) Epoch 12, batch 7550, loss[loss=0.1452, simple_loss=0.2101, pruned_loss=0.04018, over 4864.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03178, over 972014.01 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:31:10,032 INFO [train.py:715] (7/8) Epoch 12, batch 7600, loss[loss=0.1315, simple_loss=0.2085, pruned_loss=0.02725, over 4854.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03161, over 972091.01 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:31:48,260 INFO [train.py:715] (7/8) Epoch 12, batch 7650, loss[loss=0.1432, simple_loss=0.2192, pruned_loss=0.03366, over 4798.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03148, over 972537.83 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:32:26,438 INFO [train.py:715] (7/8) Epoch 12, batch 7700, loss[loss=0.1133, simple_loss=0.191, pruned_loss=0.01784, over 4805.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03125, over 972396.55 frames.], batch size: 21, lr: 1.83e-04 +2022-05-07 09:33:04,636 INFO [train.py:715] (7/8) Epoch 12, batch 7750, loss[loss=0.1335, simple_loss=0.2118, pruned_loss=0.02762, over 4907.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03133, over 972094.21 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:33:42,404 INFO [train.py:715] (7/8) Epoch 12, batch 7800, loss[loss=0.1307, simple_loss=0.2004, pruned_loss=0.03049, over 4786.00 frames.], tot_loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03169, over 972708.81 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:34:20,595 INFO [train.py:715] (7/8) Epoch 12, batch 7850, loss[loss=0.1112, simple_loss=0.1807, pruned_loss=0.02086, over 4939.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03219, over 972857.57 frames.], batch size: 35, lr: 1.83e-04 +2022-05-07 09:34:58,402 INFO [train.py:715] (7/8) Epoch 12, batch 7900, loss[loss=0.1299, simple_loss=0.2127, pruned_loss=0.02355, 
over 4811.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03224, over 972643.86 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:35:36,648 INFO [train.py:715] (7/8) Epoch 12, batch 7950, loss[loss=0.1698, simple_loss=0.2309, pruned_loss=0.05435, over 4900.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03227, over 972685.58 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:36:14,624 INFO [train.py:715] (7/8) Epoch 12, batch 8000, loss[loss=0.117, simple_loss=0.1944, pruned_loss=0.01982, over 4704.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03248, over 971978.91 frames.], batch size: 15, lr: 1.83e-04 +2022-05-07 09:36:53,081 INFO [train.py:715] (7/8) Epoch 12, batch 8050, loss[loss=0.1271, simple_loss=0.2091, pruned_loss=0.02258, over 4871.00 frames.], tot_loss[loss=0.1388, simple_loss=0.212, pruned_loss=0.03279, over 971716.27 frames.], batch size: 22, lr: 1.83e-04 +2022-05-07 09:37:31,432 INFO [train.py:715] (7/8) Epoch 12, batch 8100, loss[loss=0.1357, simple_loss=0.2132, pruned_loss=0.02911, over 4898.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03283, over 971897.55 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:38:09,022 INFO [train.py:715] (7/8) Epoch 12, batch 8150, loss[loss=0.1126, simple_loss=0.1777, pruned_loss=0.02369, over 4848.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03254, over 971242.66 frames.], batch size: 30, lr: 1.83e-04 +2022-05-07 09:38:47,284 INFO [train.py:715] (7/8) Epoch 12, batch 8200, loss[loss=0.1327, simple_loss=0.216, pruned_loss=0.02466, over 4851.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03202, over 972089.74 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:39:25,281 INFO [train.py:715] (7/8) Epoch 12, batch 8250, loss[loss=0.1431, simple_loss=0.2206, pruned_loss=0.03281, over 4780.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2127, pruned_loss=0.03282, over 971339.25 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:40:03,013 INFO [train.py:715] (7/8) Epoch 12, batch 8300, loss[loss=0.1341, simple_loss=0.2106, pruned_loss=0.02886, over 4868.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2122, pruned_loss=0.03251, over 972325.20 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:40:41,110 INFO [train.py:715] (7/8) Epoch 12, batch 8350, loss[loss=0.1244, simple_loss=0.1999, pruned_loss=0.02444, over 4857.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03253, over 972885.60 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:41:19,299 INFO [train.py:715] (7/8) Epoch 12, batch 8400, loss[loss=0.1336, simple_loss=0.2016, pruned_loss=0.03277, over 4756.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2117, pruned_loss=0.03283, over 973630.18 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:41:57,372 INFO [train.py:715] (7/8) Epoch 12, batch 8450, loss[loss=0.137, simple_loss=0.2141, pruned_loss=0.02993, over 4872.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.03232, over 973323.69 frames.], batch size: 22, lr: 1.83e-04 +2022-05-07 09:42:34,902 INFO [train.py:715] (7/8) Epoch 12, batch 8500, loss[loss=0.1232, simple_loss=0.1894, pruned_loss=0.02854, over 4910.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2115, pruned_loss=0.0327, over 973052.65 frames.], batch size: 18, lr: 1.83e-04 +2022-05-07 09:43:13,188 INFO [train.py:715] (7/8) Epoch 12, batch 8550, loss[loss=0.1259, simple_loss=0.1885, pruned_loss=0.0317, over 4813.00 frames.], 
tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03252, over 972120.77 frames.], batch size: 12, lr: 1.83e-04 +2022-05-07 09:43:51,187 INFO [train.py:715] (7/8) Epoch 12, batch 8600, loss[loss=0.1478, simple_loss=0.2254, pruned_loss=0.03507, over 4795.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2121, pruned_loss=0.03316, over 972195.77 frames.], batch size: 24, lr: 1.83e-04 +2022-05-07 09:44:28,879 INFO [train.py:715] (7/8) Epoch 12, batch 8650, loss[loss=0.1378, simple_loss=0.2066, pruned_loss=0.03447, over 4977.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2116, pruned_loss=0.03299, over 971497.14 frames.], batch size: 33, lr: 1.83e-04 +2022-05-07 09:45:07,104 INFO [train.py:715] (7/8) Epoch 12, batch 8700, loss[loss=0.1526, simple_loss=0.2182, pruned_loss=0.04347, over 4948.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2111, pruned_loss=0.03276, over 971345.34 frames.], batch size: 29, lr: 1.83e-04 +2022-05-07 09:45:45,270 INFO [train.py:715] (7/8) Epoch 12, batch 8750, loss[loss=0.1549, simple_loss=0.2341, pruned_loss=0.03784, over 4920.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2111, pruned_loss=0.03254, over 972359.92 frames.], batch size: 23, lr: 1.83e-04 +2022-05-07 09:46:23,701 INFO [train.py:715] (7/8) Epoch 12, batch 8800, loss[loss=0.1282, simple_loss=0.2039, pruned_loss=0.02622, over 4842.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.03241, over 972703.90 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:47:01,614 INFO [train.py:715] (7/8) Epoch 12, batch 8850, loss[loss=0.1329, simple_loss=0.2053, pruned_loss=0.03019, over 4747.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03238, over 972575.45 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:47:40,605 INFO [train.py:715] (7/8) Epoch 12, batch 8900, loss[loss=0.1392, simple_loss=0.2068, pruned_loss=0.0358, over 4653.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2108, pruned_loss=0.03241, over 972665.31 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:48:20,143 INFO [train.py:715] (7/8) Epoch 12, batch 8950, loss[loss=0.1293, simple_loss=0.2011, pruned_loss=0.02876, over 4860.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2108, pruned_loss=0.03227, over 972199.71 frames.], batch size: 20, lr: 1.83e-04 +2022-05-07 09:48:58,104 INFO [train.py:715] (7/8) Epoch 12, batch 9000, loss[loss=0.1419, simple_loss=0.2104, pruned_loss=0.03673, over 4749.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03242, over 972691.72 frames.], batch size: 14, lr: 1.83e-04 +2022-05-07 09:48:58,104 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 09:49:07,571 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1898, pruned_loss=0.01084, over 914524.00 frames. 
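The loss values reported in these lines are consistent with a fixed weighted sum of the two logged components, loss ≈ 0.5 × simple_loss + pruned_loss; for the validation entry just above, 0.5 × 0.1898 + 0.01084 ≈ 0.1057. The short check below is a minimal sketch over values copied from the nearby entries; the 0.5 weight is inferred from the logged numbers themselves, not read out of the training script.

```python
# Minimal sketch: check that loss ~= 0.5 * simple_loss + pruned_loss for a few
# (loss, simple_loss, pruned_loss) triples copied from the entries above.
# The 0.5 weight is an inference from the logged values, not from train.py itself.
samples = [
    (0.1392, 0.2068, 0.0358),    # Epoch 12, batch 8900 (single-batch loss)
    (0.1381, 0.2114, 0.03242),   # Epoch 12, batch 9000 (tot_loss)
    (0.1057, 0.1898, 0.01084),   # Epoch 12, validation after batch 9000
]

for loss, simple_loss, pruned_loss in samples:
    reconstructed = 0.5 * simple_loss + pruned_loss
    # Logged values are rounded to ~4 significant digits, so allow a small tolerance.
    assert abs(reconstructed - loss) < 5e-4, (loss, reconstructed)
    print(f"logged={loss:.4f}  0.5*simple+pruned={reconstructed:.4f}")
```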
+2022-05-07 09:49:45,345 INFO [train.py:715] (7/8) Epoch 12, batch 9050, loss[loss=0.1243, simple_loss=0.2037, pruned_loss=0.02249, over 4755.00 frames.], tot_loss[loss=0.139, simple_loss=0.2124, pruned_loss=0.03284, over 972244.30 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:50:23,564 INFO [train.py:715] (7/8) Epoch 12, batch 9100, loss[loss=0.1375, simple_loss=0.222, pruned_loss=0.02646, over 4895.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2129, pruned_loss=0.03281, over 971520.01 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:51:01,823 INFO [train.py:715] (7/8) Epoch 12, batch 9150, loss[loss=0.1341, simple_loss=0.2164, pruned_loss=0.02587, over 4801.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2117, pruned_loss=0.03229, over 971203.20 frames.], batch size: 13, lr: 1.83e-04 +2022-05-07 09:51:39,541 INFO [train.py:715] (7/8) Epoch 12, batch 9200, loss[loss=0.1113, simple_loss=0.189, pruned_loss=0.01683, over 4832.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03211, over 971348.45 frames.], batch size: 30, lr: 1.83e-04 +2022-05-07 09:52:17,394 INFO [train.py:715] (7/8) Epoch 12, batch 9250, loss[loss=0.1384, simple_loss=0.2206, pruned_loss=0.02807, over 4749.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03162, over 971930.90 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:52:55,473 INFO [train.py:715] (7/8) Epoch 12, batch 9300, loss[loss=0.09932, simple_loss=0.1734, pruned_loss=0.01263, over 4899.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03141, over 971488.13 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:53:33,066 INFO [train.py:715] (7/8) Epoch 12, batch 9350, loss[loss=0.1165, simple_loss=0.1988, pruned_loss=0.01708, over 4904.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03169, over 971317.40 frames.], batch size: 17, lr: 1.83e-04 +2022-05-07 09:54:10,843 INFO [train.py:715] (7/8) Epoch 12, batch 9400, loss[loss=0.1139, simple_loss=0.1894, pruned_loss=0.01921, over 4810.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03216, over 971555.19 frames.], batch size: 25, lr: 1.83e-04 +2022-05-07 09:54:48,556 INFO [train.py:715] (7/8) Epoch 12, batch 9450, loss[loss=0.1326, simple_loss=0.204, pruned_loss=0.03063, over 4776.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2113, pruned_loss=0.0319, over 971737.45 frames.], batch size: 19, lr: 1.83e-04 +2022-05-07 09:55:26,595 INFO [train.py:715] (7/8) Epoch 12, batch 9500, loss[loss=0.132, simple_loss=0.2139, pruned_loss=0.02502, over 4755.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03171, over 971507.94 frames.], batch size: 16, lr: 1.83e-04 +2022-05-07 09:56:04,147 INFO [train.py:715] (7/8) Epoch 12, batch 9550, loss[loss=0.1675, simple_loss=0.2235, pruned_loss=0.05573, over 4774.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.03217, over 971466.39 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 09:56:41,641 INFO [train.py:715] (7/8) Epoch 12, batch 9600, loss[loss=0.1236, simple_loss=0.1947, pruned_loss=0.02626, over 4942.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2111, pruned_loss=0.03211, over 971210.52 frames.], batch size: 29, lr: 1.82e-04 +2022-05-07 09:57:19,886 INFO [train.py:715] (7/8) Epoch 12, batch 9650, loss[loss=0.139, simple_loss=0.2196, pruned_loss=0.02921, over 4787.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03153, over 971232.69 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 09:57:57,756 
INFO [train.py:715] (7/8) Epoch 12, batch 9700, loss[loss=0.128, simple_loss=0.2041, pruned_loss=0.02594, over 4887.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03136, over 971548.53 frames.], batch size: 22, lr: 1.82e-04 +2022-05-07 09:58:35,536 INFO [train.py:715] (7/8) Epoch 12, batch 9750, loss[loss=0.171, simple_loss=0.2277, pruned_loss=0.05717, over 4800.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03147, over 971617.23 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 09:59:13,483 INFO [train.py:715] (7/8) Epoch 12, batch 9800, loss[loss=0.1296, simple_loss=0.2008, pruned_loss=0.02922, over 4732.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03188, over 971595.38 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 09:59:52,002 INFO [train.py:715] (7/8) Epoch 12, batch 9850, loss[loss=0.2366, simple_loss=0.3132, pruned_loss=0.08001, over 4872.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03218, over 971639.90 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:00:29,633 INFO [train.py:715] (7/8) Epoch 12, batch 9900, loss[loss=0.126, simple_loss=0.2032, pruned_loss=0.02437, over 4855.00 frames.], tot_loss[loss=0.1383, simple_loss=0.212, pruned_loss=0.03228, over 972103.58 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:01:07,864 INFO [train.py:715] (7/8) Epoch 12, batch 9950, loss[loss=0.1288, simple_loss=0.2033, pruned_loss=0.02716, over 4771.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03255, over 972554.90 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:01:46,618 INFO [train.py:715] (7/8) Epoch 12, batch 10000, loss[loss=0.157, simple_loss=0.2255, pruned_loss=0.04423, over 4911.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03257, over 972693.63 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:02:25,150 INFO [train.py:715] (7/8) Epoch 12, batch 10050, loss[loss=0.1384, simple_loss=0.2108, pruned_loss=0.03297, over 4891.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.03261, over 973559.80 frames.], batch size: 22, lr: 1.82e-04 +2022-05-07 10:03:03,488 INFO [train.py:715] (7/8) Epoch 12, batch 10100, loss[loss=0.1248, simple_loss=0.2029, pruned_loss=0.02334, over 4776.00 frames.], tot_loss[loss=0.139, simple_loss=0.2125, pruned_loss=0.03274, over 972887.95 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:03:41,897 INFO [train.py:715] (7/8) Epoch 12, batch 10150, loss[loss=0.1296, simple_loss=0.2173, pruned_loss=0.02097, over 4928.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2127, pruned_loss=0.03317, over 971778.35 frames.], batch size: 29, lr: 1.82e-04 +2022-05-07 10:04:20,551 INFO [train.py:715] (7/8) Epoch 12, batch 10200, loss[loss=0.1268, simple_loss=0.1919, pruned_loss=0.03088, over 4852.00 frames.], tot_loss[loss=0.1386, simple_loss=0.212, pruned_loss=0.0326, over 972081.77 frames.], batch size: 32, lr: 1.82e-04 +2022-05-07 10:04:57,863 INFO [train.py:715] (7/8) Epoch 12, batch 10250, loss[loss=0.1482, simple_loss=0.2258, pruned_loss=0.03524, over 4737.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2123, pruned_loss=0.03265, over 972053.36 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:05:36,037 INFO [train.py:715] (7/8) Epoch 12, batch 10300, loss[loss=0.1361, simple_loss=0.2103, pruned_loss=0.03089, over 4858.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03215, over 971949.99 frames.], batch size: 39, lr: 1.82e-04 +2022-05-07 10:06:14,197 INFO [train.py:715] (7/8) 
Epoch 12, batch 10350, loss[loss=0.1182, simple_loss=0.1762, pruned_loss=0.03009, over 4752.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03211, over 972326.75 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:06:52,241 INFO [train.py:715] (7/8) Epoch 12, batch 10400, loss[loss=0.1357, simple_loss=0.2115, pruned_loss=0.03001, over 4978.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2111, pruned_loss=0.03154, over 972865.33 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:07:29,797 INFO [train.py:715] (7/8) Epoch 12, batch 10450, loss[loss=0.1126, simple_loss=0.1931, pruned_loss=0.01605, over 4912.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.0314, over 973115.96 frames.], batch size: 29, lr: 1.82e-04 +2022-05-07 10:08:07,726 INFO [train.py:715] (7/8) Epoch 12, batch 10500, loss[loss=0.1288, simple_loss=0.2023, pruned_loss=0.0277, over 4871.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03126, over 972716.72 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:08:46,134 INFO [train.py:715] (7/8) Epoch 12, batch 10550, loss[loss=0.143, simple_loss=0.2154, pruned_loss=0.03526, over 4804.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03128, over 972266.82 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:09:23,508 INFO [train.py:715] (7/8) Epoch 12, batch 10600, loss[loss=0.1269, simple_loss=0.2036, pruned_loss=0.02512, over 4702.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.0309, over 971275.77 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:10:01,496 INFO [train.py:715] (7/8) Epoch 12, batch 10650, loss[loss=0.1498, simple_loss=0.2168, pruned_loss=0.0414, over 4807.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03109, over 971457.33 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:10:39,355 INFO [train.py:715] (7/8) Epoch 12, batch 10700, loss[loss=0.1295, simple_loss=0.213, pruned_loss=0.02299, over 4965.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03154, over 971381.38 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:11:16,858 INFO [train.py:715] (7/8) Epoch 12, batch 10750, loss[loss=0.1395, simple_loss=0.21, pruned_loss=0.03448, over 4850.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03172, over 971471.05 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:11:54,743 INFO [train.py:715] (7/8) Epoch 12, batch 10800, loss[loss=0.1486, simple_loss=0.2141, pruned_loss=0.04157, over 4868.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.0315, over 971071.79 frames.], batch size: 38, lr: 1.82e-04 +2022-05-07 10:12:32,733 INFO [train.py:715] (7/8) Epoch 12, batch 10850, loss[loss=0.1326, simple_loss=0.2106, pruned_loss=0.02728, over 4776.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03165, over 970604.15 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:13:11,527 INFO [train.py:715] (7/8) Epoch 12, batch 10900, loss[loss=0.1666, simple_loss=0.228, pruned_loss=0.05262, over 4963.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03155, over 971048.01 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:13:48,733 INFO [train.py:715] (7/8) Epoch 12, batch 10950, loss[loss=0.1352, simple_loss=0.1979, pruned_loss=0.03629, over 4860.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03124, over 969972.78 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:14:26,875 INFO [train.py:715] (7/8) Epoch 12, batch 
11000, loss[loss=0.1396, simple_loss=0.219, pruned_loss=0.03012, over 4747.00 frames.], tot_loss[loss=0.1358, simple_loss=0.21, pruned_loss=0.03083, over 970861.95 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:15:05,146 INFO [train.py:715] (7/8) Epoch 12, batch 11050, loss[loss=0.1324, simple_loss=0.2079, pruned_loss=0.02841, over 4810.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03095, over 971731.29 frames.], batch size: 27, lr: 1.82e-04 +2022-05-07 10:15:42,771 INFO [train.py:715] (7/8) Epoch 12, batch 11100, loss[loss=0.1641, simple_loss=0.2296, pruned_loss=0.04935, over 4759.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03103, over 972704.49 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:16:21,272 INFO [train.py:715] (7/8) Epoch 12, batch 11150, loss[loss=0.1207, simple_loss=0.1943, pruned_loss=0.0235, over 4688.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03124, over 973180.14 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:16:58,890 INFO [train.py:715] (7/8) Epoch 12, batch 11200, loss[loss=0.1265, simple_loss=0.1999, pruned_loss=0.02655, over 4872.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03161, over 972495.99 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:17:36,988 INFO [train.py:715] (7/8) Epoch 12, batch 11250, loss[loss=0.1237, simple_loss=0.1928, pruned_loss=0.02727, over 4796.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03187, over 972846.16 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:18:14,709 INFO [train.py:715] (7/8) Epoch 12, batch 11300, loss[loss=0.1056, simple_loss=0.1759, pruned_loss=0.01769, over 4760.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03184, over 973391.18 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:18:51,980 INFO [train.py:715] (7/8) Epoch 12, batch 11350, loss[loss=0.1409, simple_loss=0.2101, pruned_loss=0.03581, over 4993.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03176, over 971860.10 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:19:30,200 INFO [train.py:715] (7/8) Epoch 12, batch 11400, loss[loss=0.138, simple_loss=0.211, pruned_loss=0.03252, over 4765.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03208, over 972470.50 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:20:07,741 INFO [train.py:715] (7/8) Epoch 12, batch 11450, loss[loss=0.1164, simple_loss=0.1851, pruned_loss=0.02381, over 4761.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2098, pruned_loss=0.03199, over 972647.29 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:20:45,257 INFO [train.py:715] (7/8) Epoch 12, batch 11500, loss[loss=0.1615, simple_loss=0.2476, pruned_loss=0.03772, over 4906.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2093, pruned_loss=0.03167, over 972611.26 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:21:23,003 INFO [train.py:715] (7/8) Epoch 12, batch 11550, loss[loss=0.1285, simple_loss=0.1994, pruned_loss=0.02879, over 4811.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03142, over 973042.92 frames.], batch size: 26, lr: 1.82e-04 +2022-05-07 10:22:01,393 INFO [train.py:715] (7/8) Epoch 12, batch 11600, loss[loss=0.129, simple_loss=0.1994, pruned_loss=0.02935, over 4821.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2099, pruned_loss=0.03182, over 973558.39 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:22:38,880 INFO [train.py:715] (7/8) Epoch 12, batch 11650, 
loss[loss=0.1342, simple_loss=0.2115, pruned_loss=0.0284, over 4748.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03196, over 973491.06 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:23:16,090 INFO [train.py:715] (7/8) Epoch 12, batch 11700, loss[loss=0.1253, simple_loss=0.2, pruned_loss=0.02528, over 4858.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03194, over 972783.92 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:23:53,748 INFO [train.py:715] (7/8) Epoch 12, batch 11750, loss[loss=0.123, simple_loss=0.1992, pruned_loss=0.02341, over 4793.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2117, pruned_loss=0.03239, over 972184.24 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:24:31,083 INFO [train.py:715] (7/8) Epoch 12, batch 11800, loss[loss=0.1753, simple_loss=0.237, pruned_loss=0.05685, over 4694.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03283, over 972851.04 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:25:08,778 INFO [train.py:715] (7/8) Epoch 12, batch 11850, loss[loss=0.1368, simple_loss=0.2169, pruned_loss=0.02835, over 4819.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03247, over 972365.15 frames.], batch size: 27, lr: 1.82e-04 +2022-05-07 10:25:46,625 INFO [train.py:715] (7/8) Epoch 12, batch 11900, loss[loss=0.1614, simple_loss=0.2316, pruned_loss=0.04557, over 4768.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03235, over 972954.97 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:26:24,514 INFO [train.py:715] (7/8) Epoch 12, batch 11950, loss[loss=0.1153, simple_loss=0.1843, pruned_loss=0.02318, over 4791.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03193, over 973000.52 frames.], batch size: 13, lr: 1.82e-04 +2022-05-07 10:27:01,976 INFO [train.py:715] (7/8) Epoch 12, batch 12000, loss[loss=0.1448, simple_loss=0.2228, pruned_loss=0.03342, over 4755.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03148, over 972601.70 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:27:01,976 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 10:27:11,324 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1058, simple_loss=0.1897, pruned_loss=0.01095, over 914524.00 frames. 
+2022-05-07 10:27:50,014 INFO [train.py:715] (7/8) Epoch 12, batch 12050, loss[loss=0.1147, simple_loss=0.191, pruned_loss=0.01919, over 4837.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03145, over 972824.56 frames.], batch size: 32, lr: 1.82e-04 +2022-05-07 10:28:29,091 INFO [train.py:715] (7/8) Epoch 12, batch 12100, loss[loss=0.1394, simple_loss=0.2137, pruned_loss=0.03257, over 4884.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03161, over 972029.12 frames.], batch size: 22, lr: 1.82e-04 +2022-05-07 10:29:08,846 INFO [train.py:715] (7/8) Epoch 12, batch 12150, loss[loss=0.128, simple_loss=0.1923, pruned_loss=0.03184, over 4807.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03178, over 972298.13 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:29:47,130 INFO [train.py:715] (7/8) Epoch 12, batch 12200, loss[loss=0.1318, simple_loss=0.2104, pruned_loss=0.0266, over 4823.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03197, over 973021.08 frames.], batch size: 25, lr: 1.82e-04 +2022-05-07 10:30:25,384 INFO [train.py:715] (7/8) Epoch 12, batch 12250, loss[loss=0.1408, simple_loss=0.2186, pruned_loss=0.03145, over 4750.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2115, pruned_loss=0.0325, over 972137.97 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:31:04,238 INFO [train.py:715] (7/8) Epoch 12, batch 12300, loss[loss=0.1662, simple_loss=0.2304, pruned_loss=0.05102, over 4890.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2116, pruned_loss=0.03273, over 971773.47 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:31:42,815 INFO [train.py:715] (7/8) Epoch 12, batch 12350, loss[loss=0.1143, simple_loss=0.1991, pruned_loss=0.0148, over 4838.00 frames.], tot_loss[loss=0.1388, simple_loss=0.2121, pruned_loss=0.03275, over 971474.83 frames.], batch size: 27, lr: 1.82e-04 +2022-05-07 10:32:20,261 INFO [train.py:715] (7/8) Epoch 12, batch 12400, loss[loss=0.1211, simple_loss=0.197, pruned_loss=0.02259, over 4852.00 frames.], tot_loss[loss=0.138, simple_loss=0.2113, pruned_loss=0.03238, over 971808.12 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:32:57,986 INFO [train.py:715] (7/8) Epoch 12, batch 12450, loss[loss=0.1282, simple_loss=0.2078, pruned_loss=0.02427, over 4976.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03236, over 971739.46 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:33:36,207 INFO [train.py:715] (7/8) Epoch 12, batch 12500, loss[loss=0.1399, simple_loss=0.2063, pruned_loss=0.03676, over 4753.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03215, over 971520.59 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:34:13,317 INFO [train.py:715] (7/8) Epoch 12, batch 12550, loss[loss=0.1281, simple_loss=0.2044, pruned_loss=0.02592, over 4901.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03211, over 971463.86 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:34:51,153 INFO [train.py:715] (7/8) Epoch 12, batch 12600, loss[loss=0.1428, simple_loss=0.2098, pruned_loss=0.0379, over 4764.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03189, over 971403.44 frames.], batch size: 19, lr: 1.82e-04 +2022-05-07 10:35:28,922 INFO [train.py:715] (7/8) Epoch 12, batch 12650, loss[loss=0.1364, simple_loss=0.2138, pruned_loss=0.0295, over 4826.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03186, over 971834.36 frames.], batch size: 25, lr: 1.82e-04 +2022-05-07 
10:36:06,673 INFO [train.py:715] (7/8) Epoch 12, batch 12700, loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.0324, over 4800.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2118, pruned_loss=0.032, over 972248.72 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:36:44,126 INFO [train.py:715] (7/8) Epoch 12, batch 12750, loss[loss=0.1525, simple_loss=0.2199, pruned_loss=0.04256, over 4853.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2117, pruned_loss=0.03208, over 973133.78 frames.], batch size: 30, lr: 1.82e-04 +2022-05-07 10:37:22,154 INFO [train.py:715] (7/8) Epoch 12, batch 12800, loss[loss=0.1488, simple_loss=0.2127, pruned_loss=0.0424, over 4920.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2115, pruned_loss=0.0322, over 972329.86 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:38:00,583 INFO [train.py:715] (7/8) Epoch 12, batch 12850, loss[loss=0.1176, simple_loss=0.196, pruned_loss=0.01953, over 4780.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.03232, over 972514.02 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:38:37,911 INFO [train.py:715] (7/8) Epoch 12, batch 12900, loss[loss=0.1443, simple_loss=0.2252, pruned_loss=0.03173, over 4862.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03178, over 972035.59 frames.], batch size: 20, lr: 1.82e-04 +2022-05-07 10:39:15,002 INFO [train.py:715] (7/8) Epoch 12, batch 12950, loss[loss=0.1276, simple_loss=0.2009, pruned_loss=0.02715, over 4706.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03216, over 972181.23 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:39:52,997 INFO [train.py:715] (7/8) Epoch 12, batch 13000, loss[loss=0.151, simple_loss=0.2252, pruned_loss=0.03836, over 4965.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03202, over 971897.96 frames.], batch size: 35, lr: 1.82e-04 +2022-05-07 10:40:30,779 INFO [train.py:715] (7/8) Epoch 12, batch 13050, loss[loss=0.1207, simple_loss=0.1975, pruned_loss=0.02192, over 4777.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03196, over 972695.46 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:41:08,530 INFO [train.py:715] (7/8) Epoch 12, batch 13100, loss[loss=0.1389, simple_loss=0.222, pruned_loss=0.02788, over 4920.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.03171, over 973004.82 frames.], batch size: 18, lr: 1.82e-04 +2022-05-07 10:41:46,121 INFO [train.py:715] (7/8) Epoch 12, batch 13150, loss[loss=0.1306, simple_loss=0.1961, pruned_loss=0.03258, over 4791.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03198, over 972668.40 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:42:23,788 INFO [train.py:715] (7/8) Epoch 12, batch 13200, loss[loss=0.1457, simple_loss=0.2103, pruned_loss=0.04054, over 4768.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03204, over 972778.85 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:43:01,013 INFO [train.py:715] (7/8) Epoch 12, batch 13250, loss[loss=0.1434, simple_loss=0.2147, pruned_loss=0.03608, over 4984.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2115, pruned_loss=0.0324, over 971816.31 frames.], batch size: 31, lr: 1.82e-04 +2022-05-07 10:43:38,186 INFO [train.py:715] (7/8) Epoch 12, batch 13300, loss[loss=0.1316, simple_loss=0.2126, pruned_loss=0.02526, over 4878.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03242, over 972174.56 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:44:16,077 
INFO [train.py:715] (7/8) Epoch 12, batch 13350, loss[loss=0.1216, simple_loss=0.191, pruned_loss=0.02613, over 4869.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03196, over 972572.86 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:44:54,314 INFO [train.py:715] (7/8) Epoch 12, batch 13400, loss[loss=0.1266, simple_loss=0.2005, pruned_loss=0.0264, over 4818.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.0321, over 972966.16 frames.], batch size: 26, lr: 1.82e-04 +2022-05-07 10:45:31,692 INFO [train.py:715] (7/8) Epoch 12, batch 13450, loss[loss=0.124, simple_loss=0.193, pruned_loss=0.02746, over 4911.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03135, over 972720.61 frames.], batch size: 23, lr: 1.82e-04 +2022-05-07 10:46:09,029 INFO [train.py:715] (7/8) Epoch 12, batch 13500, loss[loss=0.1166, simple_loss=0.1895, pruned_loss=0.02184, over 4771.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03184, over 971990.56 frames.], batch size: 17, lr: 1.82e-04 +2022-05-07 10:46:47,475 INFO [train.py:715] (7/8) Epoch 12, batch 13550, loss[loss=0.1267, simple_loss=0.1987, pruned_loss=0.02729, over 4969.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03203, over 972803.03 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:47:24,686 INFO [train.py:715] (7/8) Epoch 12, batch 13600, loss[loss=0.1325, simple_loss=0.2123, pruned_loss=0.02632, over 4967.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03211, over 973282.61 frames.], batch size: 39, lr: 1.82e-04 +2022-05-07 10:48:02,572 INFO [train.py:715] (7/8) Epoch 12, batch 13650, loss[loss=0.1341, simple_loss=0.2068, pruned_loss=0.03073, over 4832.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03236, over 973602.67 frames.], batch size: 27, lr: 1.82e-04 +2022-05-07 10:48:40,708 INFO [train.py:715] (7/8) Epoch 12, batch 13700, loss[loss=0.1298, simple_loss=0.2111, pruned_loss=0.02423, over 4818.00 frames.], tot_loss[loss=0.1384, simple_loss=0.2118, pruned_loss=0.03255, over 973601.87 frames.], batch size: 27, lr: 1.82e-04 +2022-05-07 10:49:18,440 INFO [train.py:715] (7/8) Epoch 12, batch 13750, loss[loss=0.1219, simple_loss=0.1886, pruned_loss=0.02764, over 4813.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2109, pruned_loss=0.0323, over 973504.50 frames.], batch size: 12, lr: 1.82e-04 +2022-05-07 10:49:56,510 INFO [train.py:715] (7/8) Epoch 12, batch 13800, loss[loss=0.1317, simple_loss=0.2069, pruned_loss=0.02826, over 4804.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03187, over 973317.44 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:50:34,448 INFO [train.py:715] (7/8) Epoch 12, batch 13850, loss[loss=0.1418, simple_loss=0.2076, pruned_loss=0.03798, over 4799.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.0319, over 972668.86 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:51:12,970 INFO [train.py:715] (7/8) Epoch 12, batch 13900, loss[loss=0.1138, simple_loss=0.1919, pruned_loss=0.01784, over 4695.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2104, pruned_loss=0.03207, over 972245.19 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:51:50,184 INFO [train.py:715] (7/8) Epoch 12, batch 13950, loss[loss=0.163, simple_loss=0.2241, pruned_loss=0.05099, over 4954.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03204, over 973783.93 frames.], batch size: 35, lr: 1.82e-04 +2022-05-07 10:52:28,377 INFO 
[train.py:715] (7/8) Epoch 12, batch 14000, loss[loss=0.1285, simple_loss=0.2061, pruned_loss=0.02542, over 4956.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03235, over 973498.93 frames.], batch size: 21, lr: 1.82e-04 +2022-05-07 10:53:06,889 INFO [train.py:715] (7/8) Epoch 12, batch 14050, loss[loss=0.1285, simple_loss=0.1946, pruned_loss=0.03121, over 4953.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2117, pruned_loss=0.03241, over 973001.91 frames.], batch size: 24, lr: 1.82e-04 +2022-05-07 10:53:44,259 INFO [train.py:715] (7/8) Epoch 12, batch 14100, loss[loss=0.1503, simple_loss=0.2155, pruned_loss=0.04249, over 4954.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03246, over 973418.72 frames.], batch size: 15, lr: 1.82e-04 +2022-05-07 10:54:21,694 INFO [train.py:715] (7/8) Epoch 12, batch 14150, loss[loss=0.1384, simple_loss=0.2028, pruned_loss=0.03695, over 4761.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2115, pruned_loss=0.03292, over 972250.11 frames.], batch size: 14, lr: 1.82e-04 +2022-05-07 10:55:00,102 INFO [train.py:715] (7/8) Epoch 12, batch 14200, loss[loss=0.1234, simple_loss=0.1945, pruned_loss=0.02611, over 4750.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.03252, over 971311.52 frames.], batch size: 16, lr: 1.82e-04 +2022-05-07 10:55:38,424 INFO [train.py:715] (7/8) Epoch 12, batch 14250, loss[loss=0.1098, simple_loss=0.1738, pruned_loss=0.0229, over 4768.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2104, pruned_loss=0.03235, over 971421.15 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 10:56:18,082 INFO [train.py:715] (7/8) Epoch 12, batch 14300, loss[loss=0.1377, simple_loss=0.2117, pruned_loss=0.03192, over 4918.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2098, pruned_loss=0.03202, over 970660.32 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 10:56:56,581 INFO [train.py:715] (7/8) Epoch 12, batch 14350, loss[loss=0.1531, simple_loss=0.2177, pruned_loss=0.04421, over 4812.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03188, over 971338.51 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 10:57:35,966 INFO [train.py:715] (7/8) Epoch 12, batch 14400, loss[loss=0.1234, simple_loss=0.2105, pruned_loss=0.01816, over 4837.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03203, over 971007.76 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 10:58:14,112 INFO [train.py:715] (7/8) Epoch 12, batch 14450, loss[loss=0.1305, simple_loss=0.2075, pruned_loss=0.02676, over 4703.00 frames.], tot_loss[loss=0.137, simple_loss=0.21, pruned_loss=0.03203, over 970871.54 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 10:58:53,034 INFO [train.py:715] (7/8) Epoch 12, batch 14500, loss[loss=0.159, simple_loss=0.2236, pruned_loss=0.04715, over 4963.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.0318, over 972446.41 frames.], batch size: 35, lr: 1.81e-04 +2022-05-07 10:59:32,144 INFO [train.py:715] (7/8) Epoch 12, batch 14550, loss[loss=0.1154, simple_loss=0.1941, pruned_loss=0.01832, over 4825.00 frames.], tot_loss[loss=0.1368, simple_loss=0.21, pruned_loss=0.0318, over 973165.90 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 11:00:11,029 INFO [train.py:715] (7/8) Epoch 12, batch 14600, loss[loss=0.1349, simple_loss=0.1993, pruned_loss=0.03529, over 4859.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2101, pruned_loss=0.03203, over 973589.33 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:00:49,645 INFO [train.py:715] 
(7/8) Epoch 12, batch 14650, loss[loss=0.1431, simple_loss=0.2126, pruned_loss=0.03683, over 4732.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2096, pruned_loss=0.03194, over 973741.23 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:01:27,544 INFO [train.py:715] (7/8) Epoch 12, batch 14700, loss[loss=0.1261, simple_loss=0.1958, pruned_loss=0.02814, over 4814.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03186, over 972976.93 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:02:06,067 INFO [train.py:715] (7/8) Epoch 12, batch 14750, loss[loss=0.1222, simple_loss=0.1916, pruned_loss=0.02641, over 4851.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2111, pruned_loss=0.03236, over 972698.83 frames.], batch size: 20, lr: 1.81e-04 +2022-05-07 11:02:43,582 INFO [train.py:715] (7/8) Epoch 12, batch 14800, loss[loss=0.1383, simple_loss=0.2067, pruned_loss=0.03498, over 4948.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03227, over 973140.79 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:03:21,326 INFO [train.py:715] (7/8) Epoch 12, batch 14850, loss[loss=0.1606, simple_loss=0.2436, pruned_loss=0.03879, over 4817.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2112, pruned_loss=0.03199, over 973150.07 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:03:59,687 INFO [train.py:715] (7/8) Epoch 12, batch 14900, loss[loss=0.1289, simple_loss=0.2017, pruned_loss=0.02808, over 4969.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03199, over 973243.82 frames.], batch size: 28, lr: 1.81e-04 +2022-05-07 11:04:38,247 INFO [train.py:715] (7/8) Epoch 12, batch 14950, loss[loss=0.1363, simple_loss=0.2052, pruned_loss=0.03369, over 4921.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03202, over 973311.21 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:05:15,438 INFO [train.py:715] (7/8) Epoch 12, batch 15000, loss[loss=0.1564, simple_loss=0.2226, pruned_loss=0.04514, over 4945.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.03216, over 972483.59 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:05:15,439 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 11:05:25,070 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1897, pruned_loss=0.01083, over 914524.00 frames. 
+2022-05-07 11:06:02,916 INFO [train.py:715] (7/8) Epoch 12, batch 15050, loss[loss=0.1328, simple_loss=0.2044, pruned_loss=0.03065, over 4874.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03202, over 972956.84 frames.], batch size: 32, lr: 1.81e-04 +2022-05-07 11:06:41,208 INFO [train.py:715] (7/8) Epoch 12, batch 15100, loss[loss=0.1323, simple_loss=0.2073, pruned_loss=0.02867, over 4891.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03209, over 973810.14 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:07:20,388 INFO [train.py:715] (7/8) Epoch 12, batch 15150, loss[loss=0.1963, simple_loss=0.2543, pruned_loss=0.06919, over 4880.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03254, over 973278.47 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:07:58,861 INFO [train.py:715] (7/8) Epoch 12, batch 15200, loss[loss=0.1501, simple_loss=0.2226, pruned_loss=0.0388, over 4785.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2122, pruned_loss=0.03278, over 973509.25 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:08:37,648 INFO [train.py:715] (7/8) Epoch 12, batch 15250, loss[loss=0.1338, simple_loss=0.2072, pruned_loss=0.03017, over 4977.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2118, pruned_loss=0.03242, over 973338.81 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:09:16,367 INFO [train.py:715] (7/8) Epoch 12, batch 15300, loss[loss=0.1103, simple_loss=0.1851, pruned_loss=0.01777, over 4862.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03191, over 973482.81 frames.], batch size: 12, lr: 1.81e-04 +2022-05-07 11:09:54,567 INFO [train.py:715] (7/8) Epoch 12, batch 15350, loss[loss=0.1176, simple_loss=0.1946, pruned_loss=0.02026, over 4885.00 frames.], tot_loss[loss=0.137, simple_loss=0.211, pruned_loss=0.03153, over 973680.07 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:10:31,951 INFO [train.py:715] (7/8) Epoch 12, batch 15400, loss[loss=0.1338, simple_loss=0.2069, pruned_loss=0.0303, over 4983.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03127, over 973467.04 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:11:09,688 INFO [train.py:715] (7/8) Epoch 12, batch 15450, loss[loss=0.1451, simple_loss=0.2125, pruned_loss=0.03882, over 4866.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03106, over 973452.83 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:11:48,438 INFO [train.py:715] (7/8) Epoch 12, batch 15500, loss[loss=0.1273, simple_loss=0.2108, pruned_loss=0.02187, over 4929.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03153, over 972889.46 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:12:26,564 INFO [train.py:715] (7/8) Epoch 12, batch 15550, loss[loss=0.1486, simple_loss=0.2216, pruned_loss=0.03774, over 4943.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03155, over 972955.89 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:13:04,459 INFO [train.py:715] (7/8) Epoch 12, batch 15600, loss[loss=0.1464, simple_loss=0.2095, pruned_loss=0.0417, over 4877.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03176, over 972903.45 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:13:42,237 INFO [train.py:715] (7/8) Epoch 12, batch 15650, loss[loss=0.1081, simple_loss=0.1845, pruned_loss=0.01581, over 4841.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03202, over 973317.82 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 
11:14:20,673 INFO [train.py:715] (7/8) Epoch 12, batch 15700, loss[loss=0.13, simple_loss=0.2008, pruned_loss=0.02955, over 4889.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03196, over 973166.25 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:14:58,369 INFO [train.py:715] (7/8) Epoch 12, batch 15750, loss[loss=0.1069, simple_loss=0.1869, pruned_loss=0.01349, over 4912.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03256, over 973458.90 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:15:36,103 INFO [train.py:715] (7/8) Epoch 12, batch 15800, loss[loss=0.1321, simple_loss=0.2053, pruned_loss=0.02946, over 4762.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03208, over 972742.12 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:16:14,195 INFO [train.py:715] (7/8) Epoch 12, batch 15850, loss[loss=0.1474, simple_loss=0.2249, pruned_loss=0.03494, over 4683.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.0319, over 972627.85 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:16:51,695 INFO [train.py:715] (7/8) Epoch 12, batch 15900, loss[loss=0.1607, simple_loss=0.2256, pruned_loss=0.04787, over 4746.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03208, over 971478.86 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:17:29,508 INFO [train.py:715] (7/8) Epoch 12, batch 15950, loss[loss=0.1425, simple_loss=0.2051, pruned_loss=0.03993, over 4862.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03213, over 971745.61 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:18:07,570 INFO [train.py:715] (7/8) Epoch 12, batch 16000, loss[loss=0.1588, simple_loss=0.2232, pruned_loss=0.04716, over 4814.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03181, over 972297.32 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:18:47,326 INFO [train.py:715] (7/8) Epoch 12, batch 16050, loss[loss=0.129, simple_loss=0.2109, pruned_loss=0.02353, over 4913.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03195, over 972604.86 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:19:25,277 INFO [train.py:715] (7/8) Epoch 12, batch 16100, loss[loss=0.118, simple_loss=0.1864, pruned_loss=0.02475, over 4843.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03186, over 973283.66 frames.], batch size: 32, lr: 1.81e-04 +2022-05-07 11:20:04,188 INFO [train.py:715] (7/8) Epoch 12, batch 16150, loss[loss=0.1533, simple_loss=0.2316, pruned_loss=0.03749, over 4949.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03192, over 972880.45 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:20:43,068 INFO [train.py:715] (7/8) Epoch 12, batch 16200, loss[loss=0.1102, simple_loss=0.1834, pruned_loss=0.01852, over 4855.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03197, over 973449.41 frames.], batch size: 20, lr: 1.81e-04 +2022-05-07 11:21:21,841 INFO [train.py:715] (7/8) Epoch 12, batch 16250, loss[loss=0.1344, simple_loss=0.2036, pruned_loss=0.03253, over 4830.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2114, pruned_loss=0.03191, over 972121.32 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:21:59,696 INFO [train.py:715] (7/8) Epoch 12, batch 16300, loss[loss=0.1397, simple_loss=0.2147, pruned_loss=0.0323, over 4772.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03169, over 972293.28 frames.], batch size: 14, lr: 1.81e-04 +2022-05-07 11:22:37,477 
INFO [train.py:715] (7/8) Epoch 12, batch 16350, loss[loss=0.1174, simple_loss=0.1892, pruned_loss=0.02276, over 4925.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03153, over 972951.88 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:23:16,255 INFO [train.py:715] (7/8) Epoch 12, batch 16400, loss[loss=0.1715, simple_loss=0.2381, pruned_loss=0.05241, over 4904.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2113, pruned_loss=0.03191, over 972457.36 frames.], batch size: 19, lr: 1.81e-04 +2022-05-07 11:23:54,212 INFO [train.py:715] (7/8) Epoch 12, batch 16450, loss[loss=0.1368, simple_loss=0.215, pruned_loss=0.0293, over 4950.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.0322, over 972655.70 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:24:33,096 INFO [train.py:715] (7/8) Epoch 12, batch 16500, loss[loss=0.122, simple_loss=0.1913, pruned_loss=0.02635, over 4825.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03193, over 971850.22 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:25:12,168 INFO [train.py:715] (7/8) Epoch 12, batch 16550, loss[loss=0.1403, simple_loss=0.1998, pruned_loss=0.04039, over 4853.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03179, over 972125.29 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:25:51,305 INFO [train.py:715] (7/8) Epoch 12, batch 16600, loss[loss=0.1403, simple_loss=0.2076, pruned_loss=0.03653, over 4853.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03096, over 972097.99 frames.], batch size: 34, lr: 1.81e-04 +2022-05-07 11:26:29,844 INFO [train.py:715] (7/8) Epoch 12, batch 16650, loss[loss=0.1399, simple_loss=0.2213, pruned_loss=0.02931, over 4783.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03119, over 971794.74 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:27:08,908 INFO [train.py:715] (7/8) Epoch 12, batch 16700, loss[loss=0.1481, simple_loss=0.2179, pruned_loss=0.03911, over 4971.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03126, over 971658.67 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:27:48,109 INFO [train.py:715] (7/8) Epoch 12, batch 16750, loss[loss=0.1329, simple_loss=0.1899, pruned_loss=0.03798, over 4829.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2089, pruned_loss=0.03132, over 972074.74 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:28:26,497 INFO [train.py:715] (7/8) Epoch 12, batch 16800, loss[loss=0.1163, simple_loss=0.1929, pruned_loss=0.01988, over 4837.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03146, over 972710.35 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:29:05,272 INFO [train.py:715] (7/8) Epoch 12, batch 16850, loss[loss=0.1447, simple_loss=0.2106, pruned_loss=0.03945, over 4938.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.032, over 972118.42 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:29:44,423 INFO [train.py:715] (7/8) Epoch 12, batch 16900, loss[loss=0.1332, simple_loss=0.2215, pruned_loss=0.02245, over 4964.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2111, pruned_loss=0.03267, over 972987.34 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:30:24,179 INFO [train.py:715] (7/8) Epoch 12, batch 16950, loss[loss=0.1319, simple_loss=0.1991, pruned_loss=0.03233, over 4795.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03232, over 972410.77 frames.], batch size: 14, lr: 1.81e-04 +2022-05-07 11:31:02,694 INFO 
[train.py:715] (7/8) Epoch 12, batch 17000, loss[loss=0.1391, simple_loss=0.2063, pruned_loss=0.03598, over 4737.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03227, over 971825.50 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:31:40,878 INFO [train.py:715] (7/8) Epoch 12, batch 17050, loss[loss=0.1388, simple_loss=0.2203, pruned_loss=0.02863, over 4926.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2113, pruned_loss=0.03225, over 971487.46 frames.], batch size: 18, lr: 1.81e-04 +2022-05-07 11:32:19,762 INFO [train.py:715] (7/8) Epoch 12, batch 17100, loss[loss=0.134, simple_loss=0.2104, pruned_loss=0.02877, over 4846.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03203, over 972018.64 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:32:58,566 INFO [train.py:715] (7/8) Epoch 12, batch 17150, loss[loss=0.128, simple_loss=0.2012, pruned_loss=0.02743, over 4829.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03197, over 972556.49 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:33:37,594 INFO [train.py:715] (7/8) Epoch 12, batch 17200, loss[loss=0.1562, simple_loss=0.2315, pruned_loss=0.04044, over 4813.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03194, over 972789.47 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:34:16,026 INFO [train.py:715] (7/8) Epoch 12, batch 17250, loss[loss=0.1315, simple_loss=0.1989, pruned_loss=0.03203, over 4771.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03257, over 972947.46 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:34:54,492 INFO [train.py:715] (7/8) Epoch 12, batch 17300, loss[loss=0.1192, simple_loss=0.193, pruned_loss=0.02266, over 4793.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03219, over 972346.04 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:35:32,126 INFO [train.py:715] (7/8) Epoch 12, batch 17350, loss[loss=0.1204, simple_loss=0.2017, pruned_loss=0.01952, over 4896.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03143, over 972056.14 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:36:10,078 INFO [train.py:715] (7/8) Epoch 12, batch 17400, loss[loss=0.1416, simple_loss=0.209, pruned_loss=0.03709, over 4985.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03139, over 972641.25 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:36:47,845 INFO [train.py:715] (7/8) Epoch 12, batch 17450, loss[loss=0.1516, simple_loss=0.2261, pruned_loss=0.03855, over 4884.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03192, over 972431.62 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:37:26,179 INFO [train.py:715] (7/8) Epoch 12, batch 17500, loss[loss=0.1234, simple_loss=0.1912, pruned_loss=0.02778, over 4984.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03199, over 972560.71 frames.], batch size: 28, lr: 1.81e-04 +2022-05-07 11:38:04,043 INFO [train.py:715] (7/8) Epoch 12, batch 17550, loss[loss=0.1452, simple_loss=0.2196, pruned_loss=0.03542, over 4866.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03175, over 972782.03 frames.], batch size: 39, lr: 1.81e-04 +2022-05-07 11:38:42,238 INFO [train.py:715] (7/8) Epoch 12, batch 17600, loss[loss=0.142, simple_loss=0.2189, pruned_loss=0.03254, over 4923.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03134, over 972658.12 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:39:19,885 INFO 
[train.py:715] (7/8) Epoch 12, batch 17650, loss[loss=0.1446, simple_loss=0.2166, pruned_loss=0.03625, over 4862.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03137, over 972706.00 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:39:57,991 INFO [train.py:715] (7/8) Epoch 12, batch 17700, loss[loss=0.1326, simple_loss=0.2033, pruned_loss=0.03093, over 4989.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.0314, over 971963.68 frames.], batch size: 25, lr: 1.81e-04 +2022-05-07 11:40:36,849 INFO [train.py:715] (7/8) Epoch 12, batch 17750, loss[loss=0.1821, simple_loss=0.2433, pruned_loss=0.06042, over 4987.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03217, over 972431.68 frames.], batch size: 31, lr: 1.81e-04 +2022-05-07 11:41:15,690 INFO [train.py:715] (7/8) Epoch 12, batch 17800, loss[loss=0.1432, simple_loss=0.2129, pruned_loss=0.03672, over 4877.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.0316, over 973044.25 frames.], batch size: 32, lr: 1.81e-04 +2022-05-07 11:41:54,189 INFO [train.py:715] (7/8) Epoch 12, batch 17850, loss[loss=0.1325, simple_loss=0.2146, pruned_loss=0.0252, over 4801.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03141, over 972233.13 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:42:32,958 INFO [train.py:715] (7/8) Epoch 12, batch 17900, loss[loss=0.1547, simple_loss=0.2382, pruned_loss=0.03561, over 4822.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03159, over 972598.89 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:43:10,436 INFO [train.py:715] (7/8) Epoch 12, batch 17950, loss[loss=0.1217, simple_loss=0.1946, pruned_loss=0.02445, over 4852.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03113, over 972180.79 frames.], batch size: 20, lr: 1.81e-04 +2022-05-07 11:43:48,629 INFO [train.py:715] (7/8) Epoch 12, batch 18000, loss[loss=0.1371, simple_loss=0.2068, pruned_loss=0.03371, over 4962.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03154, over 972708.79 frames.], batch size: 35, lr: 1.81e-04 +2022-05-07 11:43:48,629 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 11:43:58,182 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.106, simple_loss=0.19, pruned_loss=0.011, over 914524.00 frames. 
+2022-05-07 11:44:36,606 INFO [train.py:715] (7/8) Epoch 12, batch 18050, loss[loss=0.1302, simple_loss=0.1945, pruned_loss=0.03297, over 4848.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03116, over 972238.48 frames.], batch size: 20, lr: 1.81e-04 +2022-05-07 11:45:14,477 INFO [train.py:715] (7/8) Epoch 12, batch 18100, loss[loss=0.1662, simple_loss=0.2453, pruned_loss=0.04353, over 4870.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03165, over 972158.23 frames.], batch size: 16, lr: 1.81e-04 +2022-05-07 11:45:52,628 INFO [train.py:715] (7/8) Epoch 12, batch 18150, loss[loss=0.1284, simple_loss=0.2086, pruned_loss=0.02408, over 4916.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03213, over 971822.15 frames.], batch size: 17, lr: 1.81e-04 +2022-05-07 11:46:30,451 INFO [train.py:715] (7/8) Epoch 12, batch 18200, loss[loss=0.125, simple_loss=0.2009, pruned_loss=0.02451, over 4986.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2114, pruned_loss=0.03246, over 971663.72 frames.], batch size: 28, lr: 1.81e-04 +2022-05-07 11:47:08,251 INFO [train.py:715] (7/8) Epoch 12, batch 18250, loss[loss=0.1672, simple_loss=0.2388, pruned_loss=0.04783, over 4825.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.03257, over 973033.95 frames.], batch size: 26, lr: 1.81e-04 +2022-05-07 11:47:46,408 INFO [train.py:715] (7/8) Epoch 12, batch 18300, loss[loss=0.1324, simple_loss=0.1977, pruned_loss=0.03361, over 4837.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2119, pruned_loss=0.03269, over 973255.60 frames.], batch size: 30, lr: 1.81e-04 +2022-05-07 11:48:24,292 INFO [train.py:715] (7/8) Epoch 12, batch 18350, loss[loss=0.1319, simple_loss=0.2038, pruned_loss=0.03, over 4773.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2119, pruned_loss=0.03261, over 972793.31 frames.], batch size: 14, lr: 1.81e-04 +2022-05-07 11:49:02,250 INFO [train.py:715] (7/8) Epoch 12, batch 18400, loss[loss=0.1448, simple_loss=0.2202, pruned_loss=0.03473, over 4938.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2108, pruned_loss=0.03245, over 973100.13 frames.], batch size: 21, lr: 1.81e-04 +2022-05-07 11:49:39,750 INFO [train.py:715] (7/8) Epoch 12, batch 18450, loss[loss=0.1603, simple_loss=0.2281, pruned_loss=0.04623, over 4834.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.0319, over 973277.54 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:50:17,836 INFO [train.py:715] (7/8) Epoch 12, batch 18500, loss[loss=0.1203, simple_loss=0.1913, pruned_loss=0.0247, over 4967.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2122, pruned_loss=0.03253, over 972908.44 frames.], batch size: 14, lr: 1.81e-04 +2022-05-07 11:50:55,666 INFO [train.py:715] (7/8) Epoch 12, batch 18550, loss[loss=0.1085, simple_loss=0.1853, pruned_loss=0.01589, over 4928.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03261, over 972627.94 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:51:33,501 INFO [train.py:715] (7/8) Epoch 12, batch 18600, loss[loss=0.1261, simple_loss=0.1991, pruned_loss=0.02658, over 4942.00 frames.], tot_loss[loss=0.138, simple_loss=0.2116, pruned_loss=0.03217, over 972984.15 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:52:11,113 INFO [train.py:715] (7/8) Epoch 12, batch 18650, loss[loss=0.1478, simple_loss=0.2147, pruned_loss=0.04043, over 4992.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03216, over 972721.62 frames.], batch size: 14, lr: 1.81e-04 +2022-05-07 
11:52:48,673 INFO [train.py:715] (7/8) Epoch 12, batch 18700, loss[loss=0.1096, simple_loss=0.1848, pruned_loss=0.01715, over 4873.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2118, pruned_loss=0.0322, over 972718.05 frames.], batch size: 22, lr: 1.81e-04 +2022-05-07 11:53:26,073 INFO [train.py:715] (7/8) Epoch 12, batch 18750, loss[loss=0.1396, simple_loss=0.2207, pruned_loss=0.02922, over 4950.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2114, pruned_loss=0.03224, over 971620.64 frames.], batch size: 23, lr: 1.81e-04 +2022-05-07 11:54:04,013 INFO [train.py:715] (7/8) Epoch 12, batch 18800, loss[loss=0.1439, simple_loss=0.2196, pruned_loss=0.03408, over 4819.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03159, over 972135.84 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:54:41,890 INFO [train.py:715] (7/8) Epoch 12, batch 18850, loss[loss=0.1479, simple_loss=0.2197, pruned_loss=0.03809, over 4839.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03186, over 972193.40 frames.], batch size: 15, lr: 1.81e-04 +2022-05-07 11:55:19,707 INFO [train.py:715] (7/8) Epoch 12, batch 18900, loss[loss=0.1324, simple_loss=0.2048, pruned_loss=0.03002, over 4930.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03216, over 972270.23 frames.], batch size: 29, lr: 1.81e-04 +2022-05-07 11:55:57,998 INFO [train.py:715] (7/8) Epoch 12, batch 18950, loss[loss=0.09946, simple_loss=0.177, pruned_loss=0.01098, over 4977.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2116, pruned_loss=0.03239, over 972757.94 frames.], batch size: 24, lr: 1.81e-04 +2022-05-07 11:56:35,807 INFO [train.py:715] (7/8) Epoch 12, batch 19000, loss[loss=0.1508, simple_loss=0.2281, pruned_loss=0.03673, over 4635.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2112, pruned_loss=0.03212, over 972876.92 frames.], batch size: 13, lr: 1.81e-04 +2022-05-07 11:57:13,288 INFO [train.py:715] (7/8) Epoch 12, batch 19050, loss[loss=0.1498, simple_loss=0.2205, pruned_loss=0.03953, over 4886.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03198, over 972994.20 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 11:57:50,568 INFO [train.py:715] (7/8) Epoch 12, batch 19100, loss[loss=0.1642, simple_loss=0.2184, pruned_loss=0.055, over 4755.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03216, over 972851.60 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 11:58:28,558 INFO [train.py:715] (7/8) Epoch 12, batch 19150, loss[loss=0.118, simple_loss=0.1878, pruned_loss=0.0241, over 4858.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.03231, over 973037.76 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 11:59:07,193 INFO [train.py:715] (7/8) Epoch 12, batch 19200, loss[loss=0.1225, simple_loss=0.1879, pruned_loss=0.02852, over 4916.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03179, over 972828.06 frames.], batch size: 23, lr: 1.80e-04 +2022-05-07 11:59:45,241 INFO [train.py:715] (7/8) Epoch 12, batch 19250, loss[loss=0.1347, simple_loss=0.2061, pruned_loss=0.03163, over 4824.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03155, over 973266.46 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 12:00:23,721 INFO [train.py:715] (7/8) Epoch 12, batch 19300, loss[loss=0.1339, simple_loss=0.2024, pruned_loss=0.03271, over 4988.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03182, over 972534.66 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:01:01,910 
INFO [train.py:715] (7/8) Epoch 12, batch 19350, loss[loss=0.1288, simple_loss=0.2066, pruned_loss=0.0255, over 4949.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03166, over 971580.85 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:01:39,912 INFO [train.py:715] (7/8) Epoch 12, batch 19400, loss[loss=0.1233, simple_loss=0.1986, pruned_loss=0.02401, over 4761.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03194, over 971395.01 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:02:17,950 INFO [train.py:715] (7/8) Epoch 12, batch 19450, loss[loss=0.1571, simple_loss=0.2413, pruned_loss=0.03643, over 4782.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03174, over 971191.97 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:02:56,770 INFO [train.py:715] (7/8) Epoch 12, batch 19500, loss[loss=0.1294, simple_loss=0.215, pruned_loss=0.02187, over 4895.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03205, over 969901.36 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:03:35,595 INFO [train.py:715] (7/8) Epoch 12, batch 19550, loss[loss=0.1293, simple_loss=0.2061, pruned_loss=0.02626, over 4929.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03168, over 970983.51 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:04:14,317 INFO [train.py:715] (7/8) Epoch 12, batch 19600, loss[loss=0.1402, simple_loss=0.2157, pruned_loss=0.03237, over 4770.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03139, over 971460.21 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:04:53,462 INFO [train.py:715] (7/8) Epoch 12, batch 19650, loss[loss=0.1343, simple_loss=0.2154, pruned_loss=0.02659, over 4780.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2115, pruned_loss=0.03187, over 971236.82 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:05:32,629 INFO [train.py:715] (7/8) Epoch 12, batch 19700, loss[loss=0.1648, simple_loss=0.2341, pruned_loss=0.04775, over 4987.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2119, pruned_loss=0.03165, over 970950.33 frames.], batch size: 31, lr: 1.80e-04 +2022-05-07 12:06:12,004 INFO [train.py:715] (7/8) Epoch 12, batch 19750, loss[loss=0.1322, simple_loss=0.2015, pruned_loss=0.0315, over 4824.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2129, pruned_loss=0.0322, over 971606.25 frames.], batch size: 27, lr: 1.80e-04 +2022-05-07 12:06:52,639 INFO [train.py:715] (7/8) Epoch 12, batch 19800, loss[loss=0.1459, simple_loss=0.2231, pruned_loss=0.0343, over 4812.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2122, pruned_loss=0.032, over 971558.46 frames.], batch size: 27, lr: 1.80e-04 +2022-05-07 12:07:33,085 INFO [train.py:715] (7/8) Epoch 12, batch 19850, loss[loss=0.1493, simple_loss=0.2215, pruned_loss=0.03859, over 4894.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2134, pruned_loss=0.03264, over 971587.38 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:08:14,256 INFO [train.py:715] (7/8) Epoch 12, batch 19900, loss[loss=0.1496, simple_loss=0.2394, pruned_loss=0.02989, over 4822.00 frames.], tot_loss[loss=0.1391, simple_loss=0.213, pruned_loss=0.03262, over 971621.93 frames.], batch size: 27, lr: 1.80e-04 +2022-05-07 12:08:54,592 INFO [train.py:715] (7/8) Epoch 12, batch 19950, loss[loss=0.1266, simple_loss=0.1897, pruned_loss=0.03178, over 4946.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2122, pruned_loss=0.03244, over 971743.23 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:09:35,206 INFO 
[train.py:715] (7/8) Epoch 12, batch 20000, loss[loss=0.1426, simple_loss=0.2131, pruned_loss=0.03603, over 4981.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.0318, over 972387.58 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:10:15,438 INFO [train.py:715] (7/8) Epoch 12, batch 20050, loss[loss=0.1037, simple_loss=0.1805, pruned_loss=0.01341, over 4874.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03184, over 972125.08 frames.], batch size: 22, lr: 1.80e-04 +2022-05-07 12:10:55,690 INFO [train.py:715] (7/8) Epoch 12, batch 20100, loss[loss=0.1198, simple_loss=0.1851, pruned_loss=0.02727, over 4984.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03145, over 972724.37 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:11:35,655 INFO [train.py:715] (7/8) Epoch 12, batch 20150, loss[loss=0.1229, simple_loss=0.2025, pruned_loss=0.02162, over 4891.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03131, over 972864.59 frames.], batch size: 22, lr: 1.80e-04 +2022-05-07 12:12:16,045 INFO [train.py:715] (7/8) Epoch 12, batch 20200, loss[loss=0.1335, simple_loss=0.217, pruned_loss=0.02507, over 4946.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03123, over 973023.47 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:12:56,153 INFO [train.py:715] (7/8) Epoch 12, batch 20250, loss[loss=0.1556, simple_loss=0.231, pruned_loss=0.0401, over 4749.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2105, pruned_loss=0.03112, over 973040.12 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:13:36,209 INFO [train.py:715] (7/8) Epoch 12, batch 20300, loss[loss=0.1484, simple_loss=0.218, pruned_loss=0.03936, over 4913.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2104, pruned_loss=0.03093, over 972986.37 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:14:16,797 INFO [train.py:715] (7/8) Epoch 12, batch 20350, loss[loss=0.1348, simple_loss=0.2099, pruned_loss=0.02988, over 4791.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03088, over 972560.09 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:14:56,467 INFO [train.py:715] (7/8) Epoch 12, batch 20400, loss[loss=0.1342, simple_loss=0.2106, pruned_loss=0.02893, over 4970.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03096, over 971948.27 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:15:36,234 INFO [train.py:715] (7/8) Epoch 12, batch 20450, loss[loss=0.1499, simple_loss=0.2169, pruned_loss=0.04144, over 4941.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03098, over 972627.96 frames.], batch size: 23, lr: 1.80e-04 +2022-05-07 12:16:15,877 INFO [train.py:715] (7/8) Epoch 12, batch 20500, loss[loss=0.1377, simple_loss=0.2162, pruned_loss=0.02961, over 4989.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03122, over 973236.19 frames.], batch size: 25, lr: 1.80e-04 +2022-05-07 12:16:56,322 INFO [train.py:715] (7/8) Epoch 12, batch 20550, loss[loss=0.1324, simple_loss=0.2179, pruned_loss=0.02343, over 4919.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03143, over 973466.49 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:17:36,238 INFO [train.py:715] (7/8) Epoch 12, batch 20600, loss[loss=0.1728, simple_loss=0.239, pruned_loss=0.05328, over 4803.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2117, pruned_loss=0.03206, over 974402.34 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:18:15,188 INFO [train.py:715] 
(7/8) Epoch 12, batch 20650, loss[loss=0.1459, simple_loss=0.2219, pruned_loss=0.03491, over 4827.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2113, pruned_loss=0.03212, over 973674.99 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:18:54,297 INFO [train.py:715] (7/8) Epoch 12, batch 20700, loss[loss=0.1426, simple_loss=0.2196, pruned_loss=0.0328, over 4823.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03188, over 973490.82 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 12:19:32,273 INFO [train.py:715] (7/8) Epoch 12, batch 20750, loss[loss=0.125, simple_loss=0.1974, pruned_loss=0.02626, over 4790.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03186, over 973513.86 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:20:10,574 INFO [train.py:715] (7/8) Epoch 12, batch 20800, loss[loss=0.1217, simple_loss=0.1934, pruned_loss=0.02504, over 4820.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.0321, over 973191.51 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:20:48,327 INFO [train.py:715] (7/8) Epoch 12, batch 20850, loss[loss=0.1734, simple_loss=0.2361, pruned_loss=0.05535, over 4886.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2114, pruned_loss=0.03235, over 972604.53 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:21:26,468 INFO [train.py:715] (7/8) Epoch 12, batch 20900, loss[loss=0.1324, simple_loss=0.1996, pruned_loss=0.03261, over 4866.00 frames.], tot_loss[loss=0.1389, simple_loss=0.2121, pruned_loss=0.03285, over 973166.67 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 12:22:04,013 INFO [train.py:715] (7/8) Epoch 12, batch 20950, loss[loss=0.1261, simple_loss=0.2059, pruned_loss=0.02317, over 4821.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.03256, over 972717.19 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:22:41,375 INFO [train.py:715] (7/8) Epoch 12, batch 21000, loss[loss=0.1625, simple_loss=0.2326, pruned_loss=0.04625, over 4785.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03252, over 972850.83 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:22:41,376 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 12:22:50,900 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1056, simple_loss=0.1896, pruned_loss=0.01081, over 914524.00 frames. 
+2022-05-07 12:23:28,722 INFO [train.py:715] (7/8) Epoch 12, batch 21050, loss[loss=0.1432, simple_loss=0.2245, pruned_loss=0.03092, over 4753.00 frames.], tot_loss[loss=0.1385, simple_loss=0.212, pruned_loss=0.03253, over 972602.80 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:24:06,827 INFO [train.py:715] (7/8) Epoch 12, batch 21100, loss[loss=0.1451, simple_loss=0.2229, pruned_loss=0.03368, over 4693.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03283, over 972749.12 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:24:44,627 INFO [train.py:715] (7/8) Epoch 12, batch 21150, loss[loss=0.1568, simple_loss=0.2089, pruned_loss=0.0523, over 4852.00 frames.], tot_loss[loss=0.1395, simple_loss=0.2128, pruned_loss=0.03309, over 972973.80 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 12:25:22,419 INFO [train.py:715] (7/8) Epoch 12, batch 21200, loss[loss=0.1328, simple_loss=0.2051, pruned_loss=0.03024, over 4938.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03318, over 972876.56 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:26:00,693 INFO [train.py:715] (7/8) Epoch 12, batch 21250, loss[loss=0.1172, simple_loss=0.1948, pruned_loss=0.01978, over 4827.00 frames.], tot_loss[loss=0.1396, simple_loss=0.2128, pruned_loss=0.03313, over 972756.39 frames.], batch size: 13, lr: 1.80e-04 +2022-05-07 12:26:39,510 INFO [train.py:715] (7/8) Epoch 12, batch 21300, loss[loss=0.1314, simple_loss=0.2014, pruned_loss=0.03069, over 4846.00 frames.], tot_loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03282, over 972609.23 frames.], batch size: 30, lr: 1.80e-04 +2022-05-07 12:27:17,264 INFO [train.py:715] (7/8) Epoch 12, batch 21350, loss[loss=0.1448, simple_loss=0.2244, pruned_loss=0.03262, over 4922.00 frames.], tot_loss[loss=0.1385, simple_loss=0.2118, pruned_loss=0.03266, over 971733.64 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:27:56,358 INFO [train.py:715] (7/8) Epoch 12, batch 21400, loss[loss=0.1359, simple_loss=0.2131, pruned_loss=0.0294, over 4749.00 frames.], tot_loss[loss=0.1383, simple_loss=0.2116, pruned_loss=0.03254, over 972179.20 frames.], batch size: 16, lr: 1.80e-04 +2022-05-07 12:28:35,912 INFO [train.py:715] (7/8) Epoch 12, batch 21450, loss[loss=0.1492, simple_loss=0.2166, pruned_loss=0.04089, over 4972.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03206, over 972449.97 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:29:14,510 INFO [train.py:715] (7/8) Epoch 12, batch 21500, loss[loss=0.1525, simple_loss=0.217, pruned_loss=0.04406, over 4987.00 frames.], tot_loss[loss=0.137, simple_loss=0.2098, pruned_loss=0.03205, over 972492.60 frames.], batch size: 28, lr: 1.80e-04 +2022-05-07 12:29:53,098 INFO [train.py:715] (7/8) Epoch 12, batch 21550, loss[loss=0.1745, simple_loss=0.2424, pruned_loss=0.05331, over 4867.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2107, pruned_loss=0.03243, over 972660.86 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 12:30:31,274 INFO [train.py:715] (7/8) Epoch 12, batch 21600, loss[loss=0.1578, simple_loss=0.2276, pruned_loss=0.04405, over 4805.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2106, pruned_loss=0.03229, over 972008.91 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:31:09,733 INFO [train.py:715] (7/8) Epoch 12, batch 21650, loss[loss=0.1184, simple_loss=0.1998, pruned_loss=0.0185, over 4805.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03197, over 973041.76 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 
12:31:46,934 INFO [train.py:715] (7/8) Epoch 12, batch 21700, loss[loss=0.1354, simple_loss=0.2149, pruned_loss=0.02791, over 4943.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03187, over 974263.89 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:32:25,495 INFO [train.py:715] (7/8) Epoch 12, batch 21750, loss[loss=0.1221, simple_loss=0.1966, pruned_loss=0.02384, over 4890.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03196, over 974830.19 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:33:04,220 INFO [train.py:715] (7/8) Epoch 12, batch 21800, loss[loss=0.129, simple_loss=0.1951, pruned_loss=0.03151, over 4816.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03145, over 973821.16 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 12:33:42,111 INFO [train.py:715] (7/8) Epoch 12, batch 21850, loss[loss=0.1456, simple_loss=0.2235, pruned_loss=0.03381, over 4939.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.0314, over 973121.89 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:34:19,726 INFO [train.py:715] (7/8) Epoch 12, batch 21900, loss[loss=0.1309, simple_loss=0.2037, pruned_loss=0.02903, over 4783.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03158, over 973517.05 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:34:58,473 INFO [train.py:715] (7/8) Epoch 12, batch 21950, loss[loss=0.1127, simple_loss=0.1898, pruned_loss=0.01782, over 4829.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03132, over 973282.64 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 12:35:37,478 INFO [train.py:715] (7/8) Epoch 12, batch 22000, loss[loss=0.1459, simple_loss=0.2263, pruned_loss=0.03279, over 4863.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03161, over 974319.44 frames.], batch size: 20, lr: 1.80e-04 +2022-05-07 12:36:15,710 INFO [train.py:715] (7/8) Epoch 12, batch 22050, loss[loss=0.1386, simple_loss=0.2087, pruned_loss=0.03425, over 4859.00 frames.], tot_loss[loss=0.1375, simple_loss=0.211, pruned_loss=0.03202, over 972968.67 frames.], batch size: 20, lr: 1.80e-04 +2022-05-07 12:36:54,706 INFO [train.py:715] (7/8) Epoch 12, batch 22100, loss[loss=0.1594, simple_loss=0.2367, pruned_loss=0.04101, over 4908.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03214, over 972839.04 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:37:33,659 INFO [train.py:715] (7/8) Epoch 12, batch 22150, loss[loss=0.1264, simple_loss=0.203, pruned_loss=0.0249, over 4685.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03159, over 972288.67 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:38:11,933 INFO [train.py:715] (7/8) Epoch 12, batch 22200, loss[loss=0.1462, simple_loss=0.2281, pruned_loss=0.03213, over 4962.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03146, over 972553.12 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:38:49,699 INFO [train.py:715] (7/8) Epoch 12, batch 22250, loss[loss=0.1496, simple_loss=0.2176, pruned_loss=0.04079, over 4792.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03179, over 971646.38 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:39:30,400 INFO [train.py:715] (7/8) Epoch 12, batch 22300, loss[loss=0.147, simple_loss=0.222, pruned_loss=0.03601, over 4978.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03133, over 971365.30 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:40:08,651 
INFO [train.py:715] (7/8) Epoch 12, batch 22350, loss[loss=0.1358, simple_loss=0.2081, pruned_loss=0.03173, over 4966.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03159, over 971825.26 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:40:46,758 INFO [train.py:715] (7/8) Epoch 12, batch 22400, loss[loss=0.1108, simple_loss=0.1925, pruned_loss=0.01454, over 4768.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03145, over 971347.47 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:41:25,341 INFO [train.py:715] (7/8) Epoch 12, batch 22450, loss[loss=0.1494, simple_loss=0.2211, pruned_loss=0.03885, over 4900.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03161, over 971694.68 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:42:03,781 INFO [train.py:715] (7/8) Epoch 12, batch 22500, loss[loss=0.1434, simple_loss=0.2155, pruned_loss=0.03562, over 4795.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03165, over 971725.70 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:42:42,487 INFO [train.py:715] (7/8) Epoch 12, batch 22550, loss[loss=0.1301, simple_loss=0.2033, pruned_loss=0.02844, over 4847.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03135, over 971470.51 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 12:43:20,634 INFO [train.py:715] (7/8) Epoch 12, batch 22600, loss[loss=0.1277, simple_loss=0.2015, pruned_loss=0.027, over 4932.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03164, over 972412.50 frames.], batch size: 29, lr: 1.80e-04 +2022-05-07 12:43:58,689 INFO [train.py:715] (7/8) Epoch 12, batch 22650, loss[loss=0.1209, simple_loss=0.1972, pruned_loss=0.02234, over 4970.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03132, over 972741.53 frames.], batch size: 25, lr: 1.80e-04 +2022-05-07 12:44:36,593 INFO [train.py:715] (7/8) Epoch 12, batch 22700, loss[loss=0.1503, simple_loss=0.224, pruned_loss=0.03835, over 4689.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03159, over 972730.76 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:45:14,816 INFO [train.py:715] (7/8) Epoch 12, batch 22750, loss[loss=0.1277, simple_loss=0.1979, pruned_loss=0.0287, over 4979.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03187, over 972903.76 frames.], batch size: 25, lr: 1.80e-04 +2022-05-07 12:45:53,320 INFO [train.py:715] (7/8) Epoch 12, batch 22800, loss[loss=0.1301, simple_loss=0.2027, pruned_loss=0.02876, over 4937.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03202, over 972129.00 frames.], batch size: 29, lr: 1.80e-04 +2022-05-07 12:46:32,313 INFO [train.py:715] (7/8) Epoch 12, batch 22850, loss[loss=0.104, simple_loss=0.1759, pruned_loss=0.01603, over 4776.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2115, pruned_loss=0.03183, over 972325.46 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:47:10,526 INFO [train.py:715] (7/8) Epoch 12, batch 22900, loss[loss=0.1473, simple_loss=0.2255, pruned_loss=0.03455, over 4963.00 frames.], tot_loss[loss=0.138, simple_loss=0.2115, pruned_loss=0.03223, over 971425.12 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:47:48,404 INFO [train.py:715] (7/8) Epoch 12, batch 22950, loss[loss=0.1663, simple_loss=0.2385, pruned_loss=0.04705, over 4794.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03187, over 971904.29 frames.], batch size: 17, lr: 1.80e-04 +2022-05-07 12:48:26,680 INFO 
[train.py:715] (7/8) Epoch 12, batch 23000, loss[loss=0.1324, simple_loss=0.2097, pruned_loss=0.02754, over 4772.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03151, over 971968.68 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:49:04,948 INFO [train.py:715] (7/8) Epoch 12, batch 23050, loss[loss=0.1392, simple_loss=0.2236, pruned_loss=0.02746, over 4751.00 frames.], tot_loss[loss=0.137, simple_loss=0.2109, pruned_loss=0.0315, over 972352.43 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:49:43,056 INFO [train.py:715] (7/8) Epoch 12, batch 23100, loss[loss=0.1514, simple_loss=0.2125, pruned_loss=0.04515, over 4760.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03137, over 972657.12 frames.], batch size: 12, lr: 1.80e-04 +2022-05-07 12:50:21,954 INFO [train.py:715] (7/8) Epoch 12, batch 23150, loss[loss=0.1406, simple_loss=0.2168, pruned_loss=0.03219, over 4783.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03144, over 972500.13 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:51:01,023 INFO [train.py:715] (7/8) Epoch 12, batch 23200, loss[loss=0.1207, simple_loss=0.197, pruned_loss=0.02217, over 4948.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03143, over 973266.93 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:51:39,416 INFO [train.py:715] (7/8) Epoch 12, batch 23250, loss[loss=0.1448, simple_loss=0.2214, pruned_loss=0.03409, over 4969.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03163, over 973211.42 frames.], batch size: 35, lr: 1.80e-04 +2022-05-07 12:52:17,110 INFO [train.py:715] (7/8) Epoch 12, batch 23300, loss[loss=0.1059, simple_loss=0.1739, pruned_loss=0.019, over 4893.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03163, over 973102.48 frames.], batch size: 19, lr: 1.80e-04 +2022-05-07 12:52:55,810 INFO [train.py:715] (7/8) Epoch 12, batch 23350, loss[loss=0.1372, simple_loss=0.2005, pruned_loss=0.03696, over 4973.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2109, pruned_loss=0.03192, over 972751.40 frames.], batch size: 31, lr: 1.80e-04 +2022-05-07 12:53:33,839 INFO [train.py:715] (7/8) Epoch 12, batch 23400, loss[loss=0.1192, simple_loss=0.1937, pruned_loss=0.02235, over 4903.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2113, pruned_loss=0.03186, over 972904.62 frames.], batch size: 18, lr: 1.80e-04 +2022-05-07 12:54:11,388 INFO [train.py:715] (7/8) Epoch 12, batch 23450, loss[loss=0.131, simple_loss=0.2125, pruned_loss=0.0248, over 4935.00 frames.], tot_loss[loss=0.1379, simple_loss=0.2118, pruned_loss=0.03196, over 973043.46 frames.], batch size: 23, lr: 1.80e-04 +2022-05-07 12:54:49,573 INFO [train.py:715] (7/8) Epoch 12, batch 23500, loss[loss=0.1983, simple_loss=0.2637, pruned_loss=0.06642, over 4950.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03174, over 973599.74 frames.], batch size: 39, lr: 1.80e-04 +2022-05-07 12:55:28,411 INFO [train.py:715] (7/8) Epoch 12, batch 23550, loss[loss=0.1479, simple_loss=0.2171, pruned_loss=0.03938, over 4845.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03157, over 973336.26 frames.], batch size: 32, lr: 1.80e-04 +2022-05-07 12:56:07,099 INFO [train.py:715] (7/8) Epoch 12, batch 23600, loss[loss=0.13, simple_loss=0.2083, pruned_loss=0.02582, over 4966.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03109, over 973073.57 frames.], batch size: 24, lr: 1.80e-04 +2022-05-07 12:56:45,799 INFO [train.py:715] 
(7/8) Epoch 12, batch 23650, loss[loss=0.1198, simple_loss=0.1925, pruned_loss=0.02356, over 4923.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03121, over 972908.33 frames.], batch size: 29, lr: 1.80e-04 +2022-05-07 12:57:24,209 INFO [train.py:715] (7/8) Epoch 12, batch 23700, loss[loss=0.1424, simple_loss=0.2104, pruned_loss=0.03718, over 4980.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03168, over 973167.57 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:58:02,489 INFO [train.py:715] (7/8) Epoch 12, batch 23750, loss[loss=0.1176, simple_loss=0.1974, pruned_loss=0.01895, over 4684.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03179, over 972632.09 frames.], batch size: 15, lr: 1.80e-04 +2022-05-07 12:58:41,203 INFO [train.py:715] (7/8) Epoch 12, batch 23800, loss[loss=0.1298, simple_loss=0.2045, pruned_loss=0.02753, over 4985.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03184, over 972812.47 frames.], batch size: 14, lr: 1.80e-04 +2022-05-07 12:59:20,121 INFO [train.py:715] (7/8) Epoch 12, batch 23850, loss[loss=0.1622, simple_loss=0.2403, pruned_loss=0.04203, over 4807.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03168, over 972927.08 frames.], batch size: 21, lr: 1.80e-04 +2022-05-07 12:59:59,699 INFO [train.py:715] (7/8) Epoch 12, batch 23900, loss[loss=0.1103, simple_loss=0.1892, pruned_loss=0.01566, over 4838.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03125, over 972807.79 frames.], batch size: 26, lr: 1.80e-04 +2022-05-07 13:00:39,415 INFO [train.py:715] (7/8) Epoch 12, batch 23950, loss[loss=0.1444, simple_loss=0.2215, pruned_loss=0.03367, over 4938.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03106, over 972707.74 frames.], batch size: 29, lr: 1.79e-04 +2022-05-07 13:01:18,254 INFO [train.py:715] (7/8) Epoch 12, batch 24000, loss[loss=0.1317, simple_loss=0.2022, pruned_loss=0.03061, over 4907.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03123, over 972973.05 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:01:18,254 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 13:01:27,803 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1054, simple_loss=0.1895, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-07 13:02:06,833 INFO [train.py:715] (7/8) Epoch 12, batch 24050, loss[loss=0.13, simple_loss=0.203, pruned_loss=0.02856, over 4773.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.0312, over 971450.34 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:02:47,353 INFO [train.py:715] (7/8) Epoch 12, batch 24100, loss[loss=0.1302, simple_loss=0.2013, pruned_loss=0.02955, over 4983.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03148, over 972027.15 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:03:27,829 INFO [train.py:715] (7/8) Epoch 12, batch 24150, loss[loss=0.1378, simple_loss=0.2115, pruned_loss=0.0321, over 4903.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03174, over 972388.29 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:04:07,864 INFO [train.py:715] (7/8) Epoch 12, batch 24200, loss[loss=0.09977, simple_loss=0.1696, pruned_loss=0.01496, over 4992.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03189, over 972856.53 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:04:47,989 INFO [train.py:715] (7/8) Epoch 12, batch 24250, loss[loss=0.1512, simple_loss=0.2212, pruned_loss=0.04063, over 4963.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03168, over 972872.00 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:05:28,029 INFO [train.py:715] (7/8) Epoch 12, batch 24300, loss[loss=0.1356, simple_loss=0.2073, pruned_loss=0.03198, over 4822.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03145, over 973268.18 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:06:07,759 INFO [train.py:715] (7/8) Epoch 12, batch 24350, loss[loss=0.1294, simple_loss=0.1962, pruned_loss=0.03132, over 4833.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03148, over 974173.25 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:06:47,579 INFO [train.py:715] (7/8) Epoch 12, batch 24400, loss[loss=0.1161, simple_loss=0.1827, pruned_loss=0.02474, over 4948.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03136, over 973872.39 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:07:27,538 INFO [train.py:715] (7/8) Epoch 12, batch 24450, loss[loss=0.1348, simple_loss=0.2211, pruned_loss=0.02429, over 4783.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.0316, over 973244.27 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:08:07,312 INFO [train.py:715] (7/8) Epoch 12, batch 24500, loss[loss=0.1339, simple_loss=0.2102, pruned_loss=0.02879, over 4757.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03152, over 972413.75 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:08:46,556 INFO [train.py:715] (7/8) Epoch 12, batch 24550, loss[loss=0.1273, simple_loss=0.1973, pruned_loss=0.02869, over 4783.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03206, over 972564.21 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:09:26,204 INFO [train.py:715] (7/8) Epoch 12, batch 24600, loss[loss=0.126, simple_loss=0.2011, pruned_loss=0.02542, over 4962.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03226, over 972395.16 frames.], batch size: 39, lr: 1.79e-04 +2022-05-07 13:10:05,927 INFO [train.py:715] (7/8) Epoch 12, batch 24650, loss[loss=0.1461, simple_loss=0.2159, pruned_loss=0.03812, over 4958.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.0321, over 972876.81 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 
13:10:45,627 INFO [train.py:715] (7/8) Epoch 12, batch 24700, loss[loss=0.1452, simple_loss=0.2203, pruned_loss=0.03503, over 4770.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03199, over 971571.09 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:11:24,792 INFO [train.py:715] (7/8) Epoch 12, batch 24750, loss[loss=0.1573, simple_loss=0.2265, pruned_loss=0.04407, over 4692.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03195, over 972309.15 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:12:04,999 INFO [train.py:715] (7/8) Epoch 12, batch 24800, loss[loss=0.1479, simple_loss=0.223, pruned_loss=0.03642, over 4909.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03143, over 970683.38 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:12:44,865 INFO [train.py:715] (7/8) Epoch 12, batch 24850, loss[loss=0.1315, simple_loss=0.2003, pruned_loss=0.03133, over 4974.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03118, over 971196.30 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:13:24,109 INFO [train.py:715] (7/8) Epoch 12, batch 24900, loss[loss=0.1454, simple_loss=0.2231, pruned_loss=0.03388, over 4815.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2107, pruned_loss=0.03145, over 970875.75 frames.], batch size: 27, lr: 1.79e-04 +2022-05-07 13:14:03,441 INFO [train.py:715] (7/8) Epoch 12, batch 24950, loss[loss=0.1485, simple_loss=0.2248, pruned_loss=0.03608, over 4812.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2115, pruned_loss=0.03192, over 971651.85 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:14:42,357 INFO [train.py:715] (7/8) Epoch 12, batch 25000, loss[loss=0.1196, simple_loss=0.1885, pruned_loss=0.02533, over 4990.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03149, over 971704.11 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:15:20,411 INFO [train.py:715] (7/8) Epoch 12, batch 25050, loss[loss=0.1337, simple_loss=0.2069, pruned_loss=0.03022, over 4906.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2113, pruned_loss=0.03166, over 971636.37 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:15:58,461 INFO [train.py:715] (7/8) Epoch 12, batch 25100, loss[loss=0.1052, simple_loss=0.1776, pruned_loss=0.01646, over 4964.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2108, pruned_loss=0.03143, over 972044.20 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:16:36,853 INFO [train.py:715] (7/8) Epoch 12, batch 25150, loss[loss=0.128, simple_loss=0.2045, pruned_loss=0.02578, over 4916.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2108, pruned_loss=0.03139, over 972473.64 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:17:15,105 INFO [train.py:715] (7/8) Epoch 12, batch 25200, loss[loss=0.1348, simple_loss=0.2075, pruned_loss=0.03103, over 4755.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.03099, over 972457.65 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:17:52,724 INFO [train.py:715] (7/8) Epoch 12, batch 25250, loss[loss=0.1154, simple_loss=0.1806, pruned_loss=0.02514, over 4792.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03099, over 972704.81 frames.], batch size: 12, lr: 1.79e-04 +2022-05-07 13:18:30,734 INFO [train.py:715] (7/8) Epoch 12, batch 25300, loss[loss=0.1233, simple_loss=0.2019, pruned_loss=0.02239, over 4776.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03102, over 972960.23 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 
13:19:08,817 INFO [train.py:715] (7/8) Epoch 12, batch 25350, loss[loss=0.1143, simple_loss=0.1908, pruned_loss=0.01893, over 4876.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03068, over 972534.07 frames.], batch size: 22, lr: 1.79e-04 +2022-05-07 13:19:47,789 INFO [train.py:715] (7/8) Epoch 12, batch 25400, loss[loss=0.155, simple_loss=0.2302, pruned_loss=0.03994, over 4707.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03069, over 972406.83 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:20:26,679 INFO [train.py:715] (7/8) Epoch 12, batch 25450, loss[loss=0.1603, simple_loss=0.2388, pruned_loss=0.04089, over 4781.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03128, over 972108.34 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:21:06,563 INFO [train.py:715] (7/8) Epoch 12, batch 25500, loss[loss=0.1453, simple_loss=0.2108, pruned_loss=0.03987, over 4748.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03109, over 972343.56 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:21:45,631 INFO [train.py:715] (7/8) Epoch 12, batch 25550, loss[loss=0.1416, simple_loss=0.2212, pruned_loss=0.031, over 4798.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03157, over 972680.09 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:22:23,735 INFO [train.py:715] (7/8) Epoch 12, batch 25600, loss[loss=0.1324, simple_loss=0.2125, pruned_loss=0.02617, over 4909.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03115, over 973116.99 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:23:02,064 INFO [train.py:715] (7/8) Epoch 12, batch 25650, loss[loss=0.1199, simple_loss=0.1951, pruned_loss=0.02234, over 4764.00 frames.], tot_loss[loss=0.136, simple_loss=0.2102, pruned_loss=0.03094, over 973129.27 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:23:40,756 INFO [train.py:715] (7/8) Epoch 12, batch 25700, loss[loss=0.1605, simple_loss=0.2332, pruned_loss=0.04394, over 4848.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2107, pruned_loss=0.03132, over 973978.42 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 13:24:19,543 INFO [train.py:715] (7/8) Epoch 12, batch 25750, loss[loss=0.118, simple_loss=0.1912, pruned_loss=0.02233, over 4887.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03115, over 974097.23 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:24:58,009 INFO [train.py:715] (7/8) Epoch 12, batch 25800, loss[loss=0.1265, simple_loss=0.2119, pruned_loss=0.02053, over 4953.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.0311, over 975011.37 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:25:36,920 INFO [train.py:715] (7/8) Epoch 12, batch 25850, loss[loss=0.1488, simple_loss=0.2262, pruned_loss=0.03566, over 4982.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03132, over 974327.05 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:26:15,480 INFO [train.py:715] (7/8) Epoch 12, batch 25900, loss[loss=0.1125, simple_loss=0.1942, pruned_loss=0.0154, over 4818.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03095, over 974409.38 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:26:53,771 INFO [train.py:715] (7/8) Epoch 12, batch 25950, loss[loss=0.1421, simple_loss=0.2105, pruned_loss=0.03692, over 4949.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.0314, over 974128.50 frames.], batch size: 39, lr: 1.79e-04 +2022-05-07 13:27:31,254 
INFO [train.py:715] (7/8) Epoch 12, batch 26000, loss[loss=0.1529, simple_loss=0.216, pruned_loss=0.04483, over 4863.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03203, over 974188.23 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 13:28:09,532 INFO [train.py:715] (7/8) Epoch 12, batch 26050, loss[loss=0.1076, simple_loss=0.1815, pruned_loss=0.0168, over 4930.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03177, over 973498.35 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:28:48,386 INFO [train.py:715] (7/8) Epoch 12, batch 26100, loss[loss=0.1261, simple_loss=0.2043, pruned_loss=0.02402, over 4810.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03174, over 971761.53 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:29:27,201 INFO [train.py:715] (7/8) Epoch 12, batch 26150, loss[loss=0.1345, simple_loss=0.2152, pruned_loss=0.02694, over 4859.00 frames.], tot_loss[loss=0.136, simple_loss=0.2091, pruned_loss=0.03148, over 971340.97 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 13:30:06,153 INFO [train.py:715] (7/8) Epoch 12, batch 26200, loss[loss=0.1554, simple_loss=0.219, pruned_loss=0.04585, over 4835.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2091, pruned_loss=0.0315, over 970637.70 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:30:44,509 INFO [train.py:715] (7/8) Epoch 12, batch 26250, loss[loss=0.1418, simple_loss=0.2253, pruned_loss=0.02917, over 4961.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2089, pruned_loss=0.03119, over 970853.84 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:31:23,013 INFO [train.py:715] (7/8) Epoch 12, batch 26300, loss[loss=0.1384, simple_loss=0.2136, pruned_loss=0.03162, over 4930.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03118, over 970074.50 frames.], batch size: 29, lr: 1.79e-04 +2022-05-07 13:32:02,153 INFO [train.py:715] (7/8) Epoch 12, batch 26350, loss[loss=0.1355, simple_loss=0.2121, pruned_loss=0.02941, over 4777.00 frames.], tot_loss[loss=0.137, simple_loss=0.21, pruned_loss=0.03196, over 970363.09 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:32:40,221 INFO [train.py:715] (7/8) Epoch 12, batch 26400, loss[loss=0.1441, simple_loss=0.2142, pruned_loss=0.03697, over 4882.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03132, over 971009.90 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:33:18,355 INFO [train.py:715] (7/8) Epoch 12, batch 26450, loss[loss=0.1685, simple_loss=0.2396, pruned_loss=0.04865, over 4970.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03108, over 971530.32 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:33:56,291 INFO [train.py:715] (7/8) Epoch 12, batch 26500, loss[loss=0.1696, simple_loss=0.2216, pruned_loss=0.05883, over 4780.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.0312, over 971717.03 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:34:34,592 INFO [train.py:715] (7/8) Epoch 12, batch 26550, loss[loss=0.1383, simple_loss=0.2082, pruned_loss=0.03418, over 4962.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03092, over 971494.70 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:35:12,934 INFO [train.py:715] (7/8) Epoch 12, batch 26600, loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.02961, over 4926.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03106, over 972029.62 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 13:35:51,457 INFO 
[train.py:715] (7/8) Epoch 12, batch 26650, loss[loss=0.1125, simple_loss=0.1913, pruned_loss=0.01687, over 4969.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2094, pruned_loss=0.03057, over 972381.69 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 13:36:30,041 INFO [train.py:715] (7/8) Epoch 12, batch 26700, loss[loss=0.1768, simple_loss=0.2441, pruned_loss=0.05481, over 4838.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03106, over 972640.74 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 13:37:08,436 INFO [train.py:715] (7/8) Epoch 12, batch 26750, loss[loss=0.1191, simple_loss=0.193, pruned_loss=0.02265, over 4745.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03088, over 971870.16 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:37:47,908 INFO [train.py:715] (7/8) Epoch 12, batch 26800, loss[loss=0.1409, simple_loss=0.219, pruned_loss=0.03144, over 4983.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2108, pruned_loss=0.03141, over 971985.84 frames.], batch size: 25, lr: 1.79e-04 +2022-05-07 13:38:27,723 INFO [train.py:715] (7/8) Epoch 12, batch 26850, loss[loss=0.1315, simple_loss=0.2006, pruned_loss=0.03117, over 4918.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03113, over 971723.10 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:39:07,106 INFO [train.py:715] (7/8) Epoch 12, batch 26900, loss[loss=0.1167, simple_loss=0.1965, pruned_loss=0.01848, over 4984.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03144, over 972129.64 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 13:39:45,934 INFO [train.py:715] (7/8) Epoch 12, batch 26950, loss[loss=0.1821, simple_loss=0.2462, pruned_loss=0.059, over 4743.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03135, over 972489.37 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:40:25,466 INFO [train.py:715] (7/8) Epoch 12, batch 27000, loss[loss=0.1628, simple_loss=0.2235, pruned_loss=0.05099, over 4959.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03184, over 972459.14 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:40:25,466 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 13:40:37,912 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1054, simple_loss=0.1894, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-07 13:41:17,234 INFO [train.py:715] (7/8) Epoch 12, batch 27050, loss[loss=0.1462, simple_loss=0.2148, pruned_loss=0.03877, over 4833.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03143, over 972067.00 frames.], batch size: 30, lr: 1.79e-04 +2022-05-07 13:41:55,463 INFO [train.py:715] (7/8) Epoch 12, batch 27100, loss[loss=0.1147, simple_loss=0.1861, pruned_loss=0.02168, over 4761.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2106, pruned_loss=0.03197, over 971778.88 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:42:33,821 INFO [train.py:715] (7/8) Epoch 12, batch 27150, loss[loss=0.1358, simple_loss=0.2211, pruned_loss=0.02526, over 4956.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2117, pruned_loss=0.03221, over 971572.72 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:43:12,675 INFO [train.py:715] (7/8) Epoch 12, batch 27200, loss[loss=0.1304, simple_loss=0.1952, pruned_loss=0.03279, over 4863.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03188, over 971730.76 frames.], batch size: 32, lr: 1.79e-04 +2022-05-07 13:43:50,976 INFO [train.py:715] (7/8) Epoch 12, batch 27250, loss[loss=0.1358, simple_loss=0.2198, pruned_loss=0.0259, over 4734.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03202, over 972105.87 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:44:29,602 INFO [train.py:715] (7/8) Epoch 12, batch 27300, loss[loss=0.1197, simple_loss=0.1893, pruned_loss=0.02507, over 4967.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2114, pruned_loss=0.03208, over 972008.67 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:45:08,186 INFO [train.py:715] (7/8) Epoch 12, batch 27350, loss[loss=0.1485, simple_loss=0.2174, pruned_loss=0.03979, over 4651.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2106, pruned_loss=0.03221, over 971577.07 frames.], batch size: 13, lr: 1.79e-04 +2022-05-07 13:45:47,171 INFO [train.py:715] (7/8) Epoch 12, batch 27400, loss[loss=0.1116, simple_loss=0.1904, pruned_loss=0.01645, over 4734.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03166, over 971557.01 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:46:25,847 INFO [train.py:715] (7/8) Epoch 12, batch 27450, loss[loss=0.1561, simple_loss=0.2136, pruned_loss=0.04932, over 4777.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03201, over 971052.26 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:47:04,345 INFO [train.py:715] (7/8) Epoch 12, batch 27500, loss[loss=0.1345, simple_loss=0.2145, pruned_loss=0.02727, over 4797.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03173, over 972050.68 frames.], batch size: 21, lr: 1.79e-04 +2022-05-07 13:47:43,117 INFO [train.py:715] (7/8) Epoch 12, batch 27550, loss[loss=0.1323, simple_loss=0.2099, pruned_loss=0.02732, over 4818.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03104, over 972142.31 frames.], batch size: 26, lr: 1.79e-04 +2022-05-07 13:48:21,799 INFO [train.py:715] (7/8) Epoch 12, batch 27600, loss[loss=0.1309, simple_loss=0.2045, pruned_loss=0.02863, over 4848.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03138, over 971631.39 frames.], batch size: 20, lr: 1.79e-04 +2022-05-07 13:49:00,948 INFO [train.py:715] (7/8) Epoch 12, batch 27650, loss[loss=0.1462, simple_loss=0.229, pruned_loss=0.03173, over 4964.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03152, over 971907.56 frames.], batch size: 24, lr: 1.79e-04 
+2022-05-07 13:49:39,559 INFO [train.py:715] (7/8) Epoch 12, batch 27700, loss[loss=0.1642, simple_loss=0.2297, pruned_loss=0.04933, over 4885.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03111, over 971824.98 frames.], batch size: 22, lr: 1.79e-04 +2022-05-07 13:50:18,423 INFO [train.py:715] (7/8) Epoch 12, batch 27750, loss[loss=0.1299, simple_loss=0.208, pruned_loss=0.02591, over 4896.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03124, over 971954.55 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:50:56,328 INFO [train.py:715] (7/8) Epoch 12, batch 27800, loss[loss=0.199, simple_loss=0.267, pruned_loss=0.06546, over 4909.00 frames.], tot_loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03168, over 971949.98 frames.], batch size: 23, lr: 1.79e-04 +2022-05-07 13:51:34,003 INFO [train.py:715] (7/8) Epoch 12, batch 27850, loss[loss=0.1186, simple_loss=0.1887, pruned_loss=0.02421, over 4899.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03165, over 972890.95 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 13:52:12,421 INFO [train.py:715] (7/8) Epoch 12, batch 27900, loss[loss=0.1682, simple_loss=0.2383, pruned_loss=0.04904, over 4986.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.032, over 973429.44 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 13:52:50,385 INFO [train.py:715] (7/8) Epoch 12, batch 27950, loss[loss=0.1266, simple_loss=0.2059, pruned_loss=0.02365, over 4734.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03184, over 972898.66 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 13:53:28,678 INFO [train.py:715] (7/8) Epoch 12, batch 28000, loss[loss=0.1382, simple_loss=0.216, pruned_loss=0.03019, over 4835.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03162, over 972474.94 frames.], batch size: 15, lr: 1.79e-04 +2022-05-07 13:54:06,331 INFO [train.py:715] (7/8) Epoch 12, batch 28050, loss[loss=0.169, simple_loss=0.2432, pruned_loss=0.04743, over 4777.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03129, over 972613.50 frames.], batch size: 17, lr: 1.79e-04 +2022-05-07 13:54:44,518 INFO [train.py:715] (7/8) Epoch 12, batch 28100, loss[loss=0.1591, simple_loss=0.2253, pruned_loss=0.04651, over 4948.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03169, over 972764.55 frames.], batch size: 35, lr: 1.79e-04 +2022-05-07 13:55:22,258 INFO [train.py:715] (7/8) Epoch 12, batch 28150, loss[loss=0.1189, simple_loss=0.196, pruned_loss=0.02085, over 4842.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2107, pruned_loss=0.03144, over 972488.09 frames.], batch size: 12, lr: 1.79e-04 +2022-05-07 13:56:00,658 INFO [train.py:715] (7/8) Epoch 12, batch 28200, loss[loss=0.1329, simple_loss=0.2066, pruned_loss=0.02963, over 4966.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03132, over 972114.98 frames.], batch size: 24, lr: 1.79e-04 +2022-05-07 13:56:39,078 INFO [train.py:715] (7/8) Epoch 12, batch 28250, loss[loss=0.1287, simple_loss=0.2031, pruned_loss=0.02718, over 4987.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.0318, over 973460.92 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 13:57:17,043 INFO [train.py:715] (7/8) Epoch 12, batch 28300, loss[loss=0.1189, simple_loss=0.1968, pruned_loss=0.02052, over 4787.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03178, over 972326.52 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 
13:57:55,848 INFO [train.py:715] (7/8) Epoch 12, batch 28350, loss[loss=0.1353, simple_loss=0.2077, pruned_loss=0.03146, over 4905.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03185, over 972427.73 frames.], batch size: 39, lr: 1.79e-04 +2022-05-07 13:58:33,887 INFO [train.py:715] (7/8) Epoch 12, batch 28400, loss[loss=0.1187, simple_loss=0.1965, pruned_loss=0.02044, over 4983.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03191, over 973053.45 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 13:59:12,073 INFO [train.py:715] (7/8) Epoch 12, batch 28450, loss[loss=0.132, simple_loss=0.2079, pruned_loss=0.02807, over 4985.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03215, over 972448.86 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 13:59:49,932 INFO [train.py:715] (7/8) Epoch 12, batch 28500, loss[loss=0.1551, simple_loss=0.2248, pruned_loss=0.04264, over 4775.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03182, over 972915.24 frames.], batch size: 14, lr: 1.79e-04 +2022-05-07 14:00:27,901 INFO [train.py:715] (7/8) Epoch 12, batch 28550, loss[loss=0.1355, simple_loss=0.2068, pruned_loss=0.03206, over 4763.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03218, over 973323.46 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 14:01:06,319 INFO [train.py:715] (7/8) Epoch 12, batch 28600, loss[loss=0.1384, simple_loss=0.213, pruned_loss=0.03188, over 4905.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.032, over 973039.21 frames.], batch size: 19, lr: 1.79e-04 +2022-05-07 14:01:44,215 INFO [train.py:715] (7/8) Epoch 12, batch 28650, loss[loss=0.1486, simple_loss=0.2188, pruned_loss=0.03913, over 4878.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.0316, over 972812.87 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 14:02:23,314 INFO [train.py:715] (7/8) Epoch 12, batch 28700, loss[loss=0.1595, simple_loss=0.2367, pruned_loss=0.04116, over 4748.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03178, over 972319.45 frames.], batch size: 16, lr: 1.79e-04 +2022-05-07 14:03:01,818 INFO [train.py:715] (7/8) Epoch 12, batch 28750, loss[loss=0.1249, simple_loss=0.1992, pruned_loss=0.02523, over 4920.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03162, over 972608.00 frames.], batch size: 18, lr: 1.79e-04 +2022-05-07 14:03:40,786 INFO [train.py:715] (7/8) Epoch 12, batch 28800, loss[loss=0.1136, simple_loss=0.1805, pruned_loss=0.02337, over 4817.00 frames.], tot_loss[loss=0.1368, simple_loss=0.21, pruned_loss=0.03175, over 972924.08 frames.], batch size: 13, lr: 1.79e-04 +2022-05-07 14:04:18,675 INFO [train.py:715] (7/8) Epoch 12, batch 28850, loss[loss=0.1255, simple_loss=0.2105, pruned_loss=0.0203, over 4986.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03161, over 971780.86 frames.], batch size: 28, lr: 1.79e-04 +2022-05-07 14:04:57,032 INFO [train.py:715] (7/8) Epoch 12, batch 28900, loss[loss=0.1332, simple_loss=0.2034, pruned_loss=0.03149, over 4928.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03191, over 971549.43 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:05:35,791 INFO [train.py:715] (7/8) Epoch 12, batch 28950, loss[loss=0.1436, simple_loss=0.223, pruned_loss=0.03206, over 4983.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03164, over 972604.47 frames.], batch size: 28, lr: 1.78e-04 +2022-05-07 14:06:14,143 
INFO [train.py:715] (7/8) Epoch 12, batch 29000, loss[loss=0.1461, simple_loss=0.2218, pruned_loss=0.03519, over 4788.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.0315, over 972631.15 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:06:53,416 INFO [train.py:715] (7/8) Epoch 12, batch 29050, loss[loss=0.141, simple_loss=0.2076, pruned_loss=0.03718, over 4741.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03142, over 973255.23 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:07:31,882 INFO [train.py:715] (7/8) Epoch 12, batch 29100, loss[loss=0.1644, simple_loss=0.2336, pruned_loss=0.04759, over 4952.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03133, over 973058.84 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:08:10,553 INFO [train.py:715] (7/8) Epoch 12, batch 29150, loss[loss=0.164, simple_loss=0.2253, pruned_loss=0.05136, over 4955.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.0313, over 972573.72 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:08:48,980 INFO [train.py:715] (7/8) Epoch 12, batch 29200, loss[loss=0.1669, simple_loss=0.2283, pruned_loss=0.05276, over 4969.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03123, over 971799.40 frames.], batch size: 24, lr: 1.78e-04 +2022-05-07 14:09:27,674 INFO [train.py:715] (7/8) Epoch 12, batch 29250, loss[loss=0.1264, simple_loss=0.2093, pruned_loss=0.02179, over 4785.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03116, over 972233.21 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:10:05,810 INFO [train.py:715] (7/8) Epoch 12, batch 29300, loss[loss=0.1557, simple_loss=0.219, pruned_loss=0.04616, over 4800.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03119, over 972092.00 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:10:43,233 INFO [train.py:715] (7/8) Epoch 12, batch 29350, loss[loss=0.1343, simple_loss=0.1982, pruned_loss=0.03519, over 4807.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03112, over 972299.49 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:11:22,339 INFO [train.py:715] (7/8) Epoch 12, batch 29400, loss[loss=0.1208, simple_loss=0.1946, pruned_loss=0.02352, over 4836.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03128, over 972466.36 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:12:00,593 INFO [train.py:715] (7/8) Epoch 12, batch 29450, loss[loss=0.1365, simple_loss=0.2117, pruned_loss=0.03066, over 4943.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03094, over 971981.35 frames.], batch size: 29, lr: 1.78e-04 +2022-05-07 14:12:38,752 INFO [train.py:715] (7/8) Epoch 12, batch 29500, loss[loss=0.1442, simple_loss=0.2121, pruned_loss=0.03814, over 4736.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03133, over 971689.54 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:13:16,879 INFO [train.py:715] (7/8) Epoch 12, batch 29550, loss[loss=0.1354, simple_loss=0.1998, pruned_loss=0.03554, over 4774.00 frames.], tot_loss[loss=0.1366, simple_loss=0.21, pruned_loss=0.03163, over 972484.86 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:13:55,808 INFO [train.py:715] (7/8) Epoch 12, batch 29600, loss[loss=0.1472, simple_loss=0.2137, pruned_loss=0.04034, over 4772.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03179, over 972414.50 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:14:34,033 INFO 
[train.py:715] (7/8) Epoch 12, batch 29650, loss[loss=0.1305, simple_loss=0.2037, pruned_loss=0.02862, over 4985.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03139, over 972611.14 frames.], batch size: 28, lr: 1.78e-04 +2022-05-07 14:15:11,743 INFO [train.py:715] (7/8) Epoch 12, batch 29700, loss[loss=0.1768, simple_loss=0.2532, pruned_loss=0.05016, over 4753.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03226, over 972006.14 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:15:51,273 INFO [train.py:715] (7/8) Epoch 12, batch 29750, loss[loss=0.1446, simple_loss=0.209, pruned_loss=0.04009, over 4859.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2105, pruned_loss=0.03224, over 973124.34 frames.], batch size: 32, lr: 1.78e-04 +2022-05-07 14:16:30,393 INFO [train.py:715] (7/8) Epoch 12, batch 29800, loss[loss=0.1422, simple_loss=0.2186, pruned_loss=0.03292, over 4827.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03197, over 973756.27 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:17:09,204 INFO [train.py:715] (7/8) Epoch 12, batch 29850, loss[loss=0.1868, simple_loss=0.2462, pruned_loss=0.06364, over 4953.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03199, over 973368.25 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:17:47,534 INFO [train.py:715] (7/8) Epoch 12, batch 29900, loss[loss=0.09282, simple_loss=0.1662, pruned_loss=0.009712, over 4821.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03127, over 972757.03 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:18:26,385 INFO [train.py:715] (7/8) Epoch 12, batch 29950, loss[loss=0.1511, simple_loss=0.2197, pruned_loss=0.04126, over 4766.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03174, over 972043.15 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:19:04,508 INFO [train.py:715] (7/8) Epoch 12, batch 30000, loss[loss=0.1387, simple_loss=0.1978, pruned_loss=0.03978, over 4954.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.03153, over 972187.13 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:19:04,509 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 14:19:14,013 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1054, simple_loss=0.1894, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-07 14:19:52,926 INFO [train.py:715] (7/8) Epoch 12, batch 30050, loss[loss=0.1242, simple_loss=0.21, pruned_loss=0.01923, over 4869.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2099, pruned_loss=0.03173, over 972828.89 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 14:20:31,330 INFO [train.py:715] (7/8) Epoch 12, batch 30100, loss[loss=0.1571, simple_loss=0.2227, pruned_loss=0.0457, over 4865.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2094, pruned_loss=0.03154, over 973368.01 frames.], batch size: 30, lr: 1.78e-04 +2022-05-07 14:21:10,496 INFO [train.py:715] (7/8) Epoch 12, batch 30150, loss[loss=0.1324, simple_loss=0.2086, pruned_loss=0.02812, over 4791.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03133, over 973620.08 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:21:48,960 INFO [train.py:715] (7/8) Epoch 12, batch 30200, loss[loss=0.1045, simple_loss=0.1717, pruned_loss=0.01862, over 4793.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2089, pruned_loss=0.03117, over 972930.76 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:22:28,440 INFO [train.py:715] (7/8) Epoch 12, batch 30250, loss[loss=0.114, simple_loss=0.1973, pruned_loss=0.01536, over 4820.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03102, over 972919.02 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 14:23:07,603 INFO [train.py:715] (7/8) Epoch 12, batch 30300, loss[loss=0.126, simple_loss=0.2009, pruned_loss=0.02555, over 4920.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03136, over 972936.18 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:23:45,572 INFO [train.py:715] (7/8) Epoch 12, batch 30350, loss[loss=0.1228, simple_loss=0.1985, pruned_loss=0.02352, over 4834.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03159, over 972930.52 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:24:23,562 INFO [train.py:715] (7/8) Epoch 12, batch 30400, loss[loss=0.1753, simple_loss=0.2504, pruned_loss=0.05013, over 4787.00 frames.], tot_loss[loss=0.1359, simple_loss=0.209, pruned_loss=0.03138, over 973051.98 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:25:01,297 INFO [train.py:715] (7/8) Epoch 12, batch 30450, loss[loss=0.142, simple_loss=0.2144, pruned_loss=0.03482, over 4874.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03137, over 973028.76 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:25:39,285 INFO [train.py:715] (7/8) Epoch 12, batch 30500, loss[loss=0.1496, simple_loss=0.2185, pruned_loss=0.04035, over 4904.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03188, over 973109.99 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:26:17,289 INFO [train.py:715] (7/8) Epoch 12, batch 30550, loss[loss=0.1575, simple_loss=0.2348, pruned_loss=0.04009, over 4790.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2098, pruned_loss=0.03194, over 972070.56 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:26:55,238 INFO [train.py:715] (7/8) Epoch 12, batch 30600, loss[loss=0.1294, simple_loss=0.2073, pruned_loss=0.02576, over 4945.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03167, over 972009.72 frames.], batch size: 29, lr: 1.78e-04 +2022-05-07 14:27:32,190 INFO [train.py:715] (7/8) Epoch 12, batch 30650, loss[loss=0.1284, simple_loss=0.2055, pruned_loss=0.02562, over 4799.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03165, over 972262.63 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 
14:28:10,734 INFO [train.py:715] (7/8) Epoch 12, batch 30700, loss[loss=0.1133, simple_loss=0.1917, pruned_loss=0.01742, over 4974.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03167, over 971642.53 frames.], batch size: 24, lr: 1.78e-04 +2022-05-07 14:28:48,655 INFO [train.py:715] (7/8) Epoch 12, batch 30750, loss[loss=0.1214, simple_loss=0.1958, pruned_loss=0.02355, over 4876.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03121, over 972507.18 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 14:29:27,167 INFO [train.py:715] (7/8) Epoch 12, batch 30800, loss[loss=0.178, simple_loss=0.2499, pruned_loss=0.05308, over 4793.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.0316, over 971392.27 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:30:05,816 INFO [train.py:715] (7/8) Epoch 12, batch 30850, loss[loss=0.1123, simple_loss=0.1883, pruned_loss=0.01813, over 4866.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03129, over 971315.04 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:30:45,021 INFO [train.py:715] (7/8) Epoch 12, batch 30900, loss[loss=0.128, simple_loss=0.1984, pruned_loss=0.02884, over 4644.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03085, over 971289.29 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:31:23,359 INFO [train.py:715] (7/8) Epoch 12, batch 30950, loss[loss=0.1221, simple_loss=0.2048, pruned_loss=0.01976, over 4857.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03087, over 971714.19 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 14:32:02,065 INFO [train.py:715] (7/8) Epoch 12, batch 31000, loss[loss=0.1355, simple_loss=0.2161, pruned_loss=0.02742, over 4976.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03033, over 971159.49 frames.], batch size: 28, lr: 1.78e-04 +2022-05-07 14:32:41,214 INFO [train.py:715] (7/8) Epoch 12, batch 31050, loss[loss=0.136, simple_loss=0.2082, pruned_loss=0.0319, over 4892.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03029, over 972075.34 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:33:19,691 INFO [train.py:715] (7/8) Epoch 12, batch 31100, loss[loss=0.1459, simple_loss=0.2209, pruned_loss=0.03544, over 4928.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03058, over 972494.01 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:33:57,507 INFO [train.py:715] (7/8) Epoch 12, batch 31150, loss[loss=0.1374, simple_loss=0.2089, pruned_loss=0.03297, over 4757.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03089, over 972637.28 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:34:36,504 INFO [train.py:715] (7/8) Epoch 12, batch 31200, loss[loss=0.1344, simple_loss=0.194, pruned_loss=0.03742, over 4776.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03121, over 972985.35 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:35:15,346 INFO [train.py:715] (7/8) Epoch 12, batch 31250, loss[loss=0.1431, simple_loss=0.2134, pruned_loss=0.03638, over 4782.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.03142, over 971523.17 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:35:54,049 INFO [train.py:715] (7/8) Epoch 12, batch 31300, loss[loss=0.1471, simple_loss=0.2227, pruned_loss=0.03576, over 4755.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03147, over 972237.70 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:36:32,570 
INFO [train.py:715] (7/8) Epoch 12, batch 31350, loss[loss=0.1609, simple_loss=0.2346, pruned_loss=0.04363, over 4951.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03149, over 972763.35 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:37:11,735 INFO [train.py:715] (7/8) Epoch 12, batch 31400, loss[loss=0.183, simple_loss=0.2377, pruned_loss=0.06414, over 4971.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03168, over 973343.05 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:37:50,139 INFO [train.py:715] (7/8) Epoch 12, batch 31450, loss[loss=0.1643, simple_loss=0.2213, pruned_loss=0.05367, over 4960.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2105, pruned_loss=0.03223, over 972299.54 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:38:28,380 INFO [train.py:715] (7/8) Epoch 12, batch 31500, loss[loss=0.1753, simple_loss=0.2418, pruned_loss=0.0544, over 4779.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2113, pruned_loss=0.0326, over 973443.83 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:39:06,657 INFO [train.py:715] (7/8) Epoch 12, batch 31550, loss[loss=0.1276, simple_loss=0.1995, pruned_loss=0.02788, over 4838.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03212, over 972440.87 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:39:45,222 INFO [train.py:715] (7/8) Epoch 12, batch 31600, loss[loss=0.1477, simple_loss=0.2062, pruned_loss=0.04456, over 4891.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2106, pruned_loss=0.03213, over 971924.24 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:40:22,890 INFO [train.py:715] (7/8) Epoch 12, batch 31650, loss[loss=0.1016, simple_loss=0.1676, pruned_loss=0.01776, over 4825.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2107, pruned_loss=0.03209, over 971833.71 frames.], batch size: 12, lr: 1.78e-04 +2022-05-07 14:41:00,519 INFO [train.py:715] (7/8) Epoch 12, batch 31700, loss[loss=0.1336, simple_loss=0.2097, pruned_loss=0.02878, over 4917.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03162, over 971456.53 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:41:38,634 INFO [train.py:715] (7/8) Epoch 12, batch 31750, loss[loss=0.152, simple_loss=0.23, pruned_loss=0.037, over 4884.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03136, over 970913.24 frames.], batch size: 38, lr: 1.78e-04 +2022-05-07 14:42:16,747 INFO [train.py:715] (7/8) Epoch 12, batch 31800, loss[loss=0.125, simple_loss=0.2042, pruned_loss=0.0229, over 4763.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03169, over 970683.61 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:42:54,699 INFO [train.py:715] (7/8) Epoch 12, batch 31850, loss[loss=0.1513, simple_loss=0.2249, pruned_loss=0.03884, over 4783.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03149, over 971116.55 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:43:32,405 INFO [train.py:715] (7/8) Epoch 12, batch 31900, loss[loss=0.1421, simple_loss=0.2108, pruned_loss=0.03667, over 4841.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03202, over 971383.78 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:44:10,698 INFO [train.py:715] (7/8) Epoch 12, batch 31950, loss[loss=0.1365, simple_loss=0.2072, pruned_loss=0.03289, over 4960.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03146, over 972050.37 frames.], batch size: 39, lr: 1.78e-04 +2022-05-07 14:44:48,297 INFO 
[train.py:715] (7/8) Epoch 12, batch 32000, loss[loss=0.152, simple_loss=0.2266, pruned_loss=0.03865, over 4817.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03162, over 971673.82 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 14:45:26,162 INFO [train.py:715] (7/8) Epoch 12, batch 32050, loss[loss=0.1298, simple_loss=0.2006, pruned_loss=0.0295, over 4887.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03094, over 971513.19 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:46:04,020 INFO [train.py:715] (7/8) Epoch 12, batch 32100, loss[loss=0.1446, simple_loss=0.2113, pruned_loss=0.03891, over 4978.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03108, over 971238.52 frames.], batch size: 35, lr: 1.78e-04 +2022-05-07 14:46:42,428 INFO [train.py:715] (7/8) Epoch 12, batch 32150, loss[loss=0.1553, simple_loss=0.2159, pruned_loss=0.04734, over 4897.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03129, over 972248.82 frames.], batch size: 39, lr: 1.78e-04 +2022-05-07 14:47:20,024 INFO [train.py:715] (7/8) Epoch 12, batch 32200, loss[loss=0.1136, simple_loss=0.1876, pruned_loss=0.01978, over 4779.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03139, over 971679.50 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 14:47:58,175 INFO [train.py:715] (7/8) Epoch 12, batch 32250, loss[loss=0.106, simple_loss=0.1807, pruned_loss=0.01566, over 4861.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03163, over 971962.16 frames.], batch size: 32, lr: 1.78e-04 +2022-05-07 14:48:36,824 INFO [train.py:715] (7/8) Epoch 12, batch 32300, loss[loss=0.1177, simple_loss=0.1956, pruned_loss=0.01988, over 4804.00 frames.], tot_loss[loss=0.137, simple_loss=0.2107, pruned_loss=0.03168, over 971999.54 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 14:49:14,383 INFO [train.py:715] (7/8) Epoch 12, batch 32350, loss[loss=0.1636, simple_loss=0.2269, pruned_loss=0.05014, over 4974.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03167, over 972395.94 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:49:52,727 INFO [train.py:715] (7/8) Epoch 12, batch 32400, loss[loss=0.1434, simple_loss=0.2149, pruned_loss=0.03594, over 4903.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2112, pruned_loss=0.03222, over 972348.81 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 14:50:30,826 INFO [train.py:715] (7/8) Epoch 12, batch 32450, loss[loss=0.1531, simple_loss=0.223, pruned_loss=0.04166, over 4814.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2109, pruned_loss=0.03223, over 972891.41 frames.], batch size: 27, lr: 1.78e-04 +2022-05-07 14:51:09,333 INFO [train.py:715] (7/8) Epoch 12, batch 32500, loss[loss=0.1602, simple_loss=0.2327, pruned_loss=0.0439, over 4881.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2103, pruned_loss=0.03235, over 972442.60 frames.], batch size: 22, lr: 1.78e-04 +2022-05-07 14:51:46,829 INFO [train.py:715] (7/8) Epoch 12, batch 32550, loss[loss=0.1311, simple_loss=0.215, pruned_loss=0.02359, over 4886.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03196, over 972746.10 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:52:25,069 INFO [train.py:715] (7/8) Epoch 12, batch 32600, loss[loss=0.1323, simple_loss=0.2026, pruned_loss=0.03096, over 4970.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03173, over 972799.64 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:53:03,207 INFO [train.py:715] 
(7/8) Epoch 12, batch 32650, loss[loss=0.111, simple_loss=0.1909, pruned_loss=0.01559, over 4799.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2095, pruned_loss=0.03154, over 972321.64 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 14:53:40,737 INFO [train.py:715] (7/8) Epoch 12, batch 32700, loss[loss=0.1342, simple_loss=0.2055, pruned_loss=0.03148, over 4806.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03142, over 972538.61 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 14:54:18,461 INFO [train.py:715] (7/8) Epoch 12, batch 32750, loss[loss=0.1686, simple_loss=0.2295, pruned_loss=0.05389, over 4978.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03178, over 972408.82 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:54:56,866 INFO [train.py:715] (7/8) Epoch 12, batch 32800, loss[loss=0.1171, simple_loss=0.1961, pruned_loss=0.01903, over 4971.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03114, over 971909.92 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:55:35,235 INFO [train.py:715] (7/8) Epoch 12, batch 32850, loss[loss=0.1144, simple_loss=0.1957, pruned_loss=0.01654, over 4884.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03117, over 971809.89 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 14:56:12,924 INFO [train.py:715] (7/8) Epoch 12, batch 32900, loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03568, over 4766.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03121, over 972874.97 frames.], batch size: 19, lr: 1.78e-04 +2022-05-07 14:56:51,022 INFO [train.py:715] (7/8) Epoch 12, batch 32950, loss[loss=0.175, simple_loss=0.2288, pruned_loss=0.06064, over 4961.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03115, over 972713.38 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:57:29,167 INFO [train.py:715] (7/8) Epoch 12, batch 33000, loss[loss=0.1356, simple_loss=0.2171, pruned_loss=0.02703, over 4694.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03153, over 972826.94 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 14:57:29,168 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 14:57:38,689 INFO [train.py:742] (7/8) Epoch 12, validation: loss=0.1057, simple_loss=0.1896, pruned_loss=0.01085, over 914524.00 frames. 
+2022-05-07 14:58:18,189 INFO [train.py:715] (7/8) Epoch 12, batch 33050, loss[loss=0.1225, simple_loss=0.1851, pruned_loss=0.02991, over 4801.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03178, over 972685.32 frames.], batch size: 13, lr: 1.78e-04 +2022-05-07 14:58:56,559 INFO [train.py:715] (7/8) Epoch 12, batch 33100, loss[loss=0.1172, simple_loss=0.1907, pruned_loss=0.02183, over 4987.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2109, pruned_loss=0.03211, over 973273.14 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 14:59:34,844 INFO [train.py:715] (7/8) Epoch 12, batch 33150, loss[loss=0.1183, simple_loss=0.1886, pruned_loss=0.024, over 4939.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03218, over 973752.77 frames.], batch size: 23, lr: 1.78e-04 +2022-05-07 15:00:12,868 INFO [train.py:715] (7/8) Epoch 12, batch 33200, loss[loss=0.1369, simple_loss=0.2189, pruned_loss=0.02742, over 4949.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03215, over 973359.56 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 15:00:51,450 INFO [train.py:715] (7/8) Epoch 12, batch 33250, loss[loss=0.1513, simple_loss=0.24, pruned_loss=0.03128, over 4784.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2115, pruned_loss=0.0325, over 972261.54 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 15:01:29,592 INFO [train.py:715] (7/8) Epoch 12, batch 33300, loss[loss=0.1376, simple_loss=0.2069, pruned_loss=0.03421, over 4822.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2113, pruned_loss=0.03244, over 972432.00 frames.], batch size: 15, lr: 1.78e-04 +2022-05-07 15:02:07,721 INFO [train.py:715] (7/8) Epoch 12, batch 33350, loss[loss=0.1255, simple_loss=0.2063, pruned_loss=0.02237, over 4875.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2106, pruned_loss=0.03245, over 972116.90 frames.], batch size: 20, lr: 1.78e-04 +2022-05-07 15:02:46,383 INFO [train.py:715] (7/8) Epoch 12, batch 33400, loss[loss=0.1179, simple_loss=0.1846, pruned_loss=0.02561, over 4780.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03197, over 971501.21 frames.], batch size: 17, lr: 1.78e-04 +2022-05-07 15:03:25,037 INFO [train.py:715] (7/8) Epoch 12, batch 33450, loss[loss=0.153, simple_loss=0.2282, pruned_loss=0.03891, over 4971.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03152, over 970778.58 frames.], batch size: 31, lr: 1.78e-04 +2022-05-07 15:04:03,393 INFO [train.py:715] (7/8) Epoch 12, batch 33500, loss[loss=0.139, simple_loss=0.2154, pruned_loss=0.03127, over 4793.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03146, over 970796.81 frames.], batch size: 14, lr: 1.78e-04 +2022-05-07 15:04:42,494 INFO [train.py:715] (7/8) Epoch 12, batch 33550, loss[loss=0.1377, simple_loss=0.2038, pruned_loss=0.0358, over 4798.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.0315, over 970589.11 frames.], batch size: 24, lr: 1.78e-04 +2022-05-07 15:05:21,128 INFO [train.py:715] (7/8) Epoch 12, batch 33600, loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.02909, over 4770.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03147, over 970709.57 frames.], batch size: 16, lr: 1.78e-04 +2022-05-07 15:05:59,984 INFO [train.py:715] (7/8) Epoch 12, batch 33650, loss[loss=0.1636, simple_loss=0.2344, pruned_loss=0.04637, over 4918.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03153, over 971766.65 frames.], batch size: 18, lr: 1.78e-04 +2022-05-07 
15:06:38,062 INFO [train.py:715] (7/8) Epoch 12, batch 33700, loss[loss=0.1336, simple_loss=0.203, pruned_loss=0.0321, over 4850.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03144, over 971103.93 frames.], batch size: 32, lr: 1.78e-04 +2022-05-07 15:07:16,845 INFO [train.py:715] (7/8) Epoch 12, batch 33750, loss[loss=0.1468, simple_loss=0.215, pruned_loss=0.03934, over 4983.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2107, pruned_loss=0.0322, over 971898.09 frames.], batch size: 25, lr: 1.78e-04 +2022-05-07 15:07:55,112 INFO [train.py:715] (7/8) Epoch 12, batch 33800, loss[loss=0.1463, simple_loss=0.2223, pruned_loss=0.03518, over 4952.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03202, over 971932.49 frames.], batch size: 21, lr: 1.78e-04 +2022-05-07 15:08:32,476 INFO [train.py:715] (7/8) Epoch 12, batch 33850, loss[loss=0.1161, simple_loss=0.1959, pruned_loss=0.01813, over 4813.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.0317, over 971628.00 frames.], batch size: 26, lr: 1.78e-04 +2022-05-07 15:09:10,656 INFO [train.py:715] (7/8) Epoch 12, batch 33900, loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.02814, over 4914.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.0317, over 970853.61 frames.], batch size: 23, lr: 1.78e-04 +2022-05-07 15:09:47,909 INFO [train.py:715] (7/8) Epoch 12, batch 33950, loss[loss=0.1065, simple_loss=0.1881, pruned_loss=0.01243, over 4867.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03182, over 970912.40 frames.], batch size: 22, lr: 1.77e-04 +2022-05-07 15:10:26,070 INFO [train.py:715] (7/8) Epoch 12, batch 34000, loss[loss=0.1605, simple_loss=0.2273, pruned_loss=0.04684, over 4760.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03195, over 971592.36 frames.], batch size: 19, lr: 1.77e-04 +2022-05-07 15:11:03,703 INFO [train.py:715] (7/8) Epoch 12, batch 34050, loss[loss=0.1394, simple_loss=0.2098, pruned_loss=0.03455, over 4857.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.03166, over 971299.84 frames.], batch size: 20, lr: 1.77e-04 +2022-05-07 15:11:41,637 INFO [train.py:715] (7/8) Epoch 12, batch 34100, loss[loss=0.1451, simple_loss=0.2218, pruned_loss=0.03418, over 4828.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03189, over 971132.66 frames.], batch size: 15, lr: 1.77e-04 +2022-05-07 15:12:19,671 INFO [train.py:715] (7/8) Epoch 12, batch 34150, loss[loss=0.1488, simple_loss=0.2172, pruned_loss=0.04025, over 4888.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.0315, over 971460.14 frames.], batch size: 16, lr: 1.77e-04 +2022-05-07 15:12:57,180 INFO [train.py:715] (7/8) Epoch 12, batch 34200, loss[loss=0.12, simple_loss=0.1882, pruned_loss=0.02586, over 4907.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03143, over 971500.71 frames.], batch size: 19, lr: 1.77e-04 +2022-05-07 15:13:35,448 INFO [train.py:715] (7/8) Epoch 12, batch 34250, loss[loss=0.1157, simple_loss=0.1885, pruned_loss=0.02142, over 4898.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03174, over 972189.94 frames.], batch size: 18, lr: 1.77e-04 +2022-05-07 15:14:12,817 INFO [train.py:715] (7/8) Epoch 12, batch 34300, loss[loss=0.1454, simple_loss=0.232, pruned_loss=0.02938, over 4664.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2098, pruned_loss=0.03165, over 972442.40 frames.], batch size: 13, lr: 1.77e-04 +2022-05-07 15:14:51,106 INFO 
[train.py:715] (7/8) Epoch 12, batch 34350, loss[loss=0.1289, simple_loss=0.213, pruned_loss=0.02237, over 4863.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03138, over 972694.24 frames.], batch size: 20, lr: 1.77e-04 +2022-05-07 15:15:28,882 INFO [train.py:715] (7/8) Epoch 12, batch 34400, loss[loss=0.1388, simple_loss=0.2117, pruned_loss=0.03295, over 4897.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2094, pruned_loss=0.03151, over 972843.58 frames.], batch size: 19, lr: 1.77e-04 +2022-05-07 15:16:07,251 INFO [train.py:715] (7/8) Epoch 12, batch 34450, loss[loss=0.1295, simple_loss=0.2002, pruned_loss=0.02938, over 4970.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03146, over 972490.72 frames.], batch size: 24, lr: 1.77e-04 +2022-05-07 15:16:45,350 INFO [train.py:715] (7/8) Epoch 12, batch 34500, loss[loss=0.1633, simple_loss=0.2484, pruned_loss=0.03909, over 4861.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03159, over 972311.20 frames.], batch size: 20, lr: 1.77e-04 +2022-05-07 15:17:23,595 INFO [train.py:715] (7/8) Epoch 12, batch 34550, loss[loss=0.1663, simple_loss=0.2338, pruned_loss=0.04942, over 4776.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03186, over 972357.50 frames.], batch size: 14, lr: 1.77e-04 +2022-05-07 15:18:02,261 INFO [train.py:715] (7/8) Epoch 12, batch 34600, loss[loss=0.1285, simple_loss=0.197, pruned_loss=0.02997, over 4824.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2094, pruned_loss=0.03146, over 971787.62 frames.], batch size: 15, lr: 1.77e-04 +2022-05-07 15:18:41,657 INFO [train.py:715] (7/8) Epoch 12, batch 34650, loss[loss=0.1196, simple_loss=0.2049, pruned_loss=0.01711, over 4982.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2092, pruned_loss=0.03146, over 972687.27 frames.], batch size: 25, lr: 1.77e-04 +2022-05-07 15:19:21,041 INFO [train.py:715] (7/8) Epoch 12, batch 34700, loss[loss=0.1491, simple_loss=0.2157, pruned_loss=0.04124, over 4794.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03177, over 972511.24 frames.], batch size: 24, lr: 1.77e-04 +2022-05-07 15:19:58,682 INFO [train.py:715] (7/8) Epoch 12, batch 34750, loss[loss=0.1217, simple_loss=0.1813, pruned_loss=0.03104, over 4754.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03157, over 972021.30 frames.], batch size: 12, lr: 1.77e-04 +2022-05-07 15:20:34,682 INFO [train.py:715] (7/8) Epoch 12, batch 34800, loss[loss=0.1349, simple_loss=0.2013, pruned_loss=0.03429, over 4916.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2091, pruned_loss=0.0313, over 971686.21 frames.], batch size: 18, lr: 1.77e-04 +2022-05-07 15:21:23,125 INFO [train.py:715] (7/8) Epoch 13, batch 0, loss[loss=0.1338, simple_loss=0.2099, pruned_loss=0.02883, over 4869.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2099, pruned_loss=0.02883, over 4869.00 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:22:01,153 INFO [train.py:715] (7/8) Epoch 13, batch 50, loss[loss=0.117, simple_loss=0.1911, pruned_loss=0.02143, over 4806.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2117, pruned_loss=0.03342, over 219750.28 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:22:39,462 INFO [train.py:715] (7/8) Epoch 13, batch 100, loss[loss=0.1343, simple_loss=0.211, pruned_loss=0.02883, over 4754.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2103, pruned_loss=0.03296, over 387700.18 frames.], batch size: 19, lr: 1.71e-04 +2022-05-07 15:23:17,856 INFO [train.py:715] (7/8) 
Epoch 13, batch 150, loss[loss=0.1804, simple_loss=0.249, pruned_loss=0.05589, over 4681.00 frames.], tot_loss[loss=0.1387, simple_loss=0.2111, pruned_loss=0.03317, over 516000.10 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:23:57,324 INFO [train.py:715] (7/8) Epoch 13, batch 200, loss[loss=0.1107, simple_loss=0.1849, pruned_loss=0.01826, over 4759.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2089, pruned_loss=0.03131, over 617866.32 frames.], batch size: 12, lr: 1.71e-04 +2022-05-07 15:24:35,736 INFO [train.py:715] (7/8) Epoch 13, batch 250, loss[loss=0.1262, simple_loss=0.1921, pruned_loss=0.03015, over 4915.00 frames.], tot_loss[loss=0.136, simple_loss=0.209, pruned_loss=0.03153, over 696984.04 frames.], batch size: 19, lr: 1.71e-04 +2022-05-07 15:25:15,230 INFO [train.py:715] (7/8) Epoch 13, batch 300, loss[loss=0.1358, simple_loss=0.2104, pruned_loss=0.03057, over 4846.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2093, pruned_loss=0.03161, over 757455.09 frames.], batch size: 30, lr: 1.71e-04 +2022-05-07 15:25:53,992 INFO [train.py:715] (7/8) Epoch 13, batch 350, loss[loss=0.1391, simple_loss=0.2171, pruned_loss=0.03055, over 4985.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03124, over 805398.81 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:26:33,532 INFO [train.py:715] (7/8) Epoch 13, batch 400, loss[loss=0.1612, simple_loss=0.2386, pruned_loss=0.04192, over 4953.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03139, over 842939.68 frames.], batch size: 35, lr: 1.71e-04 +2022-05-07 15:27:13,023 INFO [train.py:715] (7/8) Epoch 13, batch 450, loss[loss=0.1472, simple_loss=0.2148, pruned_loss=0.03976, over 4910.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03137, over 870764.91 frames.], batch size: 17, lr: 1.71e-04 +2022-05-07 15:27:53,172 INFO [train.py:715] (7/8) Epoch 13, batch 500, loss[loss=0.1305, simple_loss=0.2007, pruned_loss=0.03012, over 4980.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03119, over 894150.71 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:28:33,650 INFO [train.py:715] (7/8) Epoch 13, batch 550, loss[loss=0.1448, simple_loss=0.2139, pruned_loss=0.03782, over 4922.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03096, over 912496.39 frames.], batch size: 17, lr: 1.71e-04 +2022-05-07 15:29:12,911 INFO [train.py:715] (7/8) Epoch 13, batch 600, loss[loss=0.1197, simple_loss=0.1978, pruned_loss=0.02074, over 4988.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.0309, over 925702.36 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:29:53,388 INFO [train.py:715] (7/8) Epoch 13, batch 650, loss[loss=0.1227, simple_loss=0.2033, pruned_loss=0.02105, over 4945.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03079, over 936020.22 frames.], batch size: 39, lr: 1.71e-04 +2022-05-07 15:30:33,369 INFO [train.py:715] (7/8) Epoch 13, batch 700, loss[loss=0.1082, simple_loss=0.1784, pruned_loss=0.01902, over 4978.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03056, over 944359.28 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:31:13,979 INFO [train.py:715] (7/8) Epoch 13, batch 750, loss[loss=0.1277, simple_loss=0.1988, pruned_loss=0.02829, over 4636.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.0306, over 950542.99 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:31:53,295 INFO [train.py:715] (7/8) Epoch 13, batch 800, 
loss[loss=0.1293, simple_loss=0.2029, pruned_loss=0.02787, over 4982.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03062, over 955399.96 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:32:32,564 INFO [train.py:715] (7/8) Epoch 13, batch 850, loss[loss=0.1328, simple_loss=0.2017, pruned_loss=0.0319, over 4910.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03089, over 960237.76 frames.], batch size: 17, lr: 1.71e-04 +2022-05-07 15:33:12,802 INFO [train.py:715] (7/8) Epoch 13, batch 900, loss[loss=0.1469, simple_loss=0.2321, pruned_loss=0.03083, over 4865.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03029, over 962190.51 frames.], batch size: 20, lr: 1.71e-04 +2022-05-07 15:33:52,199 INFO [train.py:715] (7/8) Epoch 13, batch 950, loss[loss=0.1422, simple_loss=0.2215, pruned_loss=0.03144, over 4811.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03059, over 963931.69 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:34:32,777 INFO [train.py:715] (7/8) Epoch 13, batch 1000, loss[loss=0.1365, simple_loss=0.2212, pruned_loss=0.02595, over 4927.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03085, over 966187.79 frames.], batch size: 23, lr: 1.71e-04 +2022-05-07 15:35:12,238 INFO [train.py:715] (7/8) Epoch 13, batch 1050, loss[loss=0.1102, simple_loss=0.1847, pruned_loss=0.01782, over 4789.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03069, over 966257.35 frames.], batch size: 12, lr: 1.71e-04 +2022-05-07 15:35:52,554 INFO [train.py:715] (7/8) Epoch 13, batch 1100, loss[loss=0.1274, simple_loss=0.2015, pruned_loss=0.02667, over 4856.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2086, pruned_loss=0.03088, over 967685.79 frames.], batch size: 20, lr: 1.71e-04 +2022-05-07 15:36:32,012 INFO [train.py:715] (7/8) Epoch 13, batch 1150, loss[loss=0.1422, simple_loss=0.2008, pruned_loss=0.04179, over 4825.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2081, pruned_loss=0.03065, over 969389.13 frames.], batch size: 30, lr: 1.71e-04 +2022-05-07 15:37:11,797 INFO [train.py:715] (7/8) Epoch 13, batch 1200, loss[loss=0.1604, simple_loss=0.2228, pruned_loss=0.049, over 4939.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2079, pruned_loss=0.03081, over 969563.19 frames.], batch size: 35, lr: 1.71e-04 +2022-05-07 15:37:52,137 INFO [train.py:715] (7/8) Epoch 13, batch 1250, loss[loss=0.1034, simple_loss=0.1788, pruned_loss=0.01406, over 4975.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2077, pruned_loss=0.03101, over 969646.97 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:38:31,099 INFO [train.py:715] (7/8) Epoch 13, batch 1300, loss[loss=0.1245, simple_loss=0.1952, pruned_loss=0.02694, over 4814.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2081, pruned_loss=0.03066, over 969953.82 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:39:11,008 INFO [train.py:715] (7/8) Epoch 13, batch 1350, loss[loss=0.1298, simple_loss=0.202, pruned_loss=0.02885, over 4697.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2084, pruned_loss=0.0309, over 970970.35 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:39:49,774 INFO [train.py:715] (7/8) Epoch 13, batch 1400, loss[loss=0.1487, simple_loss=0.222, pruned_loss=0.03769, over 4750.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03037, over 971999.91 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:40:28,861 INFO [train.py:715] (7/8) Epoch 13, batch 1450, loss[loss=0.145, 
simple_loss=0.2118, pruned_loss=0.03913, over 4818.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03072, over 972476.46 frames.], batch size: 25, lr: 1.71e-04 +2022-05-07 15:41:06,534 INFO [train.py:715] (7/8) Epoch 13, batch 1500, loss[loss=0.1584, simple_loss=0.2388, pruned_loss=0.03896, over 4856.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03087, over 973161.30 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:41:44,152 INFO [train.py:715] (7/8) Epoch 13, batch 1550, loss[loss=0.1556, simple_loss=0.2181, pruned_loss=0.04658, over 4860.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03126, over 973413.39 frames.], batch size: 32, lr: 1.71e-04 +2022-05-07 15:42:22,722 INFO [train.py:715] (7/8) Epoch 13, batch 1600, loss[loss=0.1237, simple_loss=0.2059, pruned_loss=0.0208, over 4937.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2103, pruned_loss=0.03119, over 973221.61 frames.], batch size: 29, lr: 1.71e-04 +2022-05-07 15:43:00,639 INFO [train.py:715] (7/8) Epoch 13, batch 1650, loss[loss=0.1401, simple_loss=0.2195, pruned_loss=0.03035, over 4754.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2106, pruned_loss=0.03129, over 972447.08 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:43:39,378 INFO [train.py:715] (7/8) Epoch 13, batch 1700, loss[loss=0.1398, simple_loss=0.2141, pruned_loss=0.03276, over 4817.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03112, over 972517.29 frames.], batch size: 26, lr: 1.71e-04 +2022-05-07 15:44:17,661 INFO [train.py:715] (7/8) Epoch 13, batch 1750, loss[loss=0.1112, simple_loss=0.1915, pruned_loss=0.0155, over 4915.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03131, over 972553.60 frames.], batch size: 23, lr: 1.71e-04 +2022-05-07 15:44:57,089 INFO [train.py:715] (7/8) Epoch 13, batch 1800, loss[loss=0.1506, simple_loss=0.2327, pruned_loss=0.03429, over 4828.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03158, over 972261.86 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:45:35,169 INFO [train.py:715] (7/8) Epoch 13, batch 1850, loss[loss=0.1232, simple_loss=0.1896, pruned_loss=0.02839, over 4952.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03117, over 971804.34 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:46:13,429 INFO [train.py:715] (7/8) Epoch 13, batch 1900, loss[loss=0.1251, simple_loss=0.207, pruned_loss=0.02159, over 4940.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.0313, over 972451.25 frames.], batch size: 29, lr: 1.71e-04 +2022-05-07 15:46:52,088 INFO [train.py:715] (7/8) Epoch 13, batch 1950, loss[loss=0.1273, simple_loss=0.1989, pruned_loss=0.02786, over 4914.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03139, over 973062.11 frames.], batch size: 23, lr: 1.71e-04 +2022-05-07 15:47:30,464 INFO [train.py:715] (7/8) Epoch 13, batch 2000, loss[loss=0.1466, simple_loss=0.2167, pruned_loss=0.03828, over 4978.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03189, over 973025.55 frames.], batch size: 35, lr: 1.71e-04 +2022-05-07 15:48:09,017 INFO [train.py:715] (7/8) Epoch 13, batch 2050, loss[loss=0.1393, simple_loss=0.2037, pruned_loss=0.03746, over 4809.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03183, over 972996.63 frames.], batch size: 13, lr: 1.71e-04 +2022-05-07 15:48:47,022 INFO [train.py:715] (7/8) Epoch 13, batch 2100, loss[loss=0.1297, simple_loss=0.211, 
pruned_loss=0.02422, over 4882.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.03181, over 972707.85 frames.], batch size: 22, lr: 1.71e-04 +2022-05-07 15:49:26,187 INFO [train.py:715] (7/8) Epoch 13, batch 2150, loss[loss=0.1184, simple_loss=0.1893, pruned_loss=0.02378, over 4844.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03164, over 972495.35 frames.], batch size: 34, lr: 1.71e-04 +2022-05-07 15:50:04,031 INFO [train.py:715] (7/8) Epoch 13, batch 2200, loss[loss=0.1433, simple_loss=0.2162, pruned_loss=0.0352, over 4738.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03157, over 972432.82 frames.], batch size: 16, lr: 1.71e-04 +2022-05-07 15:50:42,240 INFO [train.py:715] (7/8) Epoch 13, batch 2250, loss[loss=0.1393, simple_loss=0.2232, pruned_loss=0.02773, over 4953.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2103, pruned_loss=0.03198, over 972515.84 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:51:20,492 INFO [train.py:715] (7/8) Epoch 13, batch 2300, loss[loss=0.1631, simple_loss=0.2305, pruned_loss=0.04789, over 4944.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.0311, over 972806.12 frames.], batch size: 29, lr: 1.71e-04 +2022-05-07 15:51:59,645 INFO [train.py:715] (7/8) Epoch 13, batch 2350, loss[loss=0.1263, simple_loss=0.2011, pruned_loss=0.02573, over 4804.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03098, over 972723.98 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:52:38,009 INFO [train.py:715] (7/8) Epoch 13, batch 2400, loss[loss=0.1297, simple_loss=0.199, pruned_loss=0.03015, over 4818.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03097, over 972631.84 frames.], batch size: 15, lr: 1.71e-04 +2022-05-07 15:53:16,744 INFO [train.py:715] (7/8) Epoch 13, batch 2450, loss[loss=0.1265, simple_loss=0.2013, pruned_loss=0.02587, over 4988.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03093, over 972346.62 frames.], batch size: 28, lr: 1.71e-04 +2022-05-07 15:53:55,651 INFO [train.py:715] (7/8) Epoch 13, batch 2500, loss[loss=0.1466, simple_loss=0.2264, pruned_loss=0.03339, over 4934.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03075, over 972218.21 frames.], batch size: 21, lr: 1.71e-04 +2022-05-07 15:54:34,062 INFO [train.py:715] (7/8) Epoch 13, batch 2550, loss[loss=0.1341, simple_loss=0.2114, pruned_loss=0.02841, over 4778.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03156, over 972241.73 frames.], batch size: 18, lr: 1.71e-04 +2022-05-07 15:55:12,160 INFO [train.py:715] (7/8) Epoch 13, batch 2600, loss[loss=0.1295, simple_loss=0.2115, pruned_loss=0.02381, over 4968.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03163, over 972544.07 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:55:50,583 INFO [train.py:715] (7/8) Epoch 13, batch 2650, loss[loss=0.1286, simple_loss=0.2116, pruned_loss=0.02277, over 4952.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03151, over 972486.03 frames.], batch size: 24, lr: 1.71e-04 +2022-05-07 15:56:28,665 INFO [train.py:715] (7/8) Epoch 13, batch 2700, loss[loss=0.1479, simple_loss=0.2259, pruned_loss=0.03493, over 4886.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03113, over 972266.81 frames.], batch size: 39, lr: 1.70e-04 +2022-05-07 15:57:06,439 INFO [train.py:715] (7/8) Epoch 13, batch 2750, loss[loss=0.1201, simple_loss=0.1884, pruned_loss=0.0259, 
over 4937.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03168, over 972171.25 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 15:57:43,968 INFO [train.py:715] (7/8) Epoch 13, batch 2800, loss[loss=0.1375, simple_loss=0.2132, pruned_loss=0.03093, over 4825.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03129, over 972378.28 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 15:58:22,562 INFO [train.py:715] (7/8) Epoch 13, batch 2850, loss[loss=0.1335, simple_loss=0.2012, pruned_loss=0.03292, over 4915.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.0312, over 972664.86 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 15:59:00,069 INFO [train.py:715] (7/8) Epoch 13, batch 2900, loss[loss=0.1637, simple_loss=0.2449, pruned_loss=0.04126, over 4771.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03091, over 971677.76 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 15:59:37,966 INFO [train.py:715] (7/8) Epoch 13, batch 2950, loss[loss=0.1383, simple_loss=0.2052, pruned_loss=0.03572, over 4738.00 frames.], tot_loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.0313, over 971234.94 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:00:15,988 INFO [train.py:715] (7/8) Epoch 13, batch 3000, loss[loss=0.1354, simple_loss=0.2043, pruned_loss=0.03327, over 4917.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03135, over 971458.84 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:00:15,989 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 16:00:25,446 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1052, simple_loss=0.1893, pruned_loss=0.01058, over 914524.00 frames. +2022-05-07 16:01:03,672 INFO [train.py:715] (7/8) Epoch 13, batch 3050, loss[loss=0.1578, simple_loss=0.229, pruned_loss=0.04334, over 4936.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.03164, over 971818.61 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:01:42,202 INFO [train.py:715] (7/8) Epoch 13, batch 3100, loss[loss=0.1804, simple_loss=0.2533, pruned_loss=0.05374, over 4846.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03175, over 972412.97 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:02:19,746 INFO [train.py:715] (7/8) Epoch 13, batch 3150, loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03115, over 4864.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03165, over 972466.33 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:02:57,074 INFO [train.py:715] (7/8) Epoch 13, batch 3200, loss[loss=0.1402, simple_loss=0.2079, pruned_loss=0.03622, over 4855.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2107, pruned_loss=0.03193, over 971511.83 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:03:35,543 INFO [train.py:715] (7/8) Epoch 13, batch 3250, loss[loss=0.1309, simple_loss=0.2094, pruned_loss=0.02624, over 4900.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03169, over 971431.32 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:04:13,564 INFO [train.py:715] (7/8) Epoch 13, batch 3300, loss[loss=0.1346, simple_loss=0.213, pruned_loss=0.02807, over 4992.00 frames.], tot_loss[loss=0.137, simple_loss=0.2109, pruned_loss=0.03154, over 971975.12 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:04:51,387 INFO [train.py:715] (7/8) Epoch 13, batch 3350, loss[loss=0.126, simple_loss=0.1975, pruned_loss=0.02725, over 4971.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2107, 
pruned_loss=0.03138, over 972743.70 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:05:29,077 INFO [train.py:715] (7/8) Epoch 13, batch 3400, loss[loss=0.1519, simple_loss=0.2183, pruned_loss=0.04274, over 4875.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03125, over 972262.68 frames.], batch size: 39, lr: 1.70e-04 +2022-05-07 16:06:07,371 INFO [train.py:715] (7/8) Epoch 13, batch 3450, loss[loss=0.1276, simple_loss=0.2059, pruned_loss=0.02465, over 4935.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03072, over 972675.60 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:06:47,671 INFO [train.py:715] (7/8) Epoch 13, batch 3500, loss[loss=0.1524, simple_loss=0.2207, pruned_loss=0.04211, over 4984.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03089, over 972114.66 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:07:25,031 INFO [train.py:715] (7/8) Epoch 13, batch 3550, loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03219, over 4772.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.0311, over 972427.58 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:08:03,490 INFO [train.py:715] (7/8) Epoch 13, batch 3600, loss[loss=0.1568, simple_loss=0.2401, pruned_loss=0.03682, over 4793.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03051, over 972835.30 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:08:41,278 INFO [train.py:715] (7/8) Epoch 13, batch 3650, loss[loss=0.1425, simple_loss=0.2147, pruned_loss=0.03518, over 4975.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03072, over 972117.32 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:09:18,851 INFO [train.py:715] (7/8) Epoch 13, batch 3700, loss[loss=0.1138, simple_loss=0.1841, pruned_loss=0.02175, over 4952.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03023, over 972946.53 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:09:56,570 INFO [train.py:715] (7/8) Epoch 13, batch 3750, loss[loss=0.1176, simple_loss=0.1984, pruned_loss=0.01846, over 4775.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.0305, over 973225.13 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:10:34,803 INFO [train.py:715] (7/8) Epoch 13, batch 3800, loss[loss=0.1247, simple_loss=0.205, pruned_loss=0.0222, over 4886.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03025, over 973213.52 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:11:11,946 INFO [train.py:715] (7/8) Epoch 13, batch 3850, loss[loss=0.1188, simple_loss=0.1971, pruned_loss=0.02022, over 4846.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03049, over 972891.53 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:11:49,243 INFO [train.py:715] (7/8) Epoch 13, batch 3900, loss[loss=0.1386, simple_loss=0.2325, pruned_loss=0.02237, over 4923.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.0308, over 972823.48 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:12:27,131 INFO [train.py:715] (7/8) Epoch 13, batch 3950, loss[loss=0.1283, simple_loss=0.203, pruned_loss=0.02677, over 4912.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.0315, over 973603.41 frames.], batch size: 29, lr: 1.70e-04 +2022-05-07 16:13:05,300 INFO [train.py:715] (7/8) Epoch 13, batch 4000, loss[loss=0.1373, simple_loss=0.2116, pruned_loss=0.03153, over 4830.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.0315, over 
973291.31 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:13:42,997 INFO [train.py:715] (7/8) Epoch 13, batch 4050, loss[loss=0.1487, simple_loss=0.2331, pruned_loss=0.0321, over 4964.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03109, over 972765.07 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:14:20,644 INFO [train.py:715] (7/8) Epoch 13, batch 4100, loss[loss=0.1578, simple_loss=0.2343, pruned_loss=0.04068, over 4887.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03082, over 971929.11 frames.], batch size: 22, lr: 1.70e-04 +2022-05-07 16:14:59,184 INFO [train.py:715] (7/8) Epoch 13, batch 4150, loss[loss=0.1253, simple_loss=0.1885, pruned_loss=0.03105, over 4985.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2091, pruned_loss=0.0314, over 972063.28 frames.], batch size: 31, lr: 1.70e-04 +2022-05-07 16:15:36,528 INFO [train.py:715] (7/8) Epoch 13, batch 4200, loss[loss=0.1194, simple_loss=0.1981, pruned_loss=0.02036, over 4942.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2083, pruned_loss=0.03093, over 972634.59 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 16:16:14,501 INFO [train.py:715] (7/8) Epoch 13, batch 4250, loss[loss=0.09793, simple_loss=0.1659, pruned_loss=0.015, over 4994.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2089, pruned_loss=0.03163, over 972298.10 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:16:52,599 INFO [train.py:715] (7/8) Epoch 13, batch 4300, loss[loss=0.1276, simple_loss=0.1969, pruned_loss=0.02918, over 4826.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2092, pruned_loss=0.03191, over 972357.57 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:17:30,605 INFO [train.py:715] (7/8) Epoch 13, batch 4350, loss[loss=0.1176, simple_loss=0.1972, pruned_loss=0.01896, over 4989.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03115, over 973048.86 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:18:08,276 INFO [train.py:715] (7/8) Epoch 13, batch 4400, loss[loss=0.1614, simple_loss=0.2325, pruned_loss=0.04516, over 4832.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03057, over 972494.60 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:18:46,446 INFO [train.py:715] (7/8) Epoch 13, batch 4450, loss[loss=0.1395, simple_loss=0.2121, pruned_loss=0.03344, over 4746.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.0311, over 972784.96 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:19:25,667 INFO [train.py:715] (7/8) Epoch 13, batch 4500, loss[loss=0.126, simple_loss=0.2043, pruned_loss=0.02381, over 4979.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03094, over 971999.69 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:20:03,829 INFO [train.py:715] (7/8) Epoch 13, batch 4550, loss[loss=0.1432, simple_loss=0.2116, pruned_loss=0.03739, over 4709.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2105, pruned_loss=0.03115, over 971618.47 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:20:40,814 INFO [train.py:715] (7/8) Epoch 13, batch 4600, loss[loss=0.1309, simple_loss=0.2062, pruned_loss=0.02782, over 4819.00 frames.], tot_loss[loss=0.137, simple_loss=0.2112, pruned_loss=0.03139, over 971602.32 frames.], batch size: 27, lr: 1.70e-04 +2022-05-07 16:21:19,532 INFO [train.py:715] (7/8) Epoch 13, batch 4650, loss[loss=0.1449, simple_loss=0.2121, pruned_loss=0.03886, over 4903.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2122, pruned_loss=0.03195, over 971985.45 frames.], batch 
size: 17, lr: 1.70e-04 +2022-05-07 16:21:57,440 INFO [train.py:715] (7/8) Epoch 13, batch 4700, loss[loss=0.1144, simple_loss=0.1917, pruned_loss=0.0186, over 4982.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2114, pruned_loss=0.03166, over 971770.92 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:22:35,611 INFO [train.py:715] (7/8) Epoch 13, batch 4750, loss[loss=0.1441, simple_loss=0.2244, pruned_loss=0.03192, over 4961.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2108, pruned_loss=0.03132, over 973109.11 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:23:13,891 INFO [train.py:715] (7/8) Epoch 13, batch 4800, loss[loss=0.1691, simple_loss=0.2524, pruned_loss=0.04292, over 4971.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2116, pruned_loss=0.03153, over 973223.11 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:23:53,166 INFO [train.py:715] (7/8) Epoch 13, batch 4850, loss[loss=0.1994, simple_loss=0.2667, pruned_loss=0.06606, over 4960.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2109, pruned_loss=0.03107, over 973036.81 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:24:31,290 INFO [train.py:715] (7/8) Epoch 13, batch 4900, loss[loss=0.16, simple_loss=0.2256, pruned_loss=0.04722, over 4880.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2107, pruned_loss=0.03104, over 972908.74 frames.], batch size: 38, lr: 1.70e-04 +2022-05-07 16:25:10,148 INFO [train.py:715] (7/8) Epoch 13, batch 4950, loss[loss=0.1315, simple_loss=0.1957, pruned_loss=0.03368, over 4864.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2105, pruned_loss=0.03104, over 973757.08 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:25:49,563 INFO [train.py:715] (7/8) Epoch 13, batch 5000, loss[loss=0.1197, simple_loss=0.1967, pruned_loss=0.02135, over 4948.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03026, over 972907.45 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:26:28,896 INFO [train.py:715] (7/8) Epoch 13, batch 5050, loss[loss=0.1539, simple_loss=0.2193, pruned_loss=0.04421, over 4745.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03075, over 972624.97 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:27:07,534 INFO [train.py:715] (7/8) Epoch 13, batch 5100, loss[loss=0.1289, simple_loss=0.1975, pruned_loss=0.0302, over 4964.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03087, over 971811.25 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:27:46,963 INFO [train.py:715] (7/8) Epoch 13, batch 5150, loss[loss=0.1338, simple_loss=0.2119, pruned_loss=0.02781, over 4807.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03089, over 973069.87 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:28:26,706 INFO [train.py:715] (7/8) Epoch 13, batch 5200, loss[loss=0.1289, simple_loss=0.2021, pruned_loss=0.02791, over 4987.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2097, pruned_loss=0.0308, over 972656.10 frames.], batch size: 28, lr: 1.70e-04 +2022-05-07 16:29:06,550 INFO [train.py:715] (7/8) Epoch 13, batch 5250, loss[loss=0.1419, simple_loss=0.2132, pruned_loss=0.03535, over 4778.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03098, over 972641.08 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:29:45,231 INFO [train.py:715] (7/8) Epoch 13, batch 5300, loss[loss=0.126, simple_loss=0.2052, pruned_loss=0.02341, over 4890.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03014, over 972122.10 frames.], batch size: 16, lr: 1.70e-04 
+2022-05-07 16:30:25,371 INFO [train.py:715] (7/8) Epoch 13, batch 5350, loss[loss=0.1424, simple_loss=0.199, pruned_loss=0.04292, over 4767.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.0301, over 971707.36 frames.], batch size: 12, lr: 1.70e-04 +2022-05-07 16:31:05,471 INFO [train.py:715] (7/8) Epoch 13, batch 5400, loss[loss=0.1414, simple_loss=0.2126, pruned_loss=0.03515, over 4929.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2096, pruned_loss=0.03049, over 972753.56 frames.], batch size: 23, lr: 1.70e-04 +2022-05-07 16:31:45,406 INFO [train.py:715] (7/8) Epoch 13, batch 5450, loss[loss=0.1018, simple_loss=0.1703, pruned_loss=0.01663, over 4965.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2105, pruned_loss=0.03124, over 972566.82 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:32:24,987 INFO [train.py:715] (7/8) Epoch 13, batch 5500, loss[loss=0.1283, simple_loss=0.2084, pruned_loss=0.02414, over 4756.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2106, pruned_loss=0.03127, over 972181.48 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:33:04,814 INFO [train.py:715] (7/8) Epoch 13, batch 5550, loss[loss=0.1312, simple_loss=0.2064, pruned_loss=0.02805, over 4844.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2111, pruned_loss=0.03135, over 972105.68 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 16:33:44,065 INFO [train.py:715] (7/8) Epoch 13, batch 5600, loss[loss=0.1588, simple_loss=0.2303, pruned_loss=0.04359, over 4860.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2115, pruned_loss=0.03176, over 972768.87 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:34:23,512 INFO [train.py:715] (7/8) Epoch 13, batch 5650, loss[loss=0.1419, simple_loss=0.2209, pruned_loss=0.03149, over 4785.00 frames.], tot_loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03226, over 972403.57 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:35:03,784 INFO [train.py:715] (7/8) Epoch 13, batch 5700, loss[loss=0.1192, simple_loss=0.1897, pruned_loss=0.02432, over 4798.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03151, over 972145.17 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:35:43,897 INFO [train.py:715] (7/8) Epoch 13, batch 5750, loss[loss=0.1412, simple_loss=0.2085, pruned_loss=0.03691, over 4829.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03152, over 971951.07 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:36:22,750 INFO [train.py:715] (7/8) Epoch 13, batch 5800, loss[loss=0.1257, simple_loss=0.1941, pruned_loss=0.02864, over 4984.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03141, over 971741.96 frames.], batch size: 28, lr: 1.70e-04 +2022-05-07 16:37:02,234 INFO [train.py:715] (7/8) Epoch 13, batch 5850, loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02966, over 4973.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03137, over 972746.67 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:37:42,376 INFO [train.py:715] (7/8) Epoch 13, batch 5900, loss[loss=0.139, simple_loss=0.2209, pruned_loss=0.02848, over 4860.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.0317, over 972991.04 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 16:38:21,734 INFO [train.py:715] (7/8) Epoch 13, batch 5950, loss[loss=0.1299, simple_loss=0.2004, pruned_loss=0.02968, over 4801.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2108, pruned_loss=0.03219, over 973417.21 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 
16:39:01,213 INFO [train.py:715] (7/8) Epoch 13, batch 6000, loss[loss=0.155, simple_loss=0.2243, pruned_loss=0.04285, over 4933.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.03176, over 973136.97 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 16:39:01,214 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 16:39:10,779 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1893, pruned_loss=0.01078, over 914524.00 frames. +2022-05-07 16:39:50,262 INFO [train.py:715] (7/8) Epoch 13, batch 6050, loss[loss=0.1218, simple_loss=0.1942, pruned_loss=0.02464, over 4942.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03125, over 972986.09 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 16:40:29,776 INFO [train.py:715] (7/8) Epoch 13, batch 6100, loss[loss=0.138, simple_loss=0.2222, pruned_loss=0.02686, over 4974.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03143, over 973956.56 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 16:41:09,342 INFO [train.py:715] (7/8) Epoch 13, batch 6150, loss[loss=0.1589, simple_loss=0.2304, pruned_loss=0.0437, over 4818.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03171, over 972852.48 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:41:47,238 INFO [train.py:715] (7/8) Epoch 13, batch 6200, loss[loss=0.1106, simple_loss=0.1835, pruned_loss=0.0189, over 4916.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03117, over 972872.95 frames.], batch size: 23, lr: 1.70e-04 +2022-05-07 16:42:26,290 INFO [train.py:715] (7/8) Epoch 13, batch 6250, loss[loss=0.1306, simple_loss=0.2042, pruned_loss=0.02845, over 4955.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03082, over 972977.60 frames.], batch size: 28, lr: 1.70e-04 +2022-05-07 16:43:05,823 INFO [train.py:715] (7/8) Epoch 13, batch 6300, loss[loss=0.1238, simple_loss=0.2021, pruned_loss=0.02277, over 4815.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03106, over 972352.85 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:43:44,411 INFO [train.py:715] (7/8) Epoch 13, batch 6350, loss[loss=0.1798, simple_loss=0.2361, pruned_loss=0.06174, over 4981.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2097, pruned_loss=0.03175, over 973270.36 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:44:24,227 INFO [train.py:715] (7/8) Epoch 13, batch 6400, loss[loss=0.1233, simple_loss=0.2022, pruned_loss=0.02218, over 4830.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03132, over 972120.01 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:45:04,048 INFO [train.py:715] (7/8) Epoch 13, batch 6450, loss[loss=0.1317, simple_loss=0.2107, pruned_loss=0.0263, over 4883.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03086, over 972047.82 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:45:44,139 INFO [train.py:715] (7/8) Epoch 13, batch 6500, loss[loss=0.1666, simple_loss=0.2238, pruned_loss=0.05473, over 4690.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03094, over 972094.53 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:46:23,312 INFO [train.py:715] (7/8) Epoch 13, batch 6550, loss[loss=0.1447, simple_loss=0.2236, pruned_loss=0.03291, over 4758.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03111, over 971835.22 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:47:02,648 INFO [train.py:715] (7/8) Epoch 13, batch 6600, 
loss[loss=0.1314, simple_loss=0.2098, pruned_loss=0.02645, over 4983.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03133, over 972173.55 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:47:42,041 INFO [train.py:715] (7/8) Epoch 13, batch 6650, loss[loss=0.1287, simple_loss=0.2024, pruned_loss=0.02748, over 4817.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03105, over 972751.30 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:48:20,274 INFO [train.py:715] (7/8) Epoch 13, batch 6700, loss[loss=0.1179, simple_loss=0.179, pruned_loss=0.02837, over 4845.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03094, over 972341.08 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 16:48:58,715 INFO [train.py:715] (7/8) Epoch 13, batch 6750, loss[loss=0.1559, simple_loss=0.2221, pruned_loss=0.04486, over 4856.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03152, over 972829.76 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:49:37,983 INFO [train.py:715] (7/8) Epoch 13, batch 6800, loss[loss=0.1344, simple_loss=0.2168, pruned_loss=0.02598, over 4984.00 frames.], tot_loss[loss=0.136, simple_loss=0.2092, pruned_loss=0.0314, over 972909.30 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 16:50:17,431 INFO [train.py:715] (7/8) Epoch 13, batch 6850, loss[loss=0.1412, simple_loss=0.2102, pruned_loss=0.03606, over 4876.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03185, over 972022.99 frames.], batch size: 32, lr: 1.70e-04 +2022-05-07 16:50:55,363 INFO [train.py:715] (7/8) Epoch 13, batch 6900, loss[loss=0.1148, simple_loss=0.1988, pruned_loss=0.01534, over 4979.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03133, over 971921.06 frames.], batch size: 28, lr: 1.70e-04 +2022-05-07 16:51:33,401 INFO [train.py:715] (7/8) Epoch 13, batch 6950, loss[loss=0.1271, simple_loss=0.1921, pruned_loss=0.03109, over 4755.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03121, over 971653.47 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:52:12,639 INFO [train.py:715] (7/8) Epoch 13, batch 7000, loss[loss=0.1198, simple_loss=0.201, pruned_loss=0.01929, over 4987.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03179, over 971623.57 frames.], batch size: 28, lr: 1.70e-04 +2022-05-07 16:52:51,281 INFO [train.py:715] (7/8) Epoch 13, batch 7050, loss[loss=0.1157, simple_loss=0.191, pruned_loss=0.02019, over 4908.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.03142, over 971918.75 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 16:53:30,245 INFO [train.py:715] (7/8) Epoch 13, batch 7100, loss[loss=0.1403, simple_loss=0.2138, pruned_loss=0.03341, over 4900.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03194, over 972298.14 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:54:09,705 INFO [train.py:715] (7/8) Epoch 13, batch 7150, loss[loss=0.1265, simple_loss=0.2044, pruned_loss=0.02425, over 4991.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2092, pruned_loss=0.03153, over 972142.62 frames.], batch size: 20, lr: 1.70e-04 +2022-05-07 16:54:49,405 INFO [train.py:715] (7/8) Epoch 13, batch 7200, loss[loss=0.1281, simple_loss=0.1997, pruned_loss=0.0282, over 4829.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03139, over 972011.19 frames.], batch size: 15, lr: 1.70e-04 +2022-05-07 16:55:27,553 INFO [train.py:715] (7/8) Epoch 13, batch 7250, loss[loss=0.1635, 
simple_loss=0.2319, pruned_loss=0.04755, over 4768.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03182, over 971958.91 frames.], batch size: 18, lr: 1.70e-04 +2022-05-07 16:56:05,824 INFO [train.py:715] (7/8) Epoch 13, batch 7300, loss[loss=0.148, simple_loss=0.2201, pruned_loss=0.0379, over 4973.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03156, over 972331.59 frames.], batch size: 14, lr: 1.70e-04 +2022-05-07 16:56:45,076 INFO [train.py:715] (7/8) Epoch 13, batch 7350, loss[loss=0.1342, simple_loss=0.216, pruned_loss=0.02622, over 4751.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03096, over 972513.88 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 16:57:23,718 INFO [train.py:715] (7/8) Epoch 13, batch 7400, loss[loss=0.1388, simple_loss=0.2046, pruned_loss=0.03646, over 4965.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03149, over 973267.63 frames.], batch size: 24, lr: 1.70e-04 +2022-05-07 16:58:01,541 INFO [train.py:715] (7/8) Epoch 13, batch 7450, loss[loss=0.142, simple_loss=0.2197, pruned_loss=0.03218, over 4833.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03107, over 973063.17 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 16:58:40,993 INFO [train.py:715] (7/8) Epoch 13, batch 7500, loss[loss=0.1832, simple_loss=0.2466, pruned_loss=0.05989, over 4837.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03117, over 972980.15 frames.], batch size: 30, lr: 1.70e-04 +2022-05-07 16:59:20,235 INFO [train.py:715] (7/8) Epoch 13, batch 7550, loss[loss=0.1653, simple_loss=0.2377, pruned_loss=0.04639, over 4734.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03102, over 972428.27 frames.], batch size: 16, lr: 1.70e-04 +2022-05-07 16:59:57,836 INFO [train.py:715] (7/8) Epoch 13, batch 7600, loss[loss=0.1346, simple_loss=0.2186, pruned_loss=0.0253, over 4818.00 frames.], tot_loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.03127, over 972525.40 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 17:00:36,714 INFO [train.py:715] (7/8) Epoch 13, batch 7650, loss[loss=0.1476, simple_loss=0.2155, pruned_loss=0.03986, over 4969.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.0314, over 972443.37 frames.], batch size: 35, lr: 1.70e-04 +2022-05-07 17:01:15,684 INFO [train.py:715] (7/8) Epoch 13, batch 7700, loss[loss=0.1373, simple_loss=0.2085, pruned_loss=0.03307, over 4917.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2091, pruned_loss=0.03126, over 972942.68 frames.], batch size: 23, lr: 1.70e-04 +2022-05-07 17:01:54,651 INFO [train.py:715] (7/8) Epoch 13, batch 7750, loss[loss=0.1116, simple_loss=0.1833, pruned_loss=0.01992, over 4895.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2089, pruned_loss=0.03113, over 972950.82 frames.], batch size: 17, lr: 1.70e-04 +2022-05-07 17:02:32,576 INFO [train.py:715] (7/8) Epoch 13, batch 7800, loss[loss=0.1346, simple_loss=0.1996, pruned_loss=0.03484, over 4986.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03081, over 972628.46 frames.], batch size: 26, lr: 1.70e-04 +2022-05-07 17:03:11,061 INFO [train.py:715] (7/8) Epoch 13, batch 7850, loss[loss=0.1309, simple_loss=0.2075, pruned_loss=0.02716, over 4809.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.0311, over 972706.95 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 17:03:50,703 INFO [train.py:715] (7/8) Epoch 13, batch 7900, loss[loss=0.1203, simple_loss=0.199, 
pruned_loss=0.02083, over 4746.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03114, over 973300.88 frames.], batch size: 19, lr: 1.70e-04 +2022-05-07 17:04:28,752 INFO [train.py:715] (7/8) Epoch 13, batch 7950, loss[loss=0.1571, simple_loss=0.2388, pruned_loss=0.03773, over 4796.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03129, over 972848.55 frames.], batch size: 25, lr: 1.70e-04 +2022-05-07 17:05:07,214 INFO [train.py:715] (7/8) Epoch 13, batch 8000, loss[loss=0.1135, simple_loss=0.1794, pruned_loss=0.02382, over 4950.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03113, over 973117.50 frames.], batch size: 21, lr: 1.70e-04 +2022-05-07 17:05:45,981 INFO [train.py:715] (7/8) Epoch 13, batch 8050, loss[loss=0.147, simple_loss=0.2289, pruned_loss=0.0326, over 4825.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03163, over 972781.91 frames.], batch size: 13, lr: 1.70e-04 +2022-05-07 17:06:24,532 INFO [train.py:715] (7/8) Epoch 13, batch 8100, loss[loss=0.1502, simple_loss=0.2146, pruned_loss=0.04287, over 4799.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03185, over 972864.37 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:07:02,515 INFO [train.py:715] (7/8) Epoch 13, batch 8150, loss[loss=0.1503, simple_loss=0.22, pruned_loss=0.04035, over 4941.00 frames.], tot_loss[loss=0.138, simple_loss=0.2114, pruned_loss=0.03227, over 973348.66 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 17:07:40,992 INFO [train.py:715] (7/8) Epoch 13, batch 8200, loss[loss=0.1382, simple_loss=0.2129, pruned_loss=0.03175, over 4846.00 frames.], tot_loss[loss=0.1382, simple_loss=0.2118, pruned_loss=0.03233, over 973426.59 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 17:08:20,202 INFO [train.py:715] (7/8) Epoch 13, batch 8250, loss[loss=0.1288, simple_loss=0.2073, pruned_loss=0.02519, over 4914.00 frames.], tot_loss[loss=0.1384, simple_loss=0.212, pruned_loss=0.03243, over 973257.91 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 17:08:58,109 INFO [train.py:715] (7/8) Epoch 13, batch 8300, loss[loss=0.1257, simple_loss=0.2038, pruned_loss=0.02379, over 4892.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03219, over 973639.36 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:09:36,540 INFO [train.py:715] (7/8) Epoch 13, batch 8350, loss[loss=0.1223, simple_loss=0.1991, pruned_loss=0.02272, over 4831.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03168, over 974028.98 frames.], batch size: 26, lr: 1.69e-04 +2022-05-07 17:10:15,701 INFO [train.py:715] (7/8) Epoch 13, batch 8400, loss[loss=0.1221, simple_loss=0.1924, pruned_loss=0.0259, over 4947.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.0317, over 973780.53 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:10:54,561 INFO [train.py:715] (7/8) Epoch 13, batch 8450, loss[loss=0.1513, simple_loss=0.2201, pruned_loss=0.04118, over 4922.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2105, pruned_loss=0.03152, over 973264.05 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:11:32,554 INFO [train.py:715] (7/8) Epoch 13, batch 8500, loss[loss=0.1323, simple_loss=0.2046, pruned_loss=0.02995, over 4963.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 972659.30 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:12:11,695 INFO [train.py:715] (7/8) Epoch 13, batch 8550, loss[loss=0.1301, simple_loss=0.2068, pruned_loss=0.0267, over 
4843.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03072, over 972439.73 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:12:50,649 INFO [train.py:715] (7/8) Epoch 13, batch 8600, loss[loss=0.13, simple_loss=0.2148, pruned_loss=0.02257, over 4794.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03052, over 972562.38 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:13:28,882 INFO [train.py:715] (7/8) Epoch 13, batch 8650, loss[loss=0.09976, simple_loss=0.1778, pruned_loss=0.01086, over 4975.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03012, over 971482.87 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:14:07,225 INFO [train.py:715] (7/8) Epoch 13, batch 8700, loss[loss=0.1274, simple_loss=0.1949, pruned_loss=0.02991, over 4960.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.0303, over 972048.20 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:14:45,866 INFO [train.py:715] (7/8) Epoch 13, batch 8750, loss[loss=0.142, simple_loss=0.2231, pruned_loss=0.03045, over 4891.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03082, over 972360.48 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:15:24,585 INFO [train.py:715] (7/8) Epoch 13, batch 8800, loss[loss=0.1514, simple_loss=0.2178, pruned_loss=0.04249, over 4766.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2092, pruned_loss=0.03118, over 972450.77 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:16:02,884 INFO [train.py:715] (7/8) Epoch 13, batch 8850, loss[loss=0.1325, simple_loss=0.212, pruned_loss=0.02649, over 4780.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.0305, over 972319.82 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:16:40,973 INFO [train.py:715] (7/8) Epoch 13, batch 8900, loss[loss=0.1097, simple_loss=0.1884, pruned_loss=0.01543, over 4928.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03079, over 973397.06 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 17:17:19,698 INFO [train.py:715] (7/8) Epoch 13, batch 8950, loss[loss=0.1464, simple_loss=0.2186, pruned_loss=0.03709, over 4986.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03132, over 973385.89 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:17:57,825 INFO [train.py:715] (7/8) Epoch 13, batch 9000, loss[loss=0.1289, simple_loss=0.2135, pruned_loss=0.02211, over 4912.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03095, over 973656.74 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 17:17:57,826 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 17:18:07,452 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1055, simple_loss=0.1893, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 17:18:45,500 INFO [train.py:715] (7/8) Epoch 13, batch 9050, loss[loss=0.155, simple_loss=0.2295, pruned_loss=0.04029, over 4985.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03043, over 972914.01 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:19:23,910 INFO [train.py:715] (7/8) Epoch 13, batch 9100, loss[loss=0.1381, simple_loss=0.2116, pruned_loss=0.03236, over 4960.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03037, over 973334.17 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 17:20:03,099 INFO [train.py:715] (7/8) Epoch 13, batch 9150, loss[loss=0.1639, simple_loss=0.2247, pruned_loss=0.0515, over 4902.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03136, over 972788.27 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:20:42,095 INFO [train.py:715] (7/8) Epoch 13, batch 9200, loss[loss=0.1144, simple_loss=0.1913, pruned_loss=0.01877, over 4755.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03124, over 972464.16 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:21:20,011 INFO [train.py:715] (7/8) Epoch 13, batch 9250, loss[loss=0.1361, simple_loss=0.2217, pruned_loss=0.02526, over 4852.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2109, pruned_loss=0.03091, over 972862.69 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:21:58,913 INFO [train.py:715] (7/8) Epoch 13, batch 9300, loss[loss=0.1192, simple_loss=0.2002, pruned_loss=0.01915, over 4763.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2113, pruned_loss=0.03145, over 972920.41 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:22:37,765 INFO [train.py:715] (7/8) Epoch 13, batch 9350, loss[loss=0.1499, simple_loss=0.2238, pruned_loss=0.03804, over 4960.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.03092, over 972432.27 frames.], batch size: 35, lr: 1.69e-04 +2022-05-07 17:23:15,582 INFO [train.py:715] (7/8) Epoch 13, batch 9400, loss[loss=0.1452, simple_loss=0.2126, pruned_loss=0.0389, over 4973.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03121, over 972267.87 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:23:54,032 INFO [train.py:715] (7/8) Epoch 13, batch 9450, loss[loss=0.1375, simple_loss=0.2171, pruned_loss=0.02892, over 4812.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03137, over 972934.03 frames.], batch size: 25, lr: 1.69e-04 +2022-05-07 17:24:32,854 INFO [train.py:715] (7/8) Epoch 13, batch 9500, loss[loss=0.172, simple_loss=0.2464, pruned_loss=0.04879, over 4758.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.03174, over 972718.23 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 17:25:11,108 INFO [train.py:715] (7/8) Epoch 13, batch 9550, loss[loss=0.1301, simple_loss=0.2003, pruned_loss=0.02993, over 4778.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03139, over 972618.90 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:25:49,079 INFO [train.py:715] (7/8) Epoch 13, batch 9600, loss[loss=0.1483, simple_loss=0.2085, pruned_loss=0.04405, over 4981.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.0311, over 972696.89 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:26:28,021 INFO [train.py:715] (7/8) Epoch 13, batch 9650, loss[loss=0.1374, simple_loss=0.2105, pruned_loss=0.03213, over 4822.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03097, over 972690.25 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 
17:27:06,439 INFO [train.py:715] (7/8) Epoch 13, batch 9700, loss[loss=0.1213, simple_loss=0.2042, pruned_loss=0.01926, over 4867.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03083, over 972612.61 frames.], batch size: 22, lr: 1.69e-04 +2022-05-07 17:27:44,980 INFO [train.py:715] (7/8) Epoch 13, batch 9750, loss[loss=0.1327, simple_loss=0.21, pruned_loss=0.02774, over 4771.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.0311, over 972751.69 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:28:23,843 INFO [train.py:715] (7/8) Epoch 13, batch 9800, loss[loss=0.1456, simple_loss=0.219, pruned_loss=0.03613, over 4938.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03094, over 972600.21 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 17:29:03,033 INFO [train.py:715] (7/8) Epoch 13, batch 9850, loss[loss=0.1328, simple_loss=0.2124, pruned_loss=0.02661, over 4962.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03145, over 973155.96 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:29:41,580 INFO [train.py:715] (7/8) Epoch 13, batch 9900, loss[loss=0.1342, simple_loss=0.2125, pruned_loss=0.02793, over 4739.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03157, over 973001.68 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 17:30:19,822 INFO [train.py:715] (7/8) Epoch 13, batch 9950, loss[loss=0.1311, simple_loss=0.2137, pruned_loss=0.02426, over 4792.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2113, pruned_loss=0.03173, over 973446.10 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:30:58,615 INFO [train.py:715] (7/8) Epoch 13, batch 10000, loss[loss=0.1135, simple_loss=0.1864, pruned_loss=0.02034, over 4812.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2116, pruned_loss=0.03177, over 973426.29 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:31:37,785 INFO [train.py:715] (7/8) Epoch 13, batch 10050, loss[loss=0.1357, simple_loss=0.2152, pruned_loss=0.0281, over 4697.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2114, pruned_loss=0.03155, over 973017.23 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:32:16,720 INFO [train.py:715] (7/8) Epoch 13, batch 10100, loss[loss=0.1206, simple_loss=0.1944, pruned_loss=0.02339, over 4957.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03124, over 972549.00 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:32:54,964 INFO [train.py:715] (7/8) Epoch 13, batch 10150, loss[loss=0.1392, simple_loss=0.214, pruned_loss=0.0322, over 4982.00 frames.], tot_loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03173, over 972905.86 frames.], batch size: 31, lr: 1.69e-04 +2022-05-07 17:33:33,998 INFO [train.py:715] (7/8) Epoch 13, batch 10200, loss[loss=0.1321, simple_loss=0.2054, pruned_loss=0.02935, over 4954.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2111, pruned_loss=0.03136, over 971867.07 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 17:34:13,393 INFO [train.py:715] (7/8) Epoch 13, batch 10250, loss[loss=0.1051, simple_loss=0.1698, pruned_loss=0.02023, over 4805.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03151, over 971083.38 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:34:52,082 INFO [train.py:715] (7/8) Epoch 13, batch 10300, loss[loss=0.1316, simple_loss=0.202, pruned_loss=0.03059, over 4981.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.0315, over 971958.23 frames.], batch size: 25, lr: 1.69e-04 +2022-05-07 17:35:31,126 INFO 
[train.py:715] (7/8) Epoch 13, batch 10350, loss[loss=0.1155, simple_loss=0.1874, pruned_loss=0.02187, over 4765.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.031, over 971663.66 frames.], batch size: 12, lr: 1.69e-04 +2022-05-07 17:36:10,304 INFO [train.py:715] (7/8) Epoch 13, batch 10400, loss[loss=0.1673, simple_loss=0.2438, pruned_loss=0.04541, over 4972.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.0314, over 971888.66 frames.], batch size: 39, lr: 1.69e-04 +2022-05-07 17:36:49,250 INFO [train.py:715] (7/8) Epoch 13, batch 10450, loss[loss=0.1109, simple_loss=0.1868, pruned_loss=0.01754, over 4795.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.0311, over 971920.63 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:37:26,677 INFO [train.py:715] (7/8) Epoch 13, batch 10500, loss[loss=0.1223, simple_loss=0.1834, pruned_loss=0.03057, over 4844.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.0312, over 971502.95 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:38:05,566 INFO [train.py:715] (7/8) Epoch 13, batch 10550, loss[loss=0.1124, simple_loss=0.188, pruned_loss=0.01845, over 4887.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03082, over 971606.85 frames.], batch size: 22, lr: 1.69e-04 +2022-05-07 17:38:44,487 INFO [train.py:715] (7/8) Epoch 13, batch 10600, loss[loss=0.1327, simple_loss=0.2163, pruned_loss=0.0245, over 4945.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03042, over 971905.42 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 17:39:22,593 INFO [train.py:715] (7/8) Epoch 13, batch 10650, loss[loss=0.1348, simple_loss=0.2054, pruned_loss=0.03214, over 4642.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03055, over 972158.96 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:40:01,765 INFO [train.py:715] (7/8) Epoch 13, batch 10700, loss[loss=0.1325, simple_loss=0.2151, pruned_loss=0.02494, over 4981.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03031, over 971548.50 frames.], batch size: 28, lr: 1.69e-04 +2022-05-07 17:40:41,061 INFO [train.py:715] (7/8) Epoch 13, batch 10750, loss[loss=0.1389, simple_loss=0.2217, pruned_loss=0.02806, over 4705.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03124, over 970836.33 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:41:19,858 INFO [train.py:715] (7/8) Epoch 13, batch 10800, loss[loss=0.1339, simple_loss=0.2108, pruned_loss=0.02851, over 4830.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2097, pruned_loss=0.0309, over 970860.43 frames.], batch size: 26, lr: 1.69e-04 +2022-05-07 17:41:57,880 INFO [train.py:715] (7/8) Epoch 13, batch 10850, loss[loss=0.1539, simple_loss=0.2137, pruned_loss=0.04701, over 4842.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03124, over 971188.63 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 17:42:37,034 INFO [train.py:715] (7/8) Epoch 13, batch 10900, loss[loss=0.1147, simple_loss=0.1963, pruned_loss=0.01653, over 4814.00 frames.], tot_loss[loss=0.137, simple_loss=0.2106, pruned_loss=0.0317, over 971426.61 frames.], batch size: 27, lr: 1.69e-04 +2022-05-07 17:43:16,892 INFO [train.py:715] (7/8) Epoch 13, batch 10950, loss[loss=0.1261, simple_loss=0.2079, pruned_loss=0.02211, over 4926.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03107, over 972573.19 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 17:43:56,321 INFO [train.py:715] 
(7/8) Epoch 13, batch 11000, loss[loss=0.1471, simple_loss=0.2223, pruned_loss=0.03598, over 4782.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03067, over 973138.65 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:44:34,953 INFO [train.py:715] (7/8) Epoch 13, batch 11050, loss[loss=0.131, simple_loss=0.2108, pruned_loss=0.02558, over 4795.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03045, over 972811.95 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:45:14,250 INFO [train.py:715] (7/8) Epoch 13, batch 11100, loss[loss=0.1539, simple_loss=0.2245, pruned_loss=0.04169, over 4979.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03064, over 971972.06 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:45:53,236 INFO [train.py:715] (7/8) Epoch 13, batch 11150, loss[loss=0.125, simple_loss=0.1962, pruned_loss=0.02686, over 4843.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03073, over 971933.37 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:46:30,989 INFO [train.py:715] (7/8) Epoch 13, batch 11200, loss[loss=0.1591, simple_loss=0.2291, pruned_loss=0.04454, over 4692.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2081, pruned_loss=0.03054, over 972082.74 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:47:09,193 INFO [train.py:715] (7/8) Epoch 13, batch 11250, loss[loss=0.1504, simple_loss=0.2106, pruned_loss=0.04513, over 4968.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 972628.89 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:47:48,138 INFO [train.py:715] (7/8) Epoch 13, batch 11300, loss[loss=0.1111, simple_loss=0.1891, pruned_loss=0.01652, over 4938.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03123, over 973813.42 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:48:27,086 INFO [train.py:715] (7/8) Epoch 13, batch 11350, loss[loss=0.1409, simple_loss=0.2242, pruned_loss=0.02879, over 4917.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03078, over 973971.45 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 17:49:05,291 INFO [train.py:715] (7/8) Epoch 13, batch 11400, loss[loss=0.1195, simple_loss=0.1933, pruned_loss=0.0228, over 4852.00 frames.], tot_loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.03023, over 973112.00 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:49:44,156 INFO [train.py:715] (7/8) Epoch 13, batch 11450, loss[loss=0.1022, simple_loss=0.1771, pruned_loss=0.01368, over 4824.00 frames.], tot_loss[loss=0.134, simple_loss=0.2075, pruned_loss=0.03025, over 973224.09 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 17:50:25,718 INFO [train.py:715] (7/8) Epoch 13, batch 11500, loss[loss=0.1325, simple_loss=0.2038, pruned_loss=0.03061, over 4912.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2081, pruned_loss=0.03059, over 972175.74 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 17:51:03,651 INFO [train.py:715] (7/8) Epoch 13, batch 11550, loss[loss=0.1211, simple_loss=0.1937, pruned_loss=0.02428, over 4798.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03046, over 972151.04 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 17:51:42,316 INFO [train.py:715] (7/8) Epoch 13, batch 11600, loss[loss=0.147, simple_loss=0.2167, pruned_loss=0.0386, over 4799.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2078, pruned_loss=0.03066, over 972141.80 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 17:52:21,598 INFO [train.py:715] (7/8) Epoch 
13, batch 11650, loss[loss=0.1399, simple_loss=0.2161, pruned_loss=0.03182, over 4845.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03075, over 972361.33 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 17:53:00,309 INFO [train.py:715] (7/8) Epoch 13, batch 11700, loss[loss=0.1225, simple_loss=0.1867, pruned_loss=0.02919, over 4931.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.0308, over 972431.60 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:53:38,276 INFO [train.py:715] (7/8) Epoch 13, batch 11750, loss[loss=0.1327, simple_loss=0.2101, pruned_loss=0.02766, over 4897.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03059, over 971761.00 frames.], batch size: 22, lr: 1.69e-04 +2022-05-07 17:54:16,754 INFO [train.py:715] (7/8) Epoch 13, batch 11800, loss[loss=0.1424, simple_loss=0.2174, pruned_loss=0.03369, over 4981.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03066, over 972058.91 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 17:54:55,480 INFO [train.py:715] (7/8) Epoch 13, batch 11850, loss[loss=0.126, simple_loss=0.1969, pruned_loss=0.02758, over 4897.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03112, over 972279.79 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:55:32,888 INFO [train.py:715] (7/8) Epoch 13, batch 11900, loss[loss=0.1402, simple_loss=0.2047, pruned_loss=0.03779, over 4972.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03136, over 972139.67 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 17:56:11,544 INFO [train.py:715] (7/8) Epoch 13, batch 11950, loss[loss=0.142, simple_loss=0.2199, pruned_loss=0.03208, over 4871.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03121, over 971567.73 frames.], batch size: 20, lr: 1.69e-04 +2022-05-07 17:56:50,609 INFO [train.py:715] (7/8) Epoch 13, batch 12000, loss[loss=0.1302, simple_loss=0.2019, pruned_loss=0.02923, over 4908.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03074, over 972726.36 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 17:56:50,610 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 17:57:00,357 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1055, simple_loss=0.1893, pruned_loss=0.01081, over 914524.00 frames. 
+2022-05-07 17:57:40,024 INFO [train.py:715] (7/8) Epoch 13, batch 12050, loss[loss=0.1344, simple_loss=0.1953, pruned_loss=0.03677, over 4992.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2079, pruned_loss=0.03045, over 972995.60 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 17:58:18,317 INFO [train.py:715] (7/8) Epoch 13, batch 12100, loss[loss=0.16, simple_loss=0.2322, pruned_loss=0.0439, over 4921.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.0304, over 972636.95 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 17:58:56,071 INFO [train.py:715] (7/8) Epoch 13, batch 12150, loss[loss=0.1258, simple_loss=0.1894, pruned_loss=0.03111, over 4886.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03075, over 973592.94 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 17:59:34,972 INFO [train.py:715] (7/8) Epoch 13, batch 12200, loss[loss=0.1258, simple_loss=0.1924, pruned_loss=0.02957, over 4852.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03074, over 972943.26 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 18:00:13,889 INFO [train.py:715] (7/8) Epoch 13, batch 12250, loss[loss=0.1252, simple_loss=0.1936, pruned_loss=0.02842, over 4903.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03098, over 972813.55 frames.], batch size: 19, lr: 1.69e-04 +2022-05-07 18:00:52,458 INFO [train.py:715] (7/8) Epoch 13, batch 12300, loss[loss=0.1338, simple_loss=0.2156, pruned_loss=0.02596, over 4939.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2107, pruned_loss=0.03116, over 972925.37 frames.], batch size: 21, lr: 1.69e-04 +2022-05-07 18:01:30,135 INFO [train.py:715] (7/8) Epoch 13, batch 12350, loss[loss=0.1336, simple_loss=0.2054, pruned_loss=0.03092, over 4786.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2103, pruned_loss=0.0309, over 971946.06 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 18:02:09,067 INFO [train.py:715] (7/8) Epoch 13, batch 12400, loss[loss=0.1454, simple_loss=0.2121, pruned_loss=0.0394, over 4642.00 frames.], tot_loss[loss=0.137, simple_loss=0.2111, pruned_loss=0.03149, over 971042.25 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 18:02:47,457 INFO [train.py:715] (7/8) Epoch 13, batch 12450, loss[loss=0.1412, simple_loss=0.2172, pruned_loss=0.03256, over 4959.00 frames.], tot_loss[loss=0.1372, simple_loss=0.211, pruned_loss=0.03165, over 971984.56 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 18:03:24,471 INFO [train.py:715] (7/8) Epoch 13, batch 12500, loss[loss=0.15, simple_loss=0.2232, pruned_loss=0.03833, over 4967.00 frames.], tot_loss[loss=0.138, simple_loss=0.2119, pruned_loss=0.03205, over 971877.29 frames.], batch size: 15, lr: 1.69e-04 +2022-05-07 18:04:03,260 INFO [train.py:715] (7/8) Epoch 13, batch 12550, loss[loss=0.1496, simple_loss=0.2319, pruned_loss=0.03365, over 4925.00 frames.], tot_loss[loss=0.1392, simple_loss=0.2129, pruned_loss=0.03273, over 971187.87 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 18:04:41,883 INFO [train.py:715] (7/8) Epoch 13, batch 12600, loss[loss=0.1601, simple_loss=0.2302, pruned_loss=0.045, over 4655.00 frames.], tot_loss[loss=0.1386, simple_loss=0.2121, pruned_loss=0.0325, over 971247.47 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 18:05:20,412 INFO [train.py:715] (7/8) Epoch 13, batch 12650, loss[loss=0.1592, simple_loss=0.2245, pruned_loss=0.04694, over 4774.00 frames.], tot_loss[loss=0.138, simple_loss=0.2117, pruned_loss=0.03215, over 971211.16 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:05:58,205 
INFO [train.py:715] (7/8) Epoch 13, batch 12700, loss[loss=0.1246, simple_loss=0.196, pruned_loss=0.02662, over 4796.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03144, over 971463.70 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 18:06:37,494 INFO [train.py:715] (7/8) Epoch 13, batch 12750, loss[loss=0.1217, simple_loss=0.19, pruned_loss=0.02673, over 4885.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03133, over 971563.31 frames.], batch size: 32, lr: 1.69e-04 +2022-05-07 18:07:16,116 INFO [train.py:715] (7/8) Epoch 13, batch 12800, loss[loss=0.1269, simple_loss=0.2057, pruned_loss=0.0241, over 4838.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03108, over 972576.01 frames.], batch size: 26, lr: 1.69e-04 +2022-05-07 18:07:53,799 INFO [train.py:715] (7/8) Epoch 13, batch 12850, loss[loss=0.1249, simple_loss=0.2012, pruned_loss=0.02423, over 4923.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.0305, over 971730.82 frames.], batch size: 23, lr: 1.69e-04 +2022-05-07 18:08:32,301 INFO [train.py:715] (7/8) Epoch 13, batch 12900, loss[loss=0.1287, simple_loss=0.2036, pruned_loss=0.02689, over 4833.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03047, over 970699.05 frames.], batch size: 13, lr: 1.69e-04 +2022-05-07 18:09:10,901 INFO [train.py:715] (7/8) Epoch 13, batch 12950, loss[loss=0.1419, simple_loss=0.209, pruned_loss=0.0374, over 4922.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03027, over 970930.65 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:09:48,857 INFO [train.py:715] (7/8) Epoch 13, batch 13000, loss[loss=0.14, simple_loss=0.2177, pruned_loss=0.0312, over 4892.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03086, over 970753.90 frames.], batch size: 16, lr: 1.69e-04 +2022-05-07 18:10:26,256 INFO [train.py:715] (7/8) Epoch 13, batch 13050, loss[loss=0.1226, simple_loss=0.1947, pruned_loss=0.02526, over 4799.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.0309, over 971240.71 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 18:11:05,300 INFO [train.py:715] (7/8) Epoch 13, batch 13100, loss[loss=0.152, simple_loss=0.2269, pruned_loss=0.03852, over 4880.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03041, over 971538.40 frames.], batch size: 22, lr: 1.69e-04 +2022-05-07 18:11:43,995 INFO [train.py:715] (7/8) Epoch 13, batch 13150, loss[loss=0.149, simple_loss=0.2314, pruned_loss=0.0333, over 4942.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03057, over 971508.23 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 18:12:21,745 INFO [train.py:715] (7/8) Epoch 13, batch 13200, loss[loss=0.1446, simple_loss=0.2211, pruned_loss=0.03401, over 4801.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03024, over 971460.11 frames.], batch size: 25, lr: 1.69e-04 +2022-05-07 18:13:00,178 INFO [train.py:715] (7/8) Epoch 13, batch 13250, loss[loss=0.1359, simple_loss=0.2048, pruned_loss=0.03345, over 4774.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03075, over 971455.13 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 18:13:38,870 INFO [train.py:715] (7/8) Epoch 13, batch 13300, loss[loss=0.1605, simple_loss=0.2341, pruned_loss=0.04343, over 4771.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.0306, over 971325.86 frames.], batch size: 18, lr: 1.69e-04 +2022-05-07 18:14:17,608 INFO [train.py:715] 
(7/8) Epoch 13, batch 13350, loss[loss=0.1299, simple_loss=0.2099, pruned_loss=0.02499, over 4756.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03111, over 971478.48 frames.], batch size: 14, lr: 1.69e-04 +2022-05-07 18:14:55,895 INFO [train.py:715] (7/8) Epoch 13, batch 13400, loss[loss=0.1223, simple_loss=0.1909, pruned_loss=0.02686, over 4964.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03107, over 972781.43 frames.], batch size: 24, lr: 1.69e-04 +2022-05-07 18:15:35,681 INFO [train.py:715] (7/8) Epoch 13, batch 13450, loss[loss=0.1613, simple_loss=0.2341, pruned_loss=0.0443, over 4897.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03214, over 972109.18 frames.], batch size: 17, lr: 1.69e-04 +2022-05-07 18:16:14,408 INFO [train.py:715] (7/8) Epoch 13, batch 13500, loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03072, over 4945.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03155, over 972425.27 frames.], batch size: 29, lr: 1.69e-04 +2022-05-07 18:16:52,059 INFO [train.py:715] (7/8) Epoch 13, batch 13550, loss[loss=0.135, simple_loss=0.2052, pruned_loss=0.0324, over 4844.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03162, over 972151.04 frames.], batch size: 30, lr: 1.69e-04 +2022-05-07 18:17:29,850 INFO [train.py:715] (7/8) Epoch 13, batch 13600, loss[loss=0.1208, simple_loss=0.191, pruned_loss=0.02528, over 4957.00 frames.], tot_loss[loss=0.137, simple_loss=0.2109, pruned_loss=0.0316, over 972250.87 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:18:08,970 INFO [train.py:715] (7/8) Epoch 13, batch 13650, loss[loss=0.1463, simple_loss=0.2132, pruned_loss=0.03969, over 4832.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03134, over 972461.97 frames.], batch size: 27, lr: 1.68e-04 +2022-05-07 18:18:47,086 INFO [train.py:715] (7/8) Epoch 13, batch 13700, loss[loss=0.1395, simple_loss=0.2166, pruned_loss=0.03125, over 4771.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.0315, over 972281.05 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:19:24,725 INFO [train.py:715] (7/8) Epoch 13, batch 13750, loss[loss=0.1177, simple_loss=0.1987, pruned_loss=0.01836, over 4865.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03155, over 972629.42 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 18:20:03,321 INFO [train.py:715] (7/8) Epoch 13, batch 13800, loss[loss=0.1116, simple_loss=0.1837, pruned_loss=0.01973, over 4850.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2108, pruned_loss=0.03129, over 972723.54 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 18:20:41,460 INFO [train.py:715] (7/8) Epoch 13, batch 13850, loss[loss=0.1233, simple_loss=0.1905, pruned_loss=0.02799, over 4836.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03107, over 973089.90 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:21:19,872 INFO [train.py:715] (7/8) Epoch 13, batch 13900, loss[loss=0.1325, simple_loss=0.2079, pruned_loss=0.02856, over 4985.00 frames.], tot_loss[loss=0.1358, simple_loss=0.21, pruned_loss=0.03082, over 973857.02 frames.], batch size: 28, lr: 1.68e-04 +2022-05-07 18:21:58,634 INFO [train.py:715] (7/8) Epoch 13, batch 13950, loss[loss=0.1486, simple_loss=0.2181, pruned_loss=0.03957, over 4925.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.0308, over 973185.01 frames.], batch size: 23, lr: 1.68e-04 +2022-05-07 18:22:37,440 INFO [train.py:715] (7/8) Epoch 13, 
batch 14000, loss[loss=0.1178, simple_loss=0.1977, pruned_loss=0.01896, over 4754.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03051, over 972367.87 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:23:15,659 INFO [train.py:715] (7/8) Epoch 13, batch 14050, loss[loss=0.1323, simple_loss=0.2105, pruned_loss=0.02705, over 4815.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03026, over 973571.44 frames.], batch size: 27, lr: 1.68e-04 +2022-05-07 18:23:53,253 INFO [train.py:715] (7/8) Epoch 13, batch 14100, loss[loss=0.1431, simple_loss=0.2219, pruned_loss=0.03214, over 4775.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03019, over 972295.46 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:24:32,482 INFO [train.py:715] (7/8) Epoch 13, batch 14150, loss[loss=0.158, simple_loss=0.2325, pruned_loss=0.04182, over 4860.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03053, over 972445.63 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 18:25:10,630 INFO [train.py:715] (7/8) Epoch 13, batch 14200, loss[loss=0.121, simple_loss=0.2001, pruned_loss=0.02093, over 4955.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03094, over 973337.43 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:25:48,509 INFO [train.py:715] (7/8) Epoch 13, batch 14250, loss[loss=0.1077, simple_loss=0.1838, pruned_loss=0.01585, over 4785.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03092, over 973456.84 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 18:26:26,794 INFO [train.py:715] (7/8) Epoch 13, batch 14300, loss[loss=0.1477, simple_loss=0.2233, pruned_loss=0.03602, over 4701.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03095, over 972663.28 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:27:06,170 INFO [train.py:715] (7/8) Epoch 13, batch 14350, loss[loss=0.141, simple_loss=0.2116, pruned_loss=0.03518, over 4842.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03072, over 972200.91 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 18:27:44,508 INFO [train.py:715] (7/8) Epoch 13, batch 14400, loss[loss=0.1596, simple_loss=0.2365, pruned_loss=0.04139, over 4839.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2098, pruned_loss=0.03058, over 973139.23 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 18:28:22,435 INFO [train.py:715] (7/8) Epoch 13, batch 14450, loss[loss=0.1357, simple_loss=0.2036, pruned_loss=0.0339, over 4901.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2105, pruned_loss=0.03084, over 973056.09 frames.], batch size: 22, lr: 1.68e-04 +2022-05-07 18:29:01,543 INFO [train.py:715] (7/8) Epoch 13, batch 14500, loss[loss=0.1144, simple_loss=0.1905, pruned_loss=0.01911, over 4797.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2102, pruned_loss=0.03083, over 972860.02 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 18:29:40,344 INFO [train.py:715] (7/8) Epoch 13, batch 14550, loss[loss=0.1464, simple_loss=0.2262, pruned_loss=0.03326, over 4909.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2102, pruned_loss=0.03065, over 973638.08 frames.], batch size: 29, lr: 1.68e-04 +2022-05-07 18:30:18,702 INFO [train.py:715] (7/8) Epoch 13, batch 14600, loss[loss=0.1418, simple_loss=0.2125, pruned_loss=0.03557, over 4986.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2103, pruned_loss=0.03103, over 973280.35 frames.], batch size: 35, lr: 1.68e-04 +2022-05-07 18:30:57,057 INFO [train.py:715] (7/8) Epoch 13, batch 
14650, loss[loss=0.1386, simple_loss=0.2201, pruned_loss=0.02854, over 4854.00 frames.], tot_loss[loss=0.1358, simple_loss=0.21, pruned_loss=0.03076, over 973447.01 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 18:31:35,708 INFO [train.py:715] (7/8) Epoch 13, batch 14700, loss[loss=0.111, simple_loss=0.19, pruned_loss=0.01603, over 4798.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03083, over 972382.63 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 18:32:13,645 INFO [train.py:715] (7/8) Epoch 13, batch 14750, loss[loss=0.1273, simple_loss=0.2057, pruned_loss=0.02452, over 4776.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03079, over 972272.14 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:32:50,799 INFO [train.py:715] (7/8) Epoch 13, batch 14800, loss[loss=0.1312, simple_loss=0.2012, pruned_loss=0.03061, over 4937.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.0311, over 972505.82 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:33:29,889 INFO [train.py:715] (7/8) Epoch 13, batch 14850, loss[loss=0.1189, simple_loss=0.1914, pruned_loss=0.02322, over 4914.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03102, over 971913.16 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:34:08,569 INFO [train.py:715] (7/8) Epoch 13, batch 14900, loss[loss=0.1225, simple_loss=0.1996, pruned_loss=0.02277, over 4778.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03117, over 971971.40 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:34:46,491 INFO [train.py:715] (7/8) Epoch 13, batch 14950, loss[loss=0.1448, simple_loss=0.2203, pruned_loss=0.03465, over 4876.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03113, over 972346.31 frames.], batch size: 22, lr: 1.68e-04 +2022-05-07 18:35:24,994 INFO [train.py:715] (7/8) Epoch 13, batch 15000, loss[loss=0.1168, simple_loss=0.1821, pruned_loss=0.02573, over 4892.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03089, over 971770.73 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 18:35:24,995 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 18:35:34,567 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1052, simple_loss=0.189, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-07 18:36:13,158 INFO [train.py:715] (7/8) Epoch 13, batch 15050, loss[loss=0.1277, simple_loss=0.1842, pruned_loss=0.03565, over 4845.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.0315, over 972197.28 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 18:36:52,714 INFO [train.py:715] (7/8) Epoch 13, batch 15100, loss[loss=0.1516, simple_loss=0.2186, pruned_loss=0.04225, over 4892.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03133, over 972017.08 frames.], batch size: 22, lr: 1.68e-04 +2022-05-07 18:37:31,194 INFO [train.py:715] (7/8) Epoch 13, batch 15150, loss[loss=0.1218, simple_loss=0.1994, pruned_loss=0.02207, over 4807.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03149, over 971946.62 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 18:38:09,446 INFO [train.py:715] (7/8) Epoch 13, batch 15200, loss[loss=0.1097, simple_loss=0.1783, pruned_loss=0.02056, over 4762.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03071, over 971859.85 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:38:49,229 INFO [train.py:715] (7/8) Epoch 13, batch 15250, loss[loss=0.1246, simple_loss=0.2071, pruned_loss=0.02109, over 4785.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03105, over 971394.06 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:39:27,974 INFO [train.py:715] (7/8) Epoch 13, batch 15300, loss[loss=0.1283, simple_loss=0.2025, pruned_loss=0.02708, over 4973.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03079, over 972610.40 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:40:06,014 INFO [train.py:715] (7/8) Epoch 13, batch 15350, loss[loss=0.1179, simple_loss=0.1954, pruned_loss=0.02018, over 4829.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.0308, over 972868.14 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:40:45,014 INFO [train.py:715] (7/8) Epoch 13, batch 15400, loss[loss=0.1451, simple_loss=0.2191, pruned_loss=0.03556, over 4945.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03098, over 972953.48 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 18:41:23,902 INFO [train.py:715] (7/8) Epoch 13, batch 15450, loss[loss=0.1411, simple_loss=0.2145, pruned_loss=0.03387, over 4979.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.0308, over 973466.84 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:42:03,713 INFO [train.py:715] (7/8) Epoch 13, batch 15500, loss[loss=0.1272, simple_loss=0.2015, pruned_loss=0.02644, over 4799.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03113, over 972030.81 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:42:41,962 INFO [train.py:715] (7/8) Epoch 13, batch 15550, loss[loss=0.1771, simple_loss=0.2426, pruned_loss=0.05579, over 4815.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03138, over 972213.02 frames.], batch size: 26, lr: 1.68e-04 +2022-05-07 18:43:21,698 INFO [train.py:715] (7/8) Epoch 13, batch 15600, loss[loss=0.1256, simple_loss=0.2087, pruned_loss=0.02122, over 4873.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03093, over 972121.61 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 18:44:01,140 INFO [train.py:715] (7/8) Epoch 13, batch 15650, loss[loss=0.1515, simple_loss=0.2205, pruned_loss=0.04119, over 4939.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03076, over 971740.25 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 
18:44:39,624 INFO [train.py:715] (7/8) Epoch 13, batch 15700, loss[loss=0.1431, simple_loss=0.2177, pruned_loss=0.03423, over 4786.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03073, over 972070.78 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:45:18,633 INFO [train.py:715] (7/8) Epoch 13, batch 15750, loss[loss=0.1304, simple_loss=0.2108, pruned_loss=0.02498, over 4811.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2089, pruned_loss=0.03116, over 971035.67 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:45:57,411 INFO [train.py:715] (7/8) Epoch 13, batch 15800, loss[loss=0.133, simple_loss=0.1995, pruned_loss=0.03324, over 4857.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2102, pruned_loss=0.03184, over 971440.57 frames.], batch size: 34, lr: 1.68e-04 +2022-05-07 18:46:35,694 INFO [train.py:715] (7/8) Epoch 13, batch 15850, loss[loss=0.1254, simple_loss=0.1991, pruned_loss=0.02581, over 4950.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.03143, over 972352.85 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 18:47:13,599 INFO [train.py:715] (7/8) Epoch 13, batch 15900, loss[loss=0.1351, simple_loss=0.2187, pruned_loss=0.02569, over 4901.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.0317, over 972481.76 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 18:47:52,835 INFO [train.py:715] (7/8) Epoch 13, batch 15950, loss[loss=0.1287, simple_loss=0.2085, pruned_loss=0.02451, over 4764.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03193, over 971948.35 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:48:31,349 INFO [train.py:715] (7/8) Epoch 13, batch 16000, loss[loss=0.148, simple_loss=0.2167, pruned_loss=0.03965, over 4920.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.0317, over 972559.53 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:49:09,601 INFO [train.py:715] (7/8) Epoch 13, batch 16050, loss[loss=0.1448, simple_loss=0.2124, pruned_loss=0.03855, over 4954.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03172, over 971940.49 frames.], batch size: 35, lr: 1.68e-04 +2022-05-07 18:49:48,081 INFO [train.py:715] (7/8) Epoch 13, batch 16100, loss[loss=0.138, simple_loss=0.2206, pruned_loss=0.02769, over 4827.00 frames.], tot_loss[loss=0.1363, simple_loss=0.21, pruned_loss=0.03127, over 971986.35 frames.], batch size: 27, lr: 1.68e-04 +2022-05-07 18:50:27,346 INFO [train.py:715] (7/8) Epoch 13, batch 16150, loss[loss=0.1527, simple_loss=0.2206, pruned_loss=0.04239, over 4824.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03142, over 971300.05 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:51:05,994 INFO [train.py:715] (7/8) Epoch 13, batch 16200, loss[loss=0.1266, simple_loss=0.2057, pruned_loss=0.02377, over 4931.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03146, over 972098.63 frames.], batch size: 23, lr: 1.68e-04 +2022-05-07 18:51:42,924 INFO [train.py:715] (7/8) Epoch 13, batch 16250, loss[loss=0.143, simple_loss=0.224, pruned_loss=0.03099, over 4772.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03127, over 971675.35 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 18:52:22,103 INFO [train.py:715] (7/8) Epoch 13, batch 16300, loss[loss=0.1055, simple_loss=0.1719, pruned_loss=0.01955, over 4892.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03123, over 971672.67 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:53:00,700 
INFO [train.py:715] (7/8) Epoch 13, batch 16350, loss[loss=0.1265, simple_loss=0.1965, pruned_loss=0.02823, over 4840.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.0312, over 971403.85 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:53:39,045 INFO [train.py:715] (7/8) Epoch 13, batch 16400, loss[loss=0.1415, simple_loss=0.2224, pruned_loss=0.03032, over 4951.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03123, over 971475.39 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 18:54:18,188 INFO [train.py:715] (7/8) Epoch 13, batch 16450, loss[loss=0.1621, simple_loss=0.2325, pruned_loss=0.04585, over 4699.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03187, over 971459.67 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:54:57,401 INFO [train.py:715] (7/8) Epoch 13, batch 16500, loss[loss=0.1284, simple_loss=0.207, pruned_loss=0.02486, over 4815.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03148, over 971412.97 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 18:55:36,543 INFO [train.py:715] (7/8) Epoch 13, batch 16550, loss[loss=0.1147, simple_loss=0.1981, pruned_loss=0.01571, over 4910.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03119, over 971713.55 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 18:56:13,926 INFO [train.py:715] (7/8) Epoch 13, batch 16600, loss[loss=0.1541, simple_loss=0.2352, pruned_loss=0.03653, over 4855.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03114, over 971926.09 frames.], batch size: 39, lr: 1.68e-04 +2022-05-07 18:56:53,164 INFO [train.py:715] (7/8) Epoch 13, batch 16650, loss[loss=0.1238, simple_loss=0.1939, pruned_loss=0.02687, over 4777.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2093, pruned_loss=0.0314, over 971748.78 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:57:31,699 INFO [train.py:715] (7/8) Epoch 13, batch 16700, loss[loss=0.1435, simple_loss=0.2107, pruned_loss=0.03817, over 4952.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03108, over 972046.68 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 18:58:09,692 INFO [train.py:715] (7/8) Epoch 13, batch 16750, loss[loss=0.116, simple_loss=0.1992, pruned_loss=0.01639, over 4688.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03153, over 972203.81 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 18:58:48,290 INFO [train.py:715] (7/8) Epoch 13, batch 16800, loss[loss=0.1342, simple_loss=0.1984, pruned_loss=0.03502, over 4794.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03142, over 971504.66 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 18:59:27,922 INFO [train.py:715] (7/8) Epoch 13, batch 16850, loss[loss=0.1128, simple_loss=0.1933, pruned_loss=0.01617, over 4790.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.031, over 971988.00 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:00:06,299 INFO [train.py:715] (7/8) Epoch 13, batch 16900, loss[loss=0.1306, simple_loss=0.2136, pruned_loss=0.02379, over 4792.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03123, over 972388.17 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:00:44,804 INFO [train.py:715] (7/8) Epoch 13, batch 16950, loss[loss=0.1147, simple_loss=0.1945, pruned_loss=0.01746, over 4832.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03089, over 972435.92 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:01:23,719 INFO 
[train.py:715] (7/8) Epoch 13, batch 17000, loss[loss=0.1896, simple_loss=0.2647, pruned_loss=0.0573, over 4923.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.0314, over 972258.96 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:02:02,413 INFO [train.py:715] (7/8) Epoch 13, batch 17050, loss[loss=0.1411, simple_loss=0.225, pruned_loss=0.02854, over 4983.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03165, over 971718.44 frames.], batch size: 28, lr: 1.68e-04 +2022-05-07 19:02:40,528 INFO [train.py:715] (7/8) Epoch 13, batch 17100, loss[loss=0.1637, simple_loss=0.2486, pruned_loss=0.03942, over 4969.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2115, pruned_loss=0.03172, over 972019.60 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:03:19,261 INFO [train.py:715] (7/8) Epoch 13, batch 17150, loss[loss=0.1127, simple_loss=0.1978, pruned_loss=0.01379, over 4820.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2106, pruned_loss=0.03106, over 971999.81 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 19:03:58,099 INFO [train.py:715] (7/8) Epoch 13, batch 17200, loss[loss=0.1399, simple_loss=0.2288, pruned_loss=0.02554, over 4918.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03092, over 971812.77 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:04:36,806 INFO [train.py:715] (7/8) Epoch 13, batch 17250, loss[loss=0.1563, simple_loss=0.2316, pruned_loss=0.04051, over 4780.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03102, over 972373.12 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:05:14,778 INFO [train.py:715] (7/8) Epoch 13, batch 17300, loss[loss=0.1224, simple_loss=0.197, pruned_loss=0.02388, over 4746.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03106, over 972829.52 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 19:05:53,540 INFO [train.py:715] (7/8) Epoch 13, batch 17350, loss[loss=0.1104, simple_loss=0.1803, pruned_loss=0.02023, over 4836.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2097, pruned_loss=0.03082, over 972347.12 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:06:32,451 INFO [train.py:715] (7/8) Epoch 13, batch 17400, loss[loss=0.1413, simple_loss=0.2021, pruned_loss=0.04026, over 4909.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03087, over 972259.21 frames.], batch size: 17, lr: 1.68e-04 +2022-05-07 19:07:10,067 INFO [train.py:715] (7/8) Epoch 13, batch 17450, loss[loss=0.1383, simple_loss=0.2075, pruned_loss=0.03453, over 4739.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2105, pruned_loss=0.03127, over 973209.80 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 19:07:48,568 INFO [train.py:715] (7/8) Epoch 13, batch 17500, loss[loss=0.125, simple_loss=0.1961, pruned_loss=0.02698, over 4946.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03098, over 973402.10 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:08:27,655 INFO [train.py:715] (7/8) Epoch 13, batch 17550, loss[loss=0.1707, simple_loss=0.2383, pruned_loss=0.0515, over 4918.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03092, over 972905.54 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:09:06,327 INFO [train.py:715] (7/8) Epoch 13, batch 17600, loss[loss=0.148, simple_loss=0.2175, pruned_loss=0.03925, over 4821.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03107, over 972212.64 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 19:09:43,948 INFO [train.py:715] 
(7/8) Epoch 13, batch 17650, loss[loss=0.1384, simple_loss=0.2103, pruned_loss=0.03318, over 4845.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03103, over 971508.49 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 19:10:23,207 INFO [train.py:715] (7/8) Epoch 13, batch 17700, loss[loss=0.1738, simple_loss=0.2374, pruned_loss=0.05509, over 4741.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03096, over 971779.36 frames.], batch size: 16, lr: 1.68e-04 +2022-05-07 19:11:02,061 INFO [train.py:715] (7/8) Epoch 13, batch 17750, loss[loss=0.1299, simple_loss=0.2058, pruned_loss=0.02705, over 4987.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03069, over 972326.93 frames.], batch size: 26, lr: 1.68e-04 +2022-05-07 19:11:39,679 INFO [train.py:715] (7/8) Epoch 13, batch 17800, loss[loss=0.1573, simple_loss=0.2416, pruned_loss=0.03653, over 4964.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03067, over 973145.39 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:12:18,453 INFO [train.py:715] (7/8) Epoch 13, batch 17850, loss[loss=0.1514, simple_loss=0.2197, pruned_loss=0.04157, over 4855.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03065, over 973033.30 frames.], batch size: 30, lr: 1.68e-04 +2022-05-07 19:12:57,284 INFO [train.py:715] (7/8) Epoch 13, batch 17900, loss[loss=0.1349, simple_loss=0.2047, pruned_loss=0.03258, over 4821.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03083, over 973193.36 frames.], batch size: 25, lr: 1.68e-04 +2022-05-07 19:13:35,477 INFO [train.py:715] (7/8) Epoch 13, batch 17950, loss[loss=0.1171, simple_loss=0.1996, pruned_loss=0.0173, over 4957.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03054, over 972334.53 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 19:14:13,537 INFO [train.py:715] (7/8) Epoch 13, batch 18000, loss[loss=0.1648, simple_loss=0.2483, pruned_loss=0.04069, over 4906.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03073, over 972103.03 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 19:14:13,538 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 19:14:23,028 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1055, simple_loss=0.1892, pruned_loss=0.01083, over 914524.00 frames. 
+2022-05-07 19:15:00,696 INFO [train.py:715] (7/8) Epoch 13, batch 18050, loss[loss=0.1061, simple_loss=0.1824, pruned_loss=0.01491, over 4799.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03066, over 972552.18 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:15:39,772 INFO [train.py:715] (7/8) Epoch 13, batch 18100, loss[loss=0.1121, simple_loss=0.1854, pruned_loss=0.0194, over 4937.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2085, pruned_loss=0.03066, over 972483.52 frames.], batch size: 21, lr: 1.68e-04 +2022-05-07 19:16:18,124 INFO [train.py:715] (7/8) Epoch 13, batch 18150, loss[loss=0.1254, simple_loss=0.1997, pruned_loss=0.02558, over 4792.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03045, over 973179.98 frames.], batch size: 13, lr: 1.68e-04 +2022-05-07 19:16:55,371 INFO [train.py:715] (7/8) Epoch 13, batch 18200, loss[loss=0.1356, simple_loss=0.2076, pruned_loss=0.03181, over 4922.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.0306, over 972609.50 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:17:33,691 INFO [train.py:715] (7/8) Epoch 13, batch 18250, loss[loss=0.1292, simple_loss=0.2034, pruned_loss=0.02751, over 4869.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03095, over 972561.45 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 19:18:12,483 INFO [train.py:715] (7/8) Epoch 13, batch 18300, loss[loss=0.1245, simple_loss=0.1979, pruned_loss=0.02551, over 4984.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.03158, over 971881.27 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 19:18:51,122 INFO [train.py:715] (7/8) Epoch 13, batch 18350, loss[loss=0.1475, simple_loss=0.2284, pruned_loss=0.03329, over 4976.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2105, pruned_loss=0.03184, over 972181.84 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 19:19:29,021 INFO [train.py:715] (7/8) Epoch 13, batch 18400, loss[loss=0.1344, simple_loss=0.2077, pruned_loss=0.03053, over 4972.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2111, pruned_loss=0.03193, over 972410.45 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 19:20:07,824 INFO [train.py:715] (7/8) Epoch 13, batch 18450, loss[loss=0.1743, simple_loss=0.2397, pruned_loss=0.05443, over 4767.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03134, over 972418.56 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 19:20:46,496 INFO [train.py:715] (7/8) Epoch 13, batch 18500, loss[loss=0.1422, simple_loss=0.2157, pruned_loss=0.03436, over 4962.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.0312, over 971401.82 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 19:21:23,937 INFO [train.py:715] (7/8) Epoch 13, batch 18550, loss[loss=0.1395, simple_loss=0.2143, pruned_loss=0.03235, over 4810.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03131, over 971860.23 frames.], batch size: 24, lr: 1.68e-04 +2022-05-07 19:22:01,959 INFO [train.py:715] (7/8) Epoch 13, batch 18600, loss[loss=0.1307, simple_loss=0.2029, pruned_loss=0.02921, over 4990.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03098, over 972272.02 frames.], batch size: 20, lr: 1.68e-04 +2022-05-07 19:22:40,556 INFO [train.py:715] (7/8) Epoch 13, batch 18650, loss[loss=0.1439, simple_loss=0.2156, pruned_loss=0.03611, over 4963.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03088, over 973320.49 frames.], batch size: 39, lr: 1.68e-04 
+2022-05-07 19:23:18,502 INFO [train.py:715] (7/8) Epoch 13, batch 18700, loss[loss=0.1262, simple_loss=0.1963, pruned_loss=0.02808, over 4909.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03069, over 973676.81 frames.], batch size: 19, lr: 1.68e-04 +2022-05-07 19:23:56,289 INFO [train.py:715] (7/8) Epoch 13, batch 18750, loss[loss=0.1798, simple_loss=0.2532, pruned_loss=0.05317, over 4929.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03123, over 973381.58 frames.], batch size: 23, lr: 1.68e-04 +2022-05-07 19:24:35,595 INFO [train.py:715] (7/8) Epoch 13, batch 18800, loss[loss=0.1058, simple_loss=0.1809, pruned_loss=0.01531, over 4779.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03089, over 973584.90 frames.], batch size: 12, lr: 1.68e-04 +2022-05-07 19:25:14,030 INFO [train.py:715] (7/8) Epoch 13, batch 18850, loss[loss=0.1336, simple_loss=0.2021, pruned_loss=0.03252, over 4794.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03081, over 972816.74 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 19:25:52,019 INFO [train.py:715] (7/8) Epoch 13, batch 18900, loss[loss=0.1269, simple_loss=0.2034, pruned_loss=0.02517, over 4950.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03086, over 972867.09 frames.], batch size: 23, lr: 1.68e-04 +2022-05-07 19:26:30,883 INFO [train.py:715] (7/8) Epoch 13, batch 18950, loss[loss=0.1247, simple_loss=0.2036, pruned_loss=0.02284, over 4780.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03147, over 973359.19 frames.], batch size: 18, lr: 1.68e-04 +2022-05-07 19:27:09,767 INFO [train.py:715] (7/8) Epoch 13, batch 19000, loss[loss=0.1404, simple_loss=0.2129, pruned_loss=0.034, over 4851.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2111, pruned_loss=0.03161, over 972746.78 frames.], batch size: 15, lr: 1.68e-04 +2022-05-07 19:27:48,114 INFO [train.py:715] (7/8) Epoch 13, batch 19050, loss[loss=0.1527, simple_loss=0.2393, pruned_loss=0.03303, over 4792.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03132, over 972992.94 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 19:28:26,436 INFO [train.py:715] (7/8) Epoch 13, batch 19100, loss[loss=0.1478, simple_loss=0.2209, pruned_loss=0.03733, over 4777.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2107, pruned_loss=0.03158, over 973274.44 frames.], batch size: 14, lr: 1.68e-04 +2022-05-07 19:29:05,442 INFO [train.py:715] (7/8) Epoch 13, batch 19150, loss[loss=0.1372, simple_loss=0.2176, pruned_loss=0.02843, over 4959.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03157, over 973294.27 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 19:29:44,102 INFO [train.py:715] (7/8) Epoch 13, batch 19200, loss[loss=0.09958, simple_loss=0.1678, pruned_loss=0.0157, over 4813.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2099, pruned_loss=0.03167, over 972989.76 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:30:21,501 INFO [train.py:715] (7/8) Epoch 13, batch 19250, loss[loss=0.1343, simple_loss=0.2183, pruned_loss=0.02511, over 4933.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03175, over 973257.62 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 19:31:00,078 INFO [train.py:715] (7/8) Epoch 13, batch 19300, loss[loss=0.1429, simple_loss=0.2302, pruned_loss=0.02785, over 4820.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03158, over 973597.21 frames.], batch size: 14, lr: 1.67e-04 
+2022-05-07 19:31:39,543 INFO [train.py:715] (7/8) Epoch 13, batch 19350, loss[loss=0.138, simple_loss=0.204, pruned_loss=0.03603, over 4965.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03123, over 973581.49 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:32:18,081 INFO [train.py:715] (7/8) Epoch 13, batch 19400, loss[loss=0.1571, simple_loss=0.2308, pruned_loss=0.0417, over 4841.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03109, over 972948.10 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:32:56,516 INFO [train.py:715] (7/8) Epoch 13, batch 19450, loss[loss=0.1389, simple_loss=0.2138, pruned_loss=0.03199, over 4785.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.0306, over 972917.92 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 19:33:37,809 INFO [train.py:715] (7/8) Epoch 13, batch 19500, loss[loss=0.1216, simple_loss=0.1944, pruned_loss=0.02441, over 4836.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03055, over 971991.30 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:34:16,748 INFO [train.py:715] (7/8) Epoch 13, batch 19550, loss[loss=0.1395, simple_loss=0.2096, pruned_loss=0.0347, over 4739.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2082, pruned_loss=0.03061, over 970802.83 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:34:54,318 INFO [train.py:715] (7/8) Epoch 13, batch 19600, loss[loss=0.1401, simple_loss=0.216, pruned_loss=0.03214, over 4766.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2082, pruned_loss=0.0311, over 970542.14 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:35:32,450 INFO [train.py:715] (7/8) Epoch 13, batch 19650, loss[loss=0.1361, simple_loss=0.2185, pruned_loss=0.02689, over 4900.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2084, pruned_loss=0.03134, over 971348.45 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:36:11,255 INFO [train.py:715] (7/8) Epoch 13, batch 19700, loss[loss=0.1534, simple_loss=0.2328, pruned_loss=0.03703, over 4948.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03178, over 971418.65 frames.], batch size: 40, lr: 1.67e-04 +2022-05-07 19:36:49,084 INFO [train.py:715] (7/8) Epoch 13, batch 19750, loss[loss=0.135, simple_loss=0.21, pruned_loss=0.03, over 4878.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03173, over 971970.27 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 19:37:26,941 INFO [train.py:715] (7/8) Epoch 13, batch 19800, loss[loss=0.1308, simple_loss=0.2166, pruned_loss=0.02244, over 4822.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.03125, over 971989.44 frames.], batch size: 27, lr: 1.67e-04 +2022-05-07 19:38:05,611 INFO [train.py:715] (7/8) Epoch 13, batch 19850, loss[loss=0.1269, simple_loss=0.2088, pruned_loss=0.02244, over 4771.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03118, over 971557.90 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 19:38:44,224 INFO [train.py:715] (7/8) Epoch 13, batch 19900, loss[loss=0.1454, simple_loss=0.2197, pruned_loss=0.03555, over 4810.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.0309, over 971659.37 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:39:22,425 INFO [train.py:715] (7/8) Epoch 13, batch 19950, loss[loss=0.1392, simple_loss=0.2162, pruned_loss=0.03104, over 4938.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03094, over 972352.48 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 
19:40:01,313 INFO [train.py:715] (7/8) Epoch 13, batch 20000, loss[loss=0.1529, simple_loss=0.229, pruned_loss=0.03839, over 4785.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03139, over 972283.85 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 19:40:39,756 INFO [train.py:715] (7/8) Epoch 13, batch 20050, loss[loss=0.154, simple_loss=0.209, pruned_loss=0.04953, over 4777.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03186, over 972396.05 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:41:16,930 INFO [train.py:715] (7/8) Epoch 13, batch 20100, loss[loss=0.1208, simple_loss=0.1933, pruned_loss=0.02413, over 4913.00 frames.], tot_loss[loss=0.1374, simple_loss=0.211, pruned_loss=0.03187, over 972854.80 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:41:54,379 INFO [train.py:715] (7/8) Epoch 13, batch 20150, loss[loss=0.141, simple_loss=0.2088, pruned_loss=0.03666, over 4915.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03183, over 973001.04 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:42:33,107 INFO [train.py:715] (7/8) Epoch 13, batch 20200, loss[loss=0.1349, simple_loss=0.2079, pruned_loss=0.031, over 4855.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03144, over 972584.84 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 19:43:11,187 INFO [train.py:715] (7/8) Epoch 13, batch 20250, loss[loss=0.1413, simple_loss=0.2141, pruned_loss=0.03423, over 4812.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03124, over 972823.30 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:43:48,891 INFO [train.py:715] (7/8) Epoch 13, batch 20300, loss[loss=0.1351, simple_loss=0.2067, pruned_loss=0.03172, over 4982.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03122, over 973122.89 frames.], batch size: 31, lr: 1.67e-04 +2022-05-07 19:44:26,993 INFO [train.py:715] (7/8) Epoch 13, batch 20350, loss[loss=0.1311, simple_loss=0.1951, pruned_loss=0.03352, over 4848.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03131, over 972862.97 frames.], batch size: 30, lr: 1.67e-04 +2022-05-07 19:45:05,761 INFO [train.py:715] (7/8) Epoch 13, batch 20400, loss[loss=0.1446, simple_loss=0.2121, pruned_loss=0.03856, over 4967.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03171, over 972251.99 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 19:45:43,493 INFO [train.py:715] (7/8) Epoch 13, batch 20450, loss[loss=0.1437, simple_loss=0.2076, pruned_loss=0.03984, over 4906.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2102, pruned_loss=0.03157, over 972836.82 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:46:21,266 INFO [train.py:715] (7/8) Epoch 13, batch 20500, loss[loss=0.1324, simple_loss=0.2159, pruned_loss=0.02441, over 4889.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03146, over 972270.90 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:46:59,824 INFO [train.py:715] (7/8) Epoch 13, batch 20550, loss[loss=0.1306, simple_loss=0.2126, pruned_loss=0.02427, over 4769.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.03165, over 972154.67 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 19:47:37,479 INFO [train.py:715] (7/8) Epoch 13, batch 20600, loss[loss=0.1525, simple_loss=0.2112, pruned_loss=0.04695, over 4816.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2115, pruned_loss=0.03171, over 972535.03 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:48:15,107 
INFO [train.py:715] (7/8) Epoch 13, batch 20650, loss[loss=0.1583, simple_loss=0.2146, pruned_loss=0.05096, over 4808.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03186, over 973309.33 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 19:48:52,913 INFO [train.py:715] (7/8) Epoch 13, batch 20700, loss[loss=0.1173, simple_loss=0.1947, pruned_loss=0.01994, over 4813.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.0312, over 973120.59 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 19:49:31,351 INFO [train.py:715] (7/8) Epoch 13, batch 20750, loss[loss=0.1789, simple_loss=0.2507, pruned_loss=0.0535, over 4903.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03095, over 972479.72 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:50:08,696 INFO [train.py:715] (7/8) Epoch 13, batch 20800, loss[loss=0.1333, simple_loss=0.2115, pruned_loss=0.02758, over 4824.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2097, pruned_loss=0.03072, over 971865.63 frames.], batch size: 27, lr: 1.67e-04 +2022-05-07 19:50:46,284 INFO [train.py:715] (7/8) Epoch 13, batch 20850, loss[loss=0.1461, simple_loss=0.2213, pruned_loss=0.03552, over 4804.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03117, over 971541.27 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 19:51:24,967 INFO [train.py:715] (7/8) Epoch 13, batch 20900, loss[loss=0.1341, simple_loss=0.2033, pruned_loss=0.03245, over 4821.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03175, over 972068.15 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 19:52:03,243 INFO [train.py:715] (7/8) Epoch 13, batch 20950, loss[loss=0.1412, simple_loss=0.2173, pruned_loss=0.03253, over 4809.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03114, over 971432.63 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 19:52:40,747 INFO [train.py:715] (7/8) Epoch 13, batch 21000, loss[loss=0.1297, simple_loss=0.203, pruned_loss=0.02817, over 4775.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03155, over 972337.98 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 19:52:40,747 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 19:52:50,264 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1891, pruned_loss=0.01084, over 914524.00 frames. 
+2022-05-07 19:53:28,433 INFO [train.py:715] (7/8) Epoch 13, batch 21050, loss[loss=0.1111, simple_loss=0.183, pruned_loss=0.01962, over 4908.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03145, over 973288.80 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 19:54:06,967 INFO [train.py:715] (7/8) Epoch 13, batch 21100, loss[loss=0.1501, simple_loss=0.2243, pruned_loss=0.03798, over 4879.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03132, over 973204.06 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:54:46,057 INFO [train.py:715] (7/8) Epoch 13, batch 21150, loss[loss=0.1631, simple_loss=0.2341, pruned_loss=0.04604, over 4872.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2108, pruned_loss=0.03199, over 973332.06 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:55:23,879 INFO [train.py:715] (7/8) Epoch 13, batch 21200, loss[loss=0.1159, simple_loss=0.1866, pruned_loss=0.02259, over 4755.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2112, pruned_loss=0.03189, over 972815.00 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:56:02,465 INFO [train.py:715] (7/8) Epoch 13, batch 21250, loss[loss=0.1365, simple_loss=0.207, pruned_loss=0.03303, over 4702.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03227, over 972106.13 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 19:56:41,276 INFO [train.py:715] (7/8) Epoch 13, batch 21300, loss[loss=0.1581, simple_loss=0.2213, pruned_loss=0.04743, over 4879.00 frames.], tot_loss[loss=0.1378, simple_loss=0.211, pruned_loss=0.03231, over 972691.75 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 19:57:19,154 INFO [train.py:715] (7/8) Epoch 13, batch 21350, loss[loss=0.1887, simple_loss=0.2566, pruned_loss=0.06042, over 4843.00 frames.], tot_loss[loss=0.1376, simple_loss=0.211, pruned_loss=0.03213, over 972971.37 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 19:57:57,082 INFO [train.py:715] (7/8) Epoch 13, batch 21400, loss[loss=0.126, simple_loss=0.205, pruned_loss=0.02344, over 4868.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03151, over 972779.87 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 19:58:35,345 INFO [train.py:715] (7/8) Epoch 13, batch 21450, loss[loss=0.1247, simple_loss=0.2035, pruned_loss=0.02298, over 4908.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03125, over 972313.15 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 19:59:14,499 INFO [train.py:715] (7/8) Epoch 13, batch 21500, loss[loss=0.1144, simple_loss=0.1757, pruned_loss=0.02654, over 4867.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03114, over 972462.26 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 19:59:52,262 INFO [train.py:715] (7/8) Epoch 13, batch 21550, loss[loss=0.1339, simple_loss=0.2101, pruned_loss=0.02886, over 4967.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 972015.37 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 20:00:30,897 INFO [train.py:715] (7/8) Epoch 13, batch 21600, loss[loss=0.1315, simple_loss=0.1981, pruned_loss=0.03248, over 4871.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03026, over 972506.75 frames.], batch size: 22, lr: 1.67e-04 +2022-05-07 20:01:09,863 INFO [train.py:715] (7/8) Epoch 13, batch 21650, loss[loss=0.1429, simple_loss=0.2135, pruned_loss=0.03619, over 4941.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03007, over 972480.46 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 
20:01:48,623 INFO [train.py:715] (7/8) Epoch 13, batch 21700, loss[loss=0.1356, simple_loss=0.2077, pruned_loss=0.03177, over 4686.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03065, over 972798.72 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:02:27,467 INFO [train.py:715] (7/8) Epoch 13, batch 21750, loss[loss=0.1265, simple_loss=0.2077, pruned_loss=0.02263, over 4813.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03057, over 973083.77 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:03:06,114 INFO [train.py:715] (7/8) Epoch 13, batch 21800, loss[loss=0.1566, simple_loss=0.2326, pruned_loss=0.04034, over 4742.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03056, over 972739.21 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:03:45,412 INFO [train.py:715] (7/8) Epoch 13, batch 21850, loss[loss=0.1535, simple_loss=0.2248, pruned_loss=0.04104, over 4885.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03079, over 972970.64 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:04:23,521 INFO [train.py:715] (7/8) Epoch 13, batch 21900, loss[loss=0.1398, simple_loss=0.2103, pruned_loss=0.03461, over 4966.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03083, over 973300.49 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:05:01,702 INFO [train.py:715] (7/8) Epoch 13, batch 21950, loss[loss=0.1253, simple_loss=0.2018, pruned_loss=0.02444, over 4899.00 frames.], tot_loss[loss=0.1358, simple_loss=0.21, pruned_loss=0.03082, over 973061.21 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:05:40,175 INFO [train.py:715] (7/8) Epoch 13, batch 22000, loss[loss=0.1514, simple_loss=0.2172, pruned_loss=0.0428, over 4840.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03026, over 972932.71 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 20:06:17,893 INFO [train.py:715] (7/8) Epoch 13, batch 22050, loss[loss=0.138, simple_loss=0.2159, pruned_loss=0.03007, over 4786.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03018, over 972832.95 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:06:55,943 INFO [train.py:715] (7/8) Epoch 13, batch 22100, loss[loss=0.1596, simple_loss=0.223, pruned_loss=0.04807, over 4825.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2099, pruned_loss=0.03081, over 972591.87 frames.], batch size: 30, lr: 1.67e-04 +2022-05-07 20:07:33,693 INFO [train.py:715] (7/8) Epoch 13, batch 22150, loss[loss=0.1161, simple_loss=0.1943, pruned_loss=0.01893, over 4968.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03047, over 972594.12 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:08:12,647 INFO [train.py:715] (7/8) Epoch 13, batch 22200, loss[loss=0.12, simple_loss=0.1941, pruned_loss=0.02298, over 4957.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.0307, over 974331.51 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:08:50,193 INFO [train.py:715] (7/8) Epoch 13, batch 22250, loss[loss=0.1305, simple_loss=0.2068, pruned_loss=0.02712, over 4829.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03018, over 973959.79 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:09:28,954 INFO [train.py:715] (7/8) Epoch 13, batch 22300, loss[loss=0.1423, simple_loss=0.2159, pruned_loss=0.0343, over 4836.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03072, over 973674.66 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 20:10:07,702 
INFO [train.py:715] (7/8) Epoch 13, batch 22350, loss[loss=0.1513, simple_loss=0.2161, pruned_loss=0.04323, over 4959.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03043, over 973615.32 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 20:10:45,728 INFO [train.py:715] (7/8) Epoch 13, batch 22400, loss[loss=0.1022, simple_loss=0.1714, pruned_loss=0.01647, over 4880.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03075, over 972648.66 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:11:23,411 INFO [train.py:715] (7/8) Epoch 13, batch 22450, loss[loss=0.126, simple_loss=0.2045, pruned_loss=0.02378, over 4917.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03063, over 972964.79 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:12:01,254 INFO [train.py:715] (7/8) Epoch 13, batch 22500, loss[loss=0.1282, simple_loss=0.2014, pruned_loss=0.02747, over 4761.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03065, over 973048.75 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:12:39,610 INFO [train.py:715] (7/8) Epoch 13, batch 22550, loss[loss=0.1354, simple_loss=0.2131, pruned_loss=0.02887, over 4755.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03083, over 973126.31 frames.], batch size: 19, lr: 1.67e-04 +2022-05-07 20:13:16,802 INFO [train.py:715] (7/8) Epoch 13, batch 22600, loss[loss=0.1393, simple_loss=0.2184, pruned_loss=0.03007, over 4777.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.03156, over 973271.40 frames.], batch size: 18, lr: 1.67e-04 +2022-05-07 20:13:54,710 INFO [train.py:715] (7/8) Epoch 13, batch 22650, loss[loss=0.1327, simple_loss=0.2171, pruned_loss=0.02409, over 4822.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03175, over 973760.14 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:14:32,802 INFO [train.py:715] (7/8) Epoch 13, batch 22700, loss[loss=0.1349, simple_loss=0.2062, pruned_loss=0.03181, over 4965.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.03196, over 973575.10 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:15:11,035 INFO [train.py:715] (7/8) Epoch 13, batch 22750, loss[loss=0.138, simple_loss=0.2118, pruned_loss=0.03209, over 4824.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03174, over 974245.76 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:15:49,013 INFO [train.py:715] (7/8) Epoch 13, batch 22800, loss[loss=0.1081, simple_loss=0.1918, pruned_loss=0.01225, over 4807.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03145, over 973939.63 frames.], batch size: 12, lr: 1.67e-04 +2022-05-07 20:16:27,580 INFO [train.py:715] (7/8) Epoch 13, batch 22850, loss[loss=0.1488, simple_loss=0.2203, pruned_loss=0.0387, over 4846.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03165, over 973143.45 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 20:17:06,843 INFO [train.py:715] (7/8) Epoch 13, batch 22900, loss[loss=0.1444, simple_loss=0.2104, pruned_loss=0.03918, over 4875.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03129, over 972392.52 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 20:17:44,514 INFO [train.py:715] (7/8) Epoch 13, batch 22950, loss[loss=0.1551, simple_loss=0.2216, pruned_loss=0.04434, over 4841.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03145, over 972904.17 frames.], batch size: 13, lr: 1.67e-04 +2022-05-07 20:18:23,094 INFO 
[train.py:715] (7/8) Epoch 13, batch 23000, loss[loss=0.1301, simple_loss=0.206, pruned_loss=0.02712, over 4784.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03152, over 972790.85 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:19:01,742 INFO [train.py:715] (7/8) Epoch 13, batch 23050, loss[loss=0.1408, simple_loss=0.2154, pruned_loss=0.0331, over 4881.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.0315, over 972741.75 frames.], batch size: 22, lr: 1.67e-04 +2022-05-07 20:19:40,069 INFO [train.py:715] (7/8) Epoch 13, batch 23100, loss[loss=0.1353, simple_loss=0.2042, pruned_loss=0.03319, over 4826.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03163, over 973013.32 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:20:17,986 INFO [train.py:715] (7/8) Epoch 13, batch 23150, loss[loss=0.1625, simple_loss=0.2328, pruned_loss=0.04606, over 4806.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03173, over 971950.41 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:20:56,163 INFO [train.py:715] (7/8) Epoch 13, batch 23200, loss[loss=0.1199, simple_loss=0.1887, pruned_loss=0.02549, over 4883.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2097, pruned_loss=0.0318, over 972398.77 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:21:34,316 INFO [train.py:715] (7/8) Epoch 13, batch 23250, loss[loss=0.1261, simple_loss=0.2032, pruned_loss=0.02451, over 4828.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2091, pruned_loss=0.03154, over 971752.12 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:22:11,785 INFO [train.py:715] (7/8) Epoch 13, batch 23300, loss[loss=0.1214, simple_loss=0.1934, pruned_loss=0.02464, over 4928.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2087, pruned_loss=0.03127, over 971839.43 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 20:22:50,105 INFO [train.py:715] (7/8) Epoch 13, batch 23350, loss[loss=0.1332, simple_loss=0.2055, pruned_loss=0.03044, over 4801.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2085, pruned_loss=0.03123, over 971994.71 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:23:28,678 INFO [train.py:715] (7/8) Epoch 13, batch 23400, loss[loss=0.1322, simple_loss=0.2006, pruned_loss=0.03187, over 4910.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2083, pruned_loss=0.03092, over 972363.28 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:24:06,994 INFO [train.py:715] (7/8) Epoch 13, batch 23450, loss[loss=0.1254, simple_loss=0.2057, pruned_loss=0.02256, over 4933.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2083, pruned_loss=0.03074, over 971756.09 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:24:45,015 INFO [train.py:715] (7/8) Epoch 13, batch 23500, loss[loss=0.1347, simple_loss=0.2074, pruned_loss=0.03097, over 4803.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03083, over 972159.28 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 20:25:23,768 INFO [train.py:715] (7/8) Epoch 13, batch 23550, loss[loss=0.1313, simple_loss=0.2064, pruned_loss=0.02807, over 4875.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03102, over 972168.57 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:26:02,267 INFO [train.py:715] (7/8) Epoch 13, batch 23600, loss[loss=0.1587, simple_loss=0.2388, pruned_loss=0.03925, over 4766.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03112, over 972713.55 frames.], batch size: 16, lr: 1.67e-04 +2022-05-07 20:26:39,839 INFO 
[train.py:715] (7/8) Epoch 13, batch 23650, loss[loss=0.1541, simple_loss=0.2228, pruned_loss=0.0427, over 4689.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.0311, over 972404.94 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:27:18,104 INFO [train.py:715] (7/8) Epoch 13, batch 23700, loss[loss=0.1791, simple_loss=0.2598, pruned_loss=0.04914, over 4955.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.031, over 972288.99 frames.], batch size: 39, lr: 1.67e-04 +2022-05-07 20:27:56,588 INFO [train.py:715] (7/8) Epoch 13, batch 23750, loss[loss=0.1312, simple_loss=0.2081, pruned_loss=0.02718, over 4976.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03064, over 972801.95 frames.], batch size: 28, lr: 1.67e-04 +2022-05-07 20:28:34,760 INFO [train.py:715] (7/8) Epoch 13, batch 23800, loss[loss=0.1108, simple_loss=0.1819, pruned_loss=0.01984, over 4964.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 973005.64 frames.], batch size: 24, lr: 1.67e-04 +2022-05-07 20:29:12,134 INFO [train.py:715] (7/8) Epoch 13, batch 23850, loss[loss=0.1429, simple_loss=0.2286, pruned_loss=0.02859, over 4984.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2085, pruned_loss=0.03063, over 972173.28 frames.], batch size: 28, lr: 1.67e-04 +2022-05-07 20:29:51,250 INFO [train.py:715] (7/8) Epoch 13, batch 23900, loss[loss=0.1489, simple_loss=0.2282, pruned_loss=0.03486, over 4936.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03106, over 972233.75 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:30:29,199 INFO [train.py:715] (7/8) Epoch 13, batch 23950, loss[loss=0.1424, simple_loss=0.2135, pruned_loss=0.03569, over 4805.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2098, pruned_loss=0.03135, over 971986.21 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:31:06,577 INFO [train.py:715] (7/8) Epoch 13, batch 24000, loss[loss=0.1509, simple_loss=0.2125, pruned_loss=0.04469, over 4778.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03153, over 972613.51 frames.], batch size: 17, lr: 1.67e-04 +2022-05-07 20:31:06,577 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 20:31:16,110 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1053, simple_loss=0.1891, pruned_loss=0.01069, over 914524.00 frames. 
+2022-05-07 20:31:53,723 INFO [train.py:715] (7/8) Epoch 13, batch 24050, loss[loss=0.13, simple_loss=0.2155, pruned_loss=0.02225, over 4797.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03146, over 972694.23 frames.], batch size: 21, lr: 1.67e-04 +2022-05-07 20:32:31,541 INFO [train.py:715] (7/8) Epoch 13, batch 24100, loss[loss=0.1424, simple_loss=0.2372, pruned_loss=0.0238, over 4888.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03143, over 973071.40 frames.], batch size: 22, lr: 1.67e-04 +2022-05-07 20:33:10,918 INFO [train.py:715] (7/8) Epoch 13, batch 24150, loss[loss=0.1403, simple_loss=0.2115, pruned_loss=0.0346, over 4902.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03104, over 973394.35 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 20:33:49,885 INFO [train.py:715] (7/8) Epoch 13, batch 24200, loss[loss=0.1125, simple_loss=0.1868, pruned_loss=0.01908, over 4933.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03104, over 973417.24 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 20:34:28,087 INFO [train.py:715] (7/8) Epoch 13, batch 24250, loss[loss=0.1359, simple_loss=0.2171, pruned_loss=0.02736, over 4824.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03118, over 972734.44 frames.], batch size: 27, lr: 1.67e-04 +2022-05-07 20:35:06,951 INFO [train.py:715] (7/8) Epoch 13, batch 24300, loss[loss=0.1198, simple_loss=0.1973, pruned_loss=0.02119, over 4837.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2097, pruned_loss=0.03126, over 972968.08 frames.], batch size: 26, lr: 1.67e-04 +2022-05-07 20:35:45,649 INFO [train.py:715] (7/8) Epoch 13, batch 24350, loss[loss=0.1325, simple_loss=0.2089, pruned_loss=0.02805, over 4895.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2094, pruned_loss=0.03134, over 973717.02 frames.], batch size: 32, lr: 1.67e-04 +2022-05-07 20:36:23,175 INFO [train.py:715] (7/8) Epoch 13, batch 24400, loss[loss=0.135, simple_loss=0.2001, pruned_loss=0.03495, over 4951.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03154, over 973969.38 frames.], batch size: 35, lr: 1.67e-04 +2022-05-07 20:37:01,583 INFO [train.py:715] (7/8) Epoch 13, batch 24450, loss[loss=0.1201, simple_loss=0.1896, pruned_loss=0.02531, over 4697.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2091, pruned_loss=0.0313, over 972922.21 frames.], batch size: 15, lr: 1.67e-04 +2022-05-07 20:37:40,236 INFO [train.py:715] (7/8) Epoch 13, batch 24500, loss[loss=0.1261, simple_loss=0.2036, pruned_loss=0.0243, over 4941.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.0311, over 972593.63 frames.], batch size: 29, lr: 1.67e-04 +2022-05-07 20:38:18,535 INFO [train.py:715] (7/8) Epoch 13, batch 24550, loss[loss=0.125, simple_loss=0.1994, pruned_loss=0.02531, over 4983.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03101, over 972871.57 frames.], batch size: 27, lr: 1.67e-04 +2022-05-07 20:38:56,898 INFO [train.py:715] (7/8) Epoch 13, batch 24600, loss[loss=0.1121, simple_loss=0.191, pruned_loss=0.01657, over 4951.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03101, over 972602.44 frames.], batch size: 23, lr: 1.67e-04 +2022-05-07 20:39:36,089 INFO [train.py:715] (7/8) Epoch 13, batch 24650, loss[loss=0.1255, simple_loss=0.2009, pruned_loss=0.02502, over 4871.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03102, over 972504.12 frames.], batch size: 20, lr: 1.67e-04 +2022-05-07 
20:40:14,987 INFO [train.py:715] (7/8) Epoch 13, batch 24700, loss[loss=0.1286, simple_loss=0.2016, pruned_loss=0.02777, over 4797.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03172, over 972074.60 frames.], batch size: 14, lr: 1.67e-04 +2022-05-07 20:40:52,896 INFO [train.py:715] (7/8) Epoch 13, batch 24750, loss[loss=0.1304, simple_loss=0.2161, pruned_loss=0.02234, over 4818.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.03167, over 971746.35 frames.], batch size: 25, lr: 1.67e-04 +2022-05-07 20:41:31,284 INFO [train.py:715] (7/8) Epoch 13, batch 24800, loss[loss=0.1593, simple_loss=0.2328, pruned_loss=0.04294, over 4988.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2104, pruned_loss=0.03189, over 972698.50 frames.], batch size: 31, lr: 1.67e-04 +2022-05-07 20:42:10,092 INFO [train.py:715] (7/8) Epoch 13, batch 24850, loss[loss=0.1265, simple_loss=0.2084, pruned_loss=0.02237, over 4919.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03184, over 973236.73 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 20:42:48,217 INFO [train.py:715] (7/8) Epoch 13, batch 24900, loss[loss=0.1474, simple_loss=0.2202, pruned_loss=0.03732, over 4962.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03099, over 973672.14 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 20:43:26,335 INFO [train.py:715] (7/8) Epoch 13, batch 24950, loss[loss=0.1307, simple_loss=0.2024, pruned_loss=0.02952, over 4910.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03123, over 973865.33 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 20:44:04,943 INFO [train.py:715] (7/8) Epoch 13, batch 25000, loss[loss=0.1035, simple_loss=0.1759, pruned_loss=0.01562, over 4815.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03135, over 973903.51 frames.], batch size: 26, lr: 1.66e-04 +2022-05-07 20:44:43,238 INFO [train.py:715] (7/8) Epoch 13, batch 25050, loss[loss=0.1497, simple_loss=0.23, pruned_loss=0.03467, over 4771.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.0311, over 973021.48 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 20:45:20,925 INFO [train.py:715] (7/8) Epoch 13, batch 25100, loss[loss=0.1213, simple_loss=0.2032, pruned_loss=0.01975, over 4895.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2102, pruned_loss=0.03128, over 973446.07 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 20:46:00,039 INFO [train.py:715] (7/8) Epoch 13, batch 25150, loss[loss=0.1329, simple_loss=0.2039, pruned_loss=0.03098, over 4815.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03127, over 972055.04 frames.], batch size: 25, lr: 1.66e-04 +2022-05-07 20:46:38,587 INFO [train.py:715] (7/8) Epoch 13, batch 25200, loss[loss=0.1429, simple_loss=0.2106, pruned_loss=0.0376, over 4818.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03123, over 971298.65 frames.], batch size: 13, lr: 1.66e-04 +2022-05-07 20:47:17,719 INFO [train.py:715] (7/8) Epoch 13, batch 25250, loss[loss=0.1352, simple_loss=0.2127, pruned_loss=0.02885, over 4826.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03136, over 971493.52 frames.], batch size: 27, lr: 1.66e-04 +2022-05-07 20:47:55,928 INFO [train.py:715] (7/8) Epoch 13, batch 25300, loss[loss=0.13, simple_loss=0.2085, pruned_loss=0.02578, over 4896.00 frames.], tot_loss[loss=0.1371, simple_loss=0.211, pruned_loss=0.03161, over 971033.83 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 20:48:34,492 
INFO [train.py:715] (7/8) Epoch 13, batch 25350, loss[loss=0.1417, simple_loss=0.2087, pruned_loss=0.03734, over 4974.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2114, pruned_loss=0.03195, over 972117.39 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 20:49:13,702 INFO [train.py:715] (7/8) Epoch 13, batch 25400, loss[loss=0.1269, simple_loss=0.193, pruned_loss=0.03036, over 4768.00 frames.], tot_loss[loss=0.1373, simple_loss=0.211, pruned_loss=0.03174, over 971100.35 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 20:49:51,565 INFO [train.py:715] (7/8) Epoch 13, batch 25450, loss[loss=0.1156, simple_loss=0.1882, pruned_loss=0.02148, over 4850.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03155, over 971273.14 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 20:50:30,629 INFO [train.py:715] (7/8) Epoch 13, batch 25500, loss[loss=0.1319, simple_loss=0.2177, pruned_loss=0.02304, over 4798.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03162, over 971411.39 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 20:51:09,202 INFO [train.py:715] (7/8) Epoch 13, batch 25550, loss[loss=0.1564, simple_loss=0.2275, pruned_loss=0.04263, over 4885.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2111, pruned_loss=0.0317, over 971859.99 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 20:51:47,746 INFO [train.py:715] (7/8) Epoch 13, batch 25600, loss[loss=0.1157, simple_loss=0.1848, pruned_loss=0.0233, over 4950.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2111, pruned_loss=0.03202, over 972020.74 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 20:52:25,798 INFO [train.py:715] (7/8) Epoch 13, batch 25650, loss[loss=0.1488, simple_loss=0.2289, pruned_loss=0.03436, over 4884.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03151, over 971551.11 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 20:53:05,134 INFO [train.py:715] (7/8) Epoch 13, batch 25700, loss[loss=0.1385, simple_loss=0.2075, pruned_loss=0.03475, over 4797.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.0312, over 971771.06 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 20:53:43,487 INFO [train.py:715] (7/8) Epoch 13, batch 25750, loss[loss=0.1316, simple_loss=0.2104, pruned_loss=0.02642, over 4749.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03151, over 971530.36 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 20:54:21,672 INFO [train.py:715] (7/8) Epoch 13, batch 25800, loss[loss=0.152, simple_loss=0.2205, pruned_loss=0.04172, over 4975.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03181, over 971524.76 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 20:55:00,566 INFO [train.py:715] (7/8) Epoch 13, batch 25850, loss[loss=0.1803, simple_loss=0.2553, pruned_loss=0.05266, over 4865.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.0314, over 971283.07 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 20:55:39,358 INFO [train.py:715] (7/8) Epoch 13, batch 25900, loss[loss=0.1497, simple_loss=0.2135, pruned_loss=0.04289, over 4861.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03147, over 971095.54 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 20:56:18,179 INFO [train.py:715] (7/8) Epoch 13, batch 25950, loss[loss=0.1391, simple_loss=0.2186, pruned_loss=0.02981, over 4974.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.03166, over 970861.16 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 20:56:57,182 INFO 
[train.py:715] (7/8) Epoch 13, batch 26000, loss[loss=0.1471, simple_loss=0.225, pruned_loss=0.03461, over 4942.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03183, over 971708.64 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 20:57:36,543 INFO [train.py:715] (7/8) Epoch 13, batch 26050, loss[loss=0.1448, simple_loss=0.2172, pruned_loss=0.03618, over 4709.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03142, over 971882.55 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 20:58:15,739 INFO [train.py:715] (7/8) Epoch 13, batch 26100, loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02975, over 4875.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2104, pruned_loss=0.03175, over 971537.11 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 20:58:54,123 INFO [train.py:715] (7/8) Epoch 13, batch 26150, loss[loss=0.1218, simple_loss=0.1924, pruned_loss=0.02559, over 4726.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2101, pruned_loss=0.03147, over 970831.12 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 20:59:33,348 INFO [train.py:715] (7/8) Epoch 13, batch 26200, loss[loss=0.1424, simple_loss=0.2089, pruned_loss=0.03799, over 4844.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03126, over 970502.13 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:00:12,168 INFO [train.py:715] (7/8) Epoch 13, batch 26250, loss[loss=0.1231, simple_loss=0.1923, pruned_loss=0.02697, over 4982.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03121, over 971409.22 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 21:00:50,344 INFO [train.py:715] (7/8) Epoch 13, batch 26300, loss[loss=0.2076, simple_loss=0.2613, pruned_loss=0.07691, over 4830.00 frames.], tot_loss[loss=0.137, simple_loss=0.2101, pruned_loss=0.03201, over 971051.91 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:01:28,297 INFO [train.py:715] (7/8) Epoch 13, batch 26350, loss[loss=0.1734, simple_loss=0.2423, pruned_loss=0.05227, over 4726.00 frames.], tot_loss[loss=0.1377, simple_loss=0.211, pruned_loss=0.03223, over 971206.71 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:02:07,164 INFO [train.py:715] (7/8) Epoch 13, batch 26400, loss[loss=0.1042, simple_loss=0.1759, pruned_loss=0.0162, over 4978.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03159, over 971766.20 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:02:46,105 INFO [train.py:715] (7/8) Epoch 13, batch 26450, loss[loss=0.128, simple_loss=0.2063, pruned_loss=0.02486, over 4850.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.03153, over 971960.80 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:03:24,276 INFO [train.py:715] (7/8) Epoch 13, batch 26500, loss[loss=0.1134, simple_loss=0.1889, pruned_loss=0.01889, over 4822.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03125, over 971963.38 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:04:03,397 INFO [train.py:715] (7/8) Epoch 13, batch 26550, loss[loss=0.1488, simple_loss=0.2179, pruned_loss=0.03982, over 4937.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03086, over 971314.43 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:04:41,839 INFO [train.py:715] (7/8) Epoch 13, batch 26600, loss[loss=0.1129, simple_loss=0.1835, pruned_loss=0.02112, over 4777.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03102, over 971040.85 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:05:20,065 INFO 
[train.py:715] (7/8) Epoch 13, batch 26650, loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03116, over 4920.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03067, over 970190.99 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:05:58,315 INFO [train.py:715] (7/8) Epoch 13, batch 26700, loss[loss=0.1101, simple_loss=0.1838, pruned_loss=0.01816, over 4941.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03077, over 970629.03 frames.], batch size: 29, lr: 1.66e-04 +2022-05-07 21:06:37,482 INFO [train.py:715] (7/8) Epoch 13, batch 26750, loss[loss=0.1449, simple_loss=0.2183, pruned_loss=0.03573, over 4909.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03092, over 971558.79 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 21:07:15,990 INFO [train.py:715] (7/8) Epoch 13, batch 26800, loss[loss=0.1268, simple_loss=0.202, pruned_loss=0.02582, over 4797.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.03117, over 971214.25 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:07:54,602 INFO [train.py:715] (7/8) Epoch 13, batch 26850, loss[loss=0.1319, simple_loss=0.2026, pruned_loss=0.03066, over 4820.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03088, over 971793.67 frames.], batch size: 27, lr: 1.66e-04 +2022-05-07 21:08:33,347 INFO [train.py:715] (7/8) Epoch 13, batch 26900, loss[loss=0.1059, simple_loss=0.181, pruned_loss=0.01537, over 4970.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03093, over 972421.53 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 21:09:11,795 INFO [train.py:715] (7/8) Epoch 13, batch 26950, loss[loss=0.1415, simple_loss=0.2034, pruned_loss=0.03978, over 4992.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03083, over 971456.50 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:09:50,377 INFO [train.py:715] (7/8) Epoch 13, batch 27000, loss[loss=0.1337, simple_loss=0.1978, pruned_loss=0.0348, over 4914.00 frames.], tot_loss[loss=0.137, simple_loss=0.2103, pruned_loss=0.03182, over 971767.20 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:09:50,377 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 21:09:59,936 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1053, simple_loss=0.1891, pruned_loss=0.01077, over 914524.00 frames. 
+2022-05-07 21:10:39,027 INFO [train.py:715] (7/8) Epoch 13, batch 27050, loss[loss=0.1376, simple_loss=0.2011, pruned_loss=0.03705, over 4823.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2099, pruned_loss=0.03193, over 972214.86 frames.], batch size: 27, lr: 1.66e-04 +2022-05-07 21:11:17,913 INFO [train.py:715] (7/8) Epoch 13, batch 27100, loss[loss=0.1307, simple_loss=0.2046, pruned_loss=0.02838, over 4923.00 frames.], tot_loss[loss=0.136, simple_loss=0.2092, pruned_loss=0.03144, over 971699.83 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:11:57,148 INFO [train.py:715] (7/8) Epoch 13, batch 27150, loss[loss=0.1483, simple_loss=0.2193, pruned_loss=0.03867, over 4804.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.0316, over 971623.54 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:12:36,109 INFO [train.py:715] (7/8) Epoch 13, batch 27200, loss[loss=0.1742, simple_loss=0.2398, pruned_loss=0.05434, over 4880.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2107, pruned_loss=0.03182, over 972308.74 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:13:14,911 INFO [train.py:715] (7/8) Epoch 13, batch 27250, loss[loss=0.1252, simple_loss=0.1948, pruned_loss=0.02782, over 4942.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.0317, over 972434.27 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 21:13:54,908 INFO [train.py:715] (7/8) Epoch 13, batch 27300, loss[loss=0.1528, simple_loss=0.2246, pruned_loss=0.04051, over 4937.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03192, over 972288.57 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:14:33,863 INFO [train.py:715] (7/8) Epoch 13, batch 27350, loss[loss=0.1492, simple_loss=0.2284, pruned_loss=0.035, over 4789.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03174, over 972421.63 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:15:11,632 INFO [train.py:715] (7/8) Epoch 13, batch 27400, loss[loss=0.1257, simple_loss=0.2017, pruned_loss=0.02489, over 4892.00 frames.], tot_loss[loss=0.1367, simple_loss=0.21, pruned_loss=0.0317, over 972946.27 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:15:49,743 INFO [train.py:715] (7/8) Epoch 13, batch 27450, loss[loss=0.1411, simple_loss=0.2099, pruned_loss=0.03613, over 4890.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03113, over 972941.68 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:16:30,589 INFO [train.py:715] (7/8) Epoch 13, batch 27500, loss[loss=0.1587, simple_loss=0.232, pruned_loss=0.04269, over 4982.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03067, over 972599.56 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:17:08,828 INFO [train.py:715] (7/8) Epoch 13, batch 27550, loss[loss=0.1336, simple_loss=0.2162, pruned_loss=0.0255, over 4986.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03085, over 973085.34 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:17:46,777 INFO [train.py:715] (7/8) Epoch 13, batch 27600, loss[loss=0.1433, simple_loss=0.2138, pruned_loss=0.03645, over 4979.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03111, over 972710.96 frames.], batch size: 33, lr: 1.66e-04 +2022-05-07 21:18:25,956 INFO [train.py:715] (7/8) Epoch 13, batch 27650, loss[loss=0.1314, simple_loss=0.2125, pruned_loss=0.02519, over 4985.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.0312, over 972654.54 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 
21:19:03,876 INFO [train.py:715] (7/8) Epoch 13, batch 27700, loss[loss=0.1616, simple_loss=0.241, pruned_loss=0.04113, over 4866.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03144, over 972224.77 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:19:42,880 INFO [train.py:715] (7/8) Epoch 13, batch 27750, loss[loss=0.128, simple_loss=0.2025, pruned_loss=0.02681, over 4818.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.03092, over 972836.97 frames.], batch size: 25, lr: 1.66e-04 +2022-05-07 21:20:21,393 INFO [train.py:715] (7/8) Epoch 13, batch 27800, loss[loss=0.1444, simple_loss=0.2204, pruned_loss=0.03427, over 4899.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03175, over 972734.39 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:21:00,119 INFO [train.py:715] (7/8) Epoch 13, batch 27850, loss[loss=0.1345, simple_loss=0.2063, pruned_loss=0.03134, over 4824.00 frames.], tot_loss[loss=0.137, simple_loss=0.2102, pruned_loss=0.0319, over 972676.35 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:21:38,312 INFO [train.py:715] (7/8) Epoch 13, batch 27900, loss[loss=0.1548, simple_loss=0.2264, pruned_loss=0.04161, over 4879.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.03178, over 972592.78 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:22:16,096 INFO [train.py:715] (7/8) Epoch 13, batch 27950, loss[loss=0.1247, simple_loss=0.2026, pruned_loss=0.02335, over 4978.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03154, over 973042.51 frames.], batch size: 28, lr: 1.66e-04 +2022-05-07 21:22:55,052 INFO [train.py:715] (7/8) Epoch 13, batch 28000, loss[loss=0.1244, simple_loss=0.2123, pruned_loss=0.01831, over 4822.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2099, pruned_loss=0.0312, over 973398.92 frames.], batch size: 26, lr: 1.66e-04 +2022-05-07 21:23:33,524 INFO [train.py:715] (7/8) Epoch 13, batch 28050, loss[loss=0.1412, simple_loss=0.2129, pruned_loss=0.03478, over 4954.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2107, pruned_loss=0.03116, over 972920.51 frames.], batch size: 35, lr: 1.66e-04 +2022-05-07 21:24:11,551 INFO [train.py:715] (7/8) Epoch 13, batch 28100, loss[loss=0.122, simple_loss=0.1958, pruned_loss=0.02409, over 4868.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2108, pruned_loss=0.03091, over 972906.58 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:24:49,597 INFO [train.py:715] (7/8) Epoch 13, batch 28150, loss[loss=0.1458, simple_loss=0.2209, pruned_loss=0.03536, over 4927.00 frames.], tot_loss[loss=0.137, simple_loss=0.2111, pruned_loss=0.0314, over 973036.70 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 21:25:28,816 INFO [train.py:715] (7/8) Epoch 13, batch 28200, loss[loss=0.1219, simple_loss=0.1946, pruned_loss=0.02458, over 4883.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2109, pruned_loss=0.03138, over 973518.53 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:26:06,611 INFO [train.py:715] (7/8) Epoch 13, batch 28250, loss[loss=0.1222, simple_loss=0.193, pruned_loss=0.02566, over 4859.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2111, pruned_loss=0.03164, over 973618.40 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:26:44,752 INFO [train.py:715] (7/8) Epoch 13, batch 28300, loss[loss=0.148, simple_loss=0.2294, pruned_loss=0.03334, over 4750.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03154, over 972689.86 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:27:23,460 INFO 
[train.py:715] (7/8) Epoch 13, batch 28350, loss[loss=0.1386, simple_loss=0.2149, pruned_loss=0.03113, over 4967.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03114, over 972989.15 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:28:01,607 INFO [train.py:715] (7/8) Epoch 13, batch 28400, loss[loss=0.1289, simple_loss=0.2031, pruned_loss=0.02739, over 4921.00 frames.], tot_loss[loss=0.136, simple_loss=0.2103, pruned_loss=0.03088, over 972786.54 frames.], batch size: 29, lr: 1.66e-04 +2022-05-07 21:28:40,046 INFO [train.py:715] (7/8) Epoch 13, batch 28450, loss[loss=0.1199, simple_loss=0.1942, pruned_loss=0.02285, over 4780.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03085, over 972426.78 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:29:18,385 INFO [train.py:715] (7/8) Epoch 13, batch 28500, loss[loss=0.1366, simple_loss=0.2164, pruned_loss=0.02836, over 4933.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2115, pruned_loss=0.03174, over 972621.79 frames.], batch size: 29, lr: 1.66e-04 +2022-05-07 21:29:57,060 INFO [train.py:715] (7/8) Epoch 13, batch 28550, loss[loss=0.1455, simple_loss=0.2179, pruned_loss=0.03655, over 4773.00 frames.], tot_loss[loss=0.1378, simple_loss=0.2122, pruned_loss=0.03166, over 972267.83 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:30:35,259 INFO [train.py:715] (7/8) Epoch 13, batch 28600, loss[loss=0.1403, simple_loss=0.2153, pruned_loss=0.03271, over 4887.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2115, pruned_loss=0.03166, over 972900.74 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:31:13,615 INFO [train.py:715] (7/8) Epoch 13, batch 28650, loss[loss=0.1526, simple_loss=0.2239, pruned_loss=0.0406, over 4972.00 frames.], tot_loss[loss=0.1368, simple_loss=0.211, pruned_loss=0.03134, over 973627.58 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 21:31:52,261 INFO [train.py:715] (7/8) Epoch 13, batch 28700, loss[loss=0.1207, simple_loss=0.1919, pruned_loss=0.02472, over 4990.00 frames.], tot_loss[loss=0.1367, simple_loss=0.211, pruned_loss=0.03119, over 973551.46 frames.], batch size: 26, lr: 1.66e-04 +2022-05-07 21:32:30,333 INFO [train.py:715] (7/8) Epoch 13, batch 28750, loss[loss=0.1557, simple_loss=0.233, pruned_loss=0.03916, over 4943.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.0309, over 973447.40 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 21:33:08,637 INFO [train.py:715] (7/8) Epoch 13, batch 28800, loss[loss=0.158, simple_loss=0.2291, pruned_loss=0.04341, over 4782.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2105, pruned_loss=0.03109, over 972152.32 frames.], batch size: 17, lr: 1.66e-04 +2022-05-07 21:33:47,845 INFO [train.py:715] (7/8) Epoch 13, batch 28850, loss[loss=0.1481, simple_loss=0.2133, pruned_loss=0.04142, over 4786.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03128, over 971837.50 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:34:26,366 INFO [train.py:715] (7/8) Epoch 13, batch 28900, loss[loss=0.1462, simple_loss=0.212, pruned_loss=0.04017, over 4756.00 frames.], tot_loss[loss=0.1376, simple_loss=0.2114, pruned_loss=0.03189, over 971694.54 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:35:04,279 INFO [train.py:715] (7/8) Epoch 13, batch 28950, loss[loss=0.1361, simple_loss=0.2105, pruned_loss=0.03086, over 4867.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2117, pruned_loss=0.03163, over 971986.56 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:35:42,443 INFO 
[train.py:715] (7/8) Epoch 13, batch 29000, loss[loss=0.1287, simple_loss=0.2069, pruned_loss=0.02522, over 4747.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2112, pruned_loss=0.03131, over 971420.93 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:36:21,623 INFO [train.py:715] (7/8) Epoch 13, batch 29050, loss[loss=0.1365, simple_loss=0.2161, pruned_loss=0.02847, over 4799.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2105, pruned_loss=0.03129, over 971309.41 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:37:00,157 INFO [train.py:715] (7/8) Epoch 13, batch 29100, loss[loss=0.1415, simple_loss=0.2126, pruned_loss=0.03514, over 4907.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2107, pruned_loss=0.0318, over 971877.46 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:37:38,204 INFO [train.py:715] (7/8) Epoch 13, batch 29150, loss[loss=0.121, simple_loss=0.1847, pruned_loss=0.02867, over 4851.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03145, over 971873.31 frames.], batch size: 12, lr: 1.66e-04 +2022-05-07 21:38:16,963 INFO [train.py:715] (7/8) Epoch 13, batch 29200, loss[loss=0.1434, simple_loss=0.204, pruned_loss=0.04139, over 4834.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.0314, over 971582.77 frames.], batch size: 30, lr: 1.66e-04 +2022-05-07 21:38:55,210 INFO [train.py:715] (7/8) Epoch 13, batch 29250, loss[loss=0.1378, simple_loss=0.201, pruned_loss=0.03735, over 4833.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03151, over 970829.49 frames.], batch size: 30, lr: 1.66e-04 +2022-05-07 21:39:34,054 INFO [train.py:715] (7/8) Epoch 13, batch 29300, loss[loss=0.1555, simple_loss=0.227, pruned_loss=0.04196, over 4749.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03132, over 971413.59 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:40:12,799 INFO [train.py:715] (7/8) Epoch 13, batch 29350, loss[loss=0.1486, simple_loss=0.2221, pruned_loss=0.03754, over 4768.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03113, over 971711.65 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:40:51,674 INFO [train.py:715] (7/8) Epoch 13, batch 29400, loss[loss=0.1672, simple_loss=0.2433, pruned_loss=0.04554, over 4808.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2099, pruned_loss=0.03101, over 971750.07 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:41:29,698 INFO [train.py:715] (7/8) Epoch 13, batch 29450, loss[loss=0.1554, simple_loss=0.2207, pruned_loss=0.04507, over 4865.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2106, pruned_loss=0.0315, over 971728.13 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:42:08,737 INFO [train.py:715] (7/8) Epoch 13, batch 29500, loss[loss=0.1363, simple_loss=0.2065, pruned_loss=0.03309, over 4828.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2102, pruned_loss=0.03138, over 971358.26 frames.], batch size: 25, lr: 1.66e-04 +2022-05-07 21:42:47,371 INFO [train.py:715] (7/8) Epoch 13, batch 29550, loss[loss=0.1281, simple_loss=0.2043, pruned_loss=0.02593, over 4847.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2109, pruned_loss=0.03148, over 971306.85 frames.], batch size: 30, lr: 1.66e-04 +2022-05-07 21:43:25,733 INFO [train.py:715] (7/8) Epoch 13, batch 29600, loss[loss=0.1362, simple_loss=0.2215, pruned_loss=0.02543, over 4797.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03143, over 971583.16 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 21:44:03,484 INFO 
[train.py:715] (7/8) Epoch 13, batch 29650, loss[loss=0.1361, simple_loss=0.2004, pruned_loss=0.03591, over 4820.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03115, over 971826.14 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:44:41,766 INFO [train.py:715] (7/8) Epoch 13, batch 29700, loss[loss=0.1159, simple_loss=0.1923, pruned_loss=0.01977, over 4936.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03095, over 971733.79 frames.], batch size: 29, lr: 1.66e-04 +2022-05-07 21:45:20,124 INFO [train.py:715] (7/8) Epoch 13, batch 29750, loss[loss=0.1243, simple_loss=0.1971, pruned_loss=0.02578, over 4901.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03099, over 971703.60 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:45:59,496 INFO [train.py:715] (7/8) Epoch 13, batch 29800, loss[loss=0.1199, simple_loss=0.1955, pruned_loss=0.02213, over 4859.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03083, over 972571.66 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:46:38,729 INFO [train.py:715] (7/8) Epoch 13, batch 29850, loss[loss=0.1211, simple_loss=0.19, pruned_loss=0.0261, over 4865.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03092, over 972234.69 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:47:18,337 INFO [train.py:715] (7/8) Epoch 13, batch 29900, loss[loss=0.1309, simple_loss=0.2019, pruned_loss=0.03001, over 4827.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03134, over 972693.96 frames.], batch size: 26, lr: 1.66e-04 +2022-05-07 21:47:57,734 INFO [train.py:715] (7/8) Epoch 13, batch 29950, loss[loss=0.1478, simple_loss=0.2161, pruned_loss=0.03975, over 4785.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2108, pruned_loss=0.03136, over 972118.37 frames.], batch size: 14, lr: 1.66e-04 +2022-05-07 21:48:36,364 INFO [train.py:715] (7/8) Epoch 13, batch 30000, loss[loss=0.1189, simple_loss=0.1873, pruned_loss=0.02527, over 4913.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03109, over 971777.40 frames.], batch size: 18, lr: 1.66e-04 +2022-05-07 21:48:36,364 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 21:48:45,862 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1891, pruned_loss=0.01083, over 914524.00 frames. 
+2022-05-07 21:49:25,287 INFO [train.py:715] (7/8) Epoch 13, batch 30050, loss[loss=0.1395, simple_loss=0.2196, pruned_loss=0.02973, over 4805.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2098, pruned_loss=0.03059, over 972373.57 frames.], batch size: 21, lr: 1.66e-04 +2022-05-07 21:50:05,095 INFO [train.py:715] (7/8) Epoch 13, batch 30100, loss[loss=0.1287, simple_loss=0.213, pruned_loss=0.02217, over 4694.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2114, pruned_loss=0.0311, over 971910.10 frames.], batch size: 15, lr: 1.66e-04 +2022-05-07 21:50:44,563 INFO [train.py:715] (7/8) Epoch 13, batch 30150, loss[loss=0.1253, simple_loss=0.1967, pruned_loss=0.02696, over 4795.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2117, pruned_loss=0.03154, over 972128.57 frames.], batch size: 24, lr: 1.66e-04 +2022-05-07 21:51:23,146 INFO [train.py:715] (7/8) Epoch 13, batch 30200, loss[loss=0.1239, simple_loss=0.2059, pruned_loss=0.02091, over 4889.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03094, over 972567.33 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:52:02,984 INFO [train.py:715] (7/8) Epoch 13, batch 30250, loss[loss=0.1243, simple_loss=0.1982, pruned_loss=0.0252, over 4896.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03148, over 971713.12 frames.], batch size: 19, lr: 1.66e-04 +2022-05-07 21:52:42,785 INFO [train.py:715] (7/8) Epoch 13, batch 30300, loss[loss=0.1535, simple_loss=0.2347, pruned_loss=0.03616, over 4862.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2113, pruned_loss=0.03162, over 972111.77 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:53:22,303 INFO [train.py:715] (7/8) Epoch 13, batch 30350, loss[loss=0.1382, simple_loss=0.2051, pruned_loss=0.03565, over 4854.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03127, over 972149.13 frames.], batch size: 20, lr: 1.66e-04 +2022-05-07 21:54:01,877 INFO [train.py:715] (7/8) Epoch 13, batch 30400, loss[loss=0.1326, simple_loss=0.2097, pruned_loss=0.02777, over 4861.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03095, over 971504.41 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:54:42,498 INFO [train.py:715] (7/8) Epoch 13, batch 30450, loss[loss=0.1605, simple_loss=0.2288, pruned_loss=0.04612, over 4758.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.0313, over 971885.65 frames.], batch size: 16, lr: 1.66e-04 +2022-05-07 21:55:22,608 INFO [train.py:715] (7/8) Epoch 13, batch 30500, loss[loss=0.1578, simple_loss=0.2332, pruned_loss=0.04117, over 4971.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03151, over 972431.19 frames.], batch size: 39, lr: 1.66e-04 +2022-05-07 21:56:02,402 INFO [train.py:715] (7/8) Epoch 13, batch 30550, loss[loss=0.118, simple_loss=0.1941, pruned_loss=0.02101, over 4874.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2113, pruned_loss=0.03187, over 972156.02 frames.], batch size: 22, lr: 1.66e-04 +2022-05-07 21:56:43,836 INFO [train.py:715] (7/8) Epoch 13, batch 30600, loss[loss=0.1333, simple_loss=0.2065, pruned_loss=0.03006, over 4843.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2106, pruned_loss=0.03156, over 972431.77 frames.], batch size: 32, lr: 1.66e-04 +2022-05-07 21:57:24,955 INFO [train.py:715] (7/8) Epoch 13, batch 30650, loss[loss=0.1533, simple_loss=0.2229, pruned_loss=0.04183, over 4756.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2103, pruned_loss=0.03174, over 972393.95 frames.], batch size: 16, lr: 1.65e-04 
+2022-05-07 21:58:05,363 INFO [train.py:715] (7/8) Epoch 13, batch 30700, loss[loss=0.1393, simple_loss=0.216, pruned_loss=0.03128, over 4951.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03148, over 972858.99 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 21:58:45,824 INFO [train.py:715] (7/8) Epoch 13, batch 30750, loss[loss=0.1027, simple_loss=0.1822, pruned_loss=0.01155, over 4887.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2103, pruned_loss=0.03156, over 972767.48 frames.], batch size: 22, lr: 1.65e-04 +2022-05-07 21:59:26,819 INFO [train.py:715] (7/8) Epoch 13, batch 30800, loss[loss=0.134, simple_loss=0.2203, pruned_loss=0.02391, over 4779.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03104, over 972817.67 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:00:07,577 INFO [train.py:715] (7/8) Epoch 13, batch 30850, loss[loss=0.125, simple_loss=0.1901, pruned_loss=0.03002, over 4830.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03119, over 972465.69 frames.], batch size: 25, lr: 1.65e-04 +2022-05-07 22:00:48,211 INFO [train.py:715] (7/8) Epoch 13, batch 30900, loss[loss=0.1049, simple_loss=0.1726, pruned_loss=0.01857, over 4783.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2085, pruned_loss=0.03086, over 972387.03 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:01:29,253 INFO [train.py:715] (7/8) Epoch 13, batch 30950, loss[loss=0.1208, simple_loss=0.2033, pruned_loss=0.01917, over 4877.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03113, over 972223.40 frames.], batch size: 22, lr: 1.65e-04 +2022-05-07 22:02:09,963 INFO [train.py:715] (7/8) Epoch 13, batch 31000, loss[loss=0.135, simple_loss=0.1984, pruned_loss=0.03582, over 4857.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03116, over 971840.01 frames.], batch size: 20, lr: 1.65e-04 +2022-05-07 22:02:50,144 INFO [train.py:715] (7/8) Epoch 13, batch 31050, loss[loss=0.1755, simple_loss=0.2446, pruned_loss=0.05318, over 4711.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03121, over 972253.21 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:03:30,719 INFO [train.py:715] (7/8) Epoch 13, batch 31100, loss[loss=0.1578, simple_loss=0.2286, pruned_loss=0.0435, over 4994.00 frames.], tot_loss[loss=0.136, simple_loss=0.209, pruned_loss=0.03154, over 972506.36 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:04:11,682 INFO [train.py:715] (7/8) Epoch 13, batch 31150, loss[loss=0.153, simple_loss=0.2291, pruned_loss=0.03842, over 4811.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03107, over 972973.47 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:04:52,785 INFO [train.py:715] (7/8) Epoch 13, batch 31200, loss[loss=0.1301, simple_loss=0.1997, pruned_loss=0.03029, over 4940.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03106, over 972798.44 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:05:32,914 INFO [train.py:715] (7/8) Epoch 13, batch 31250, loss[loss=0.1376, simple_loss=0.2138, pruned_loss=0.03068, over 4912.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03106, over 972780.72 frames.], batch size: 23, lr: 1.65e-04 +2022-05-07 22:06:13,241 INFO [train.py:715] (7/8) Epoch 13, batch 31300, loss[loss=0.1458, simple_loss=0.2247, pruned_loss=0.03347, over 4934.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.03102, over 972551.18 frames.], batch size: 29, lr: 1.65e-04 +2022-05-07 
22:06:53,516 INFO [train.py:715] (7/8) Epoch 13, batch 31350, loss[loss=0.135, simple_loss=0.2042, pruned_loss=0.03288, over 4820.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03097, over 971807.58 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:07:33,262 INFO [train.py:715] (7/8) Epoch 13, batch 31400, loss[loss=0.1383, simple_loss=0.2157, pruned_loss=0.03048, over 4965.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03142, over 971590.08 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:08:13,736 INFO [train.py:715] (7/8) Epoch 13, batch 31450, loss[loss=0.1402, simple_loss=0.2104, pruned_loss=0.03502, over 4872.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.0319, over 971582.46 frames.], batch size: 30, lr: 1.65e-04 +2022-05-07 22:08:54,100 INFO [train.py:715] (7/8) Epoch 13, batch 31500, loss[loss=0.137, simple_loss=0.217, pruned_loss=0.0285, over 4989.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03179, over 972025.40 frames.], batch size: 25, lr: 1.65e-04 +2022-05-07 22:09:33,924 INFO [train.py:715] (7/8) Epoch 13, batch 31550, loss[loss=0.1288, simple_loss=0.2006, pruned_loss=0.02851, over 4899.00 frames.], tot_loss[loss=0.137, simple_loss=0.2109, pruned_loss=0.03153, over 972601.08 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:10:14,441 INFO [train.py:715] (7/8) Epoch 13, batch 31600, loss[loss=0.1449, simple_loss=0.2132, pruned_loss=0.0383, over 4766.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03086, over 972607.59 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:10:55,015 INFO [train.py:715] (7/8) Epoch 13, batch 31650, loss[loss=0.1158, simple_loss=0.1947, pruned_loss=0.01845, over 4815.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03102, over 972115.82 frames.], batch size: 26, lr: 1.65e-04 +2022-05-07 22:11:35,405 INFO [train.py:715] (7/8) Epoch 13, batch 31700, loss[loss=0.142, simple_loss=0.2212, pruned_loss=0.03136, over 4928.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2081, pruned_loss=0.03073, over 971824.50 frames.], batch size: 29, lr: 1.65e-04 +2022-05-07 22:12:15,837 INFO [train.py:715] (7/8) Epoch 13, batch 31750, loss[loss=0.1565, simple_loss=0.2336, pruned_loss=0.03974, over 4913.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2083, pruned_loss=0.03076, over 971664.77 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:12:56,363 INFO [train.py:715] (7/8) Epoch 13, batch 31800, loss[loss=0.1184, simple_loss=0.1831, pruned_loss=0.02683, over 4788.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03079, over 971685.64 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:13:37,271 INFO [train.py:715] (7/8) Epoch 13, batch 31850, loss[loss=0.1387, simple_loss=0.2203, pruned_loss=0.02854, over 4774.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2086, pruned_loss=0.03087, over 971696.78 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:14:18,120 INFO [train.py:715] (7/8) Epoch 13, batch 31900, loss[loss=0.1271, simple_loss=0.2115, pruned_loss=0.02136, over 4792.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2085, pruned_loss=0.0309, over 971640.75 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:14:59,147 INFO [train.py:715] (7/8) Epoch 13, batch 31950, loss[loss=0.1363, simple_loss=0.2037, pruned_loss=0.03447, over 4904.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03098, over 972246.30 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:15:39,539 
INFO [train.py:715] (7/8) Epoch 13, batch 32000, loss[loss=0.1262, simple_loss=0.1945, pruned_loss=0.02896, over 4891.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03077, over 972145.05 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:16:20,151 INFO [train.py:715] (7/8) Epoch 13, batch 32050, loss[loss=0.1193, simple_loss=0.1952, pruned_loss=0.02173, over 4802.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2093, pruned_loss=0.0316, over 972098.50 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:17:00,690 INFO [train.py:715] (7/8) Epoch 13, batch 32100, loss[loss=0.1509, simple_loss=0.2199, pruned_loss=0.04097, over 4938.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03205, over 972755.95 frames.], batch size: 39, lr: 1.65e-04 +2022-05-07 22:17:41,703 INFO [train.py:715] (7/8) Epoch 13, batch 32150, loss[loss=0.1249, simple_loss=0.1934, pruned_loss=0.02821, over 4784.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2107, pruned_loss=0.0323, over 972552.05 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:18:22,394 INFO [train.py:715] (7/8) Epoch 13, batch 32200, loss[loss=0.137, simple_loss=0.2118, pruned_loss=0.03112, over 4806.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03221, over 972570.90 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:19:03,053 INFO [train.py:715] (7/8) Epoch 13, batch 32250, loss[loss=0.1708, simple_loss=0.2411, pruned_loss=0.05026, over 4771.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2097, pruned_loss=0.0316, over 972686.05 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:19:43,880 INFO [train.py:715] (7/8) Epoch 13, batch 32300, loss[loss=0.1002, simple_loss=0.1725, pruned_loss=0.01396, over 4783.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2101, pruned_loss=0.03163, over 972736.44 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:20:24,943 INFO [train.py:715] (7/8) Epoch 13, batch 32350, loss[loss=0.163, simple_loss=0.2221, pruned_loss=0.05195, over 4747.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03142, over 972376.70 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:21:06,369 INFO [train.py:715] (7/8) Epoch 13, batch 32400, loss[loss=0.1629, simple_loss=0.2221, pruned_loss=0.05182, over 4796.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03134, over 971691.34 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:21:47,426 INFO [train.py:715] (7/8) Epoch 13, batch 32450, loss[loss=0.1198, simple_loss=0.1868, pruned_loss=0.02638, over 4859.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03158, over 970607.36 frames.], batch size: 38, lr: 1.65e-04 +2022-05-07 22:22:28,212 INFO [train.py:715] (7/8) Epoch 13, batch 32500, loss[loss=0.1303, simple_loss=0.1997, pruned_loss=0.03047, over 4806.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2094, pruned_loss=0.03118, over 970856.71 frames.], batch size: 25, lr: 1.65e-04 +2022-05-07 22:23:09,251 INFO [train.py:715] (7/8) Epoch 13, batch 32550, loss[loss=0.1286, simple_loss=0.2024, pruned_loss=0.02735, over 4798.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2097, pruned_loss=0.03158, over 971371.18 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:23:49,649 INFO [train.py:715] (7/8) Epoch 13, batch 32600, loss[loss=0.1341, simple_loss=0.2211, pruned_loss=0.02356, over 4944.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2106, pruned_loss=0.03179, over 971142.92 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:24:30,000 INFO 
[train.py:715] (7/8) Epoch 13, batch 32650, loss[loss=0.1457, simple_loss=0.2117, pruned_loss=0.03981, over 4835.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03142, over 971134.25 frames.], batch size: 30, lr: 1.65e-04 +2022-05-07 22:25:10,589 INFO [train.py:715] (7/8) Epoch 13, batch 32700, loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03091, over 4773.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03142, over 971508.43 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:25:50,910 INFO [train.py:715] (7/8) Epoch 13, batch 32750, loss[loss=0.1594, simple_loss=0.2246, pruned_loss=0.04707, over 4970.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03114, over 972235.54 frames.], batch size: 39, lr: 1.65e-04 +2022-05-07 22:26:31,936 INFO [train.py:715] (7/8) Epoch 13, batch 32800, loss[loss=0.1103, simple_loss=0.1799, pruned_loss=0.02035, over 4812.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03106, over 972080.43 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:27:12,677 INFO [train.py:715] (7/8) Epoch 13, batch 32850, loss[loss=0.1659, simple_loss=0.2419, pruned_loss=0.04493, over 4771.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03111, over 972310.36 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:27:53,752 INFO [train.py:715] (7/8) Epoch 13, batch 32900, loss[loss=0.1119, simple_loss=0.1826, pruned_loss=0.02061, over 4911.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03073, over 972826.77 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:28:33,953 INFO [train.py:715] (7/8) Epoch 13, batch 32950, loss[loss=0.1494, simple_loss=0.2398, pruned_loss=0.02945, over 4806.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2101, pruned_loss=0.03135, over 973253.28 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:29:14,629 INFO [train.py:715] (7/8) Epoch 13, batch 33000, loss[loss=0.1157, simple_loss=0.1811, pruned_loss=0.02512, over 4749.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2106, pruned_loss=0.03188, over 972891.38 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:29:14,630 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 22:29:24,503 INFO [train.py:742] (7/8) Epoch 13, validation: loss=0.1054, simple_loss=0.1892, pruned_loss=0.01081, over 914524.00 frames. 
+2022-05-07 22:30:05,557 INFO [train.py:715] (7/8) Epoch 13, batch 33050, loss[loss=0.1233, simple_loss=0.1977, pruned_loss=0.02445, over 4865.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03172, over 973111.91 frames.], batch size: 32, lr: 1.65e-04 +2022-05-07 22:30:45,207 INFO [train.py:715] (7/8) Epoch 13, batch 33100, loss[loss=0.1265, simple_loss=0.1976, pruned_loss=0.02776, over 4950.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2108, pruned_loss=0.03177, over 973349.73 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:31:25,147 INFO [train.py:715] (7/8) Epoch 13, batch 33150, loss[loss=0.167, simple_loss=0.2403, pruned_loss=0.04685, over 4962.00 frames.], tot_loss[loss=0.1365, simple_loss=0.21, pruned_loss=0.03152, over 973314.86 frames.], batch size: 35, lr: 1.65e-04 +2022-05-07 22:32:05,571 INFO [train.py:715] (7/8) Epoch 13, batch 33200, loss[loss=0.1322, simple_loss=0.2076, pruned_loss=0.02839, over 4775.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03095, over 972713.83 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:32:46,036 INFO [train.py:715] (7/8) Epoch 13, batch 33250, loss[loss=0.1623, simple_loss=0.2359, pruned_loss=0.04437, over 4789.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03115, over 972795.02 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:33:26,587 INFO [train.py:715] (7/8) Epoch 13, batch 33300, loss[loss=0.139, simple_loss=0.2102, pruned_loss=0.03394, over 4763.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03104, over 973078.81 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:34:07,015 INFO [train.py:715] (7/8) Epoch 13, batch 33350, loss[loss=0.1283, simple_loss=0.198, pruned_loss=0.02926, over 4937.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03081, over 973402.33 frames.], batch size: 18, lr: 1.65e-04 +2022-05-07 22:34:47,634 INFO [train.py:715] (7/8) Epoch 13, batch 33400, loss[loss=0.1491, simple_loss=0.2235, pruned_loss=0.03736, over 4993.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.0304, over 973600.23 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:35:28,234 INFO [train.py:715] (7/8) Epoch 13, batch 33450, loss[loss=0.1179, simple_loss=0.1972, pruned_loss=0.01935, over 4940.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03051, over 973284.29 frames.], batch size: 29, lr: 1.65e-04 +2022-05-07 22:36:08,958 INFO [train.py:715] (7/8) Epoch 13, batch 33500, loss[loss=0.1228, simple_loss=0.1981, pruned_loss=0.02378, over 4970.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.0304, over 973796.37 frames.], batch size: 28, lr: 1.65e-04 +2022-05-07 22:36:49,606 INFO [train.py:715] (7/8) Epoch 13, batch 33550, loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03, over 4773.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03082, over 973262.37 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:37:30,305 INFO [train.py:715] (7/8) Epoch 13, batch 33600, loss[loss=0.161, simple_loss=0.222, pruned_loss=0.05005, over 4967.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03073, over 972146.37 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:38:10,829 INFO [train.py:715] (7/8) Epoch 13, batch 33650, loss[loss=0.1255, simple_loss=0.2019, pruned_loss=0.02452, over 4837.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03047, over 971350.56 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 
22:38:51,062 INFO [train.py:715] (7/8) Epoch 13, batch 33700, loss[loss=0.129, simple_loss=0.2052, pruned_loss=0.02641, over 4923.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03035, over 970888.03 frames.], batch size: 23, lr: 1.65e-04 +2022-05-07 22:39:32,032 INFO [train.py:715] (7/8) Epoch 13, batch 33750, loss[loss=0.1321, simple_loss=0.2084, pruned_loss=0.0279, over 4902.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03038, over 971027.40 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:40:12,829 INFO [train.py:715] (7/8) Epoch 13, batch 33800, loss[loss=0.1393, simple_loss=0.2219, pruned_loss=0.02836, over 4914.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.0306, over 972060.70 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:40:53,571 INFO [train.py:715] (7/8) Epoch 13, batch 33850, loss[loss=0.1361, simple_loss=0.215, pruned_loss=0.02858, over 4759.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03067, over 972459.28 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:41:34,025 INFO [train.py:715] (7/8) Epoch 13, batch 33900, loss[loss=0.1354, simple_loss=0.2139, pruned_loss=0.02849, over 4987.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03079, over 972615.96 frames.], batch size: 14, lr: 1.65e-04 +2022-05-07 22:42:15,284 INFO [train.py:715] (7/8) Epoch 13, batch 33950, loss[loss=0.1534, simple_loss=0.2149, pruned_loss=0.04599, over 4968.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.0311, over 972153.51 frames.], batch size: 35, lr: 1.65e-04 +2022-05-07 22:42:56,289 INFO [train.py:715] (7/8) Epoch 13, batch 34000, loss[loss=0.153, simple_loss=0.2223, pruned_loss=0.04185, over 4961.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.0305, over 972436.94 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:43:36,857 INFO [train.py:715] (7/8) Epoch 13, batch 34050, loss[loss=0.1701, simple_loss=0.2231, pruned_loss=0.05858, over 4744.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03063, over 972618.11 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:44:17,680 INFO [train.py:715] (7/8) Epoch 13, batch 34100, loss[loss=0.1151, simple_loss=0.1834, pruned_loss=0.02342, over 4846.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03108, over 972347.91 frames.], batch size: 32, lr: 1.65e-04 +2022-05-07 22:44:57,554 INFO [train.py:715] (7/8) Epoch 13, batch 34150, loss[loss=0.1293, simple_loss=0.2041, pruned_loss=0.02726, over 4837.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03091, over 971678.66 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:45:38,243 INFO [train.py:715] (7/8) Epoch 13, batch 34200, loss[loss=0.1142, simple_loss=0.1778, pruned_loss=0.02529, over 4788.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03035, over 971933.58 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:46:18,601 INFO [train.py:715] (7/8) Epoch 13, batch 34250, loss[loss=0.131, simple_loss=0.1954, pruned_loss=0.03331, over 4843.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03057, over 972806.73 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:46:59,526 INFO [train.py:715] (7/8) Epoch 13, batch 34300, loss[loss=0.0975, simple_loss=0.1584, pruned_loss=0.01831, over 4866.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03099, over 973155.95 frames.], batch size: 13, lr: 1.65e-04 +2022-05-07 22:47:39,595 
INFO [train.py:715] (7/8) Epoch 13, batch 34350, loss[loss=0.1545, simple_loss=0.229, pruned_loss=0.04001, over 4772.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.0307, over 973419.83 frames.], batch size: 17, lr: 1.65e-04 +2022-05-07 22:48:20,188 INFO [train.py:715] (7/8) Epoch 13, batch 34400, loss[loss=0.1349, simple_loss=0.2209, pruned_loss=0.02439, over 4948.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03081, over 973816.91 frames.], batch size: 24, lr: 1.65e-04 +2022-05-07 22:49:01,283 INFO [train.py:715] (7/8) Epoch 13, batch 34450, loss[loss=0.1353, simple_loss=0.2111, pruned_loss=0.0298, over 4931.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2102, pruned_loss=0.03074, over 974438.14 frames.], batch size: 21, lr: 1.65e-04 +2022-05-07 22:49:41,688 INFO [train.py:715] (7/8) Epoch 13, batch 34500, loss[loss=0.1142, simple_loss=0.1887, pruned_loss=0.01988, over 4748.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2102, pruned_loss=0.03077, over 973420.41 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:50:21,496 INFO [train.py:715] (7/8) Epoch 13, batch 34550, loss[loss=0.1288, simple_loss=0.2034, pruned_loss=0.02707, over 4975.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03075, over 972792.15 frames.], batch size: 28, lr: 1.65e-04 +2022-05-07 22:51:01,502 INFO [train.py:715] (7/8) Epoch 13, batch 34600, loss[loss=0.1358, simple_loss=0.1996, pruned_loss=0.03599, over 4746.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.0307, over 971942.16 frames.], batch size: 16, lr: 1.65e-04 +2022-05-07 22:51:40,867 INFO [train.py:715] (7/8) Epoch 13, batch 34650, loss[loss=0.1456, simple_loss=0.2095, pruned_loss=0.04087, over 4899.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03079, over 971885.19 frames.], batch size: 19, lr: 1.65e-04 +2022-05-07 22:52:20,404 INFO [train.py:715] (7/8) Epoch 13, batch 34700, loss[loss=0.1674, simple_loss=0.2472, pruned_loss=0.04378, over 4703.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.0307, over 971793.98 frames.], batch size: 15, lr: 1.65e-04 +2022-05-07 22:52:59,360 INFO [train.py:715] (7/8) Epoch 13, batch 34750, loss[loss=0.1526, simple_loss=0.2171, pruned_loss=0.04405, over 4838.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.0306, over 971714.33 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:53:36,108 INFO [train.py:715] (7/8) Epoch 13, batch 34800, loss[loss=0.1151, simple_loss=0.1858, pruned_loss=0.02218, over 4841.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2081, pruned_loss=0.03072, over 971233.61 frames.], batch size: 12, lr: 1.65e-04 +2022-05-07 22:54:25,047 INFO [train.py:715] (7/8) Epoch 14, batch 0, loss[loss=0.1045, simple_loss=0.1822, pruned_loss=0.01339, over 4971.00 frames.], tot_loss[loss=0.1045, simple_loss=0.1822, pruned_loss=0.01339, over 4971.00 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 22:55:04,013 INFO [train.py:715] (7/8) Epoch 14, batch 50, loss[loss=0.1292, simple_loss=0.2063, pruned_loss=0.02609, over 4807.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02974, over 220018.33 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 22:55:42,421 INFO [train.py:715] (7/8) Epoch 14, batch 100, loss[loss=0.141, simple_loss=0.2082, pruned_loss=0.0369, over 4710.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2083, pruned_loss=0.03151, over 386495.37 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 22:56:21,302 INFO [train.py:715] (7/8) 
Epoch 14, batch 150, loss[loss=0.1409, simple_loss=0.2217, pruned_loss=0.03006, over 4924.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03112, over 516456.45 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 22:56:59,876 INFO [train.py:715] (7/8) Epoch 14, batch 200, loss[loss=0.113, simple_loss=0.1831, pruned_loss=0.02147, over 4930.00 frames.], tot_loss[loss=0.1359, simple_loss=0.209, pruned_loss=0.03139, over 618394.97 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 22:57:38,468 INFO [train.py:715] (7/8) Epoch 14, batch 250, loss[loss=0.1542, simple_loss=0.229, pruned_loss=0.0397, over 4931.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2088, pruned_loss=0.03113, over 697429.52 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 22:58:17,251 INFO [train.py:715] (7/8) Epoch 14, batch 300, loss[loss=0.1791, simple_loss=0.2639, pruned_loss=0.04711, over 4760.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2104, pruned_loss=0.03224, over 757798.66 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 22:58:56,805 INFO [train.py:715] (7/8) Epoch 14, batch 350, loss[loss=0.175, simple_loss=0.2469, pruned_loss=0.05156, over 4953.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2104, pruned_loss=0.03201, over 805299.38 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 22:59:35,347 INFO [train.py:715] (7/8) Epoch 14, batch 400, loss[loss=0.1297, simple_loss=0.2051, pruned_loss=0.02719, over 4935.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2098, pruned_loss=0.03182, over 843096.02 frames.], batch size: 35, lr: 1.59e-04 +2022-05-07 23:00:14,781 INFO [train.py:715] (7/8) Epoch 14, batch 450, loss[loss=0.1648, simple_loss=0.2364, pruned_loss=0.04664, over 4783.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2087, pruned_loss=0.03147, over 872095.61 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:00:54,070 INFO [train.py:715] (7/8) Epoch 14, batch 500, loss[loss=0.1413, simple_loss=0.2122, pruned_loss=0.0352, over 4885.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2087, pruned_loss=0.03129, over 894736.95 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:01:33,690 INFO [train.py:715] (7/8) Epoch 14, batch 550, loss[loss=0.1288, simple_loss=0.2094, pruned_loss=0.02413, over 4969.00 frames.], tot_loss[loss=0.136, simple_loss=0.2091, pruned_loss=0.03144, over 911469.15 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:02:12,466 INFO [train.py:715] (7/8) Epoch 14, batch 600, loss[loss=0.1242, simple_loss=0.193, pruned_loss=0.02766, over 4904.00 frames.], tot_loss[loss=0.136, simple_loss=0.2092, pruned_loss=0.03139, over 925014.70 frames.], batch size: 17, lr: 1.59e-04 +2022-05-07 23:02:51,129 INFO [train.py:715] (7/8) Epoch 14, batch 650, loss[loss=0.1332, simple_loss=0.2118, pruned_loss=0.02728, over 4768.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03103, over 935019.18 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:03:32,629 INFO [train.py:715] (7/8) Epoch 14, batch 700, loss[loss=0.1436, simple_loss=0.2229, pruned_loss=0.03211, over 4805.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03068, over 943589.69 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:04:11,048 INFO [train.py:715] (7/8) Epoch 14, batch 750, loss[loss=0.1526, simple_loss=0.2251, pruned_loss=0.04004, over 4845.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2097, pruned_loss=0.031, over 949893.93 frames.], batch size: 30, lr: 1.59e-04 +2022-05-07 23:04:51,157 INFO [train.py:715] (7/8) Epoch 14, batch 800, loss[loss=0.1342, 
simple_loss=0.2025, pruned_loss=0.03294, over 4849.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03089, over 955144.64 frames.], batch size: 30, lr: 1.59e-04 +2022-05-07 23:05:30,249 INFO [train.py:715] (7/8) Epoch 14, batch 850, loss[loss=0.1113, simple_loss=0.1841, pruned_loss=0.01925, over 4831.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03064, over 959680.97 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:06:09,706 INFO [train.py:715] (7/8) Epoch 14, batch 900, loss[loss=0.1354, simple_loss=0.2215, pruned_loss=0.02464, over 4929.00 frames.], tot_loss[loss=0.135, simple_loss=0.2097, pruned_loss=0.03019, over 963115.58 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:06:48,330 INFO [train.py:715] (7/8) Epoch 14, batch 950, loss[loss=0.1629, simple_loss=0.2355, pruned_loss=0.04518, over 4779.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2097, pruned_loss=0.03043, over 964922.27 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:07:27,881 INFO [train.py:715] (7/8) Epoch 14, batch 1000, loss[loss=0.1509, simple_loss=0.2133, pruned_loss=0.04423, over 4860.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03018, over 966471.61 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:08:07,945 INFO [train.py:715] (7/8) Epoch 14, batch 1050, loss[loss=0.1433, simple_loss=0.2152, pruned_loss=0.03566, over 4842.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03053, over 967485.83 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:08:47,253 INFO [train.py:715] (7/8) Epoch 14, batch 1100, loss[loss=0.1734, simple_loss=0.2363, pruned_loss=0.0553, over 4741.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03021, over 967962.42 frames.], batch size: 16, lr: 1.59e-04 +2022-05-07 23:09:26,967 INFO [train.py:715] (7/8) Epoch 14, batch 1150, loss[loss=0.1182, simple_loss=0.1967, pruned_loss=0.01983, over 4809.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03049, over 968874.92 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:10:07,014 INFO [train.py:715] (7/8) Epoch 14, batch 1200, loss[loss=0.1155, simple_loss=0.179, pruned_loss=0.02598, over 4847.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03044, over 969916.34 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:10:47,175 INFO [train.py:715] (7/8) Epoch 14, batch 1250, loss[loss=0.1311, simple_loss=0.2032, pruned_loss=0.0295, over 4809.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03018, over 970508.10 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:11:26,192 INFO [train.py:715] (7/8) Epoch 14, batch 1300, loss[loss=0.1251, simple_loss=0.2028, pruned_loss=0.02371, over 4804.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.0309, over 970893.59 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:12:05,714 INFO [train.py:715] (7/8) Epoch 14, batch 1350, loss[loss=0.1649, simple_loss=0.2329, pruned_loss=0.04838, over 4833.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03056, over 971356.42 frames.], batch size: 30, lr: 1.59e-04 +2022-05-07 23:12:45,083 INFO [train.py:715] (7/8) Epoch 14, batch 1400, loss[loss=0.1304, simple_loss=0.1975, pruned_loss=0.03165, over 4746.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03083, over 971716.40 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 23:13:24,652 INFO [train.py:715] (7/8) Epoch 14, batch 1450, loss[loss=0.1621, simple_loss=0.2361, 
pruned_loss=0.04404, over 4988.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03055, over 971431.73 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:14:04,620 INFO [train.py:715] (7/8) Epoch 14, batch 1500, loss[loss=0.1367, simple_loss=0.2151, pruned_loss=0.02914, over 4871.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03066, over 971125.16 frames.], batch size: 16, lr: 1.59e-04 +2022-05-07 23:14:44,300 INFO [train.py:715] (7/8) Epoch 14, batch 1550, loss[loss=0.1289, simple_loss=0.2124, pruned_loss=0.02263, over 4961.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 971122.90 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:15:24,190 INFO [train.py:715] (7/8) Epoch 14, batch 1600, loss[loss=0.1325, simple_loss=0.2043, pruned_loss=0.03034, over 4953.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03014, over 971465.39 frames.], batch size: 35, lr: 1.59e-04 +2022-05-07 23:16:03,424 INFO [train.py:715] (7/8) Epoch 14, batch 1650, loss[loss=0.1383, simple_loss=0.2165, pruned_loss=0.03003, over 4908.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02994, over 971734.16 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 23:16:43,079 INFO [train.py:715] (7/8) Epoch 14, batch 1700, loss[loss=0.1446, simple_loss=0.2197, pruned_loss=0.0348, over 4787.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03025, over 972164.66 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:17:22,557 INFO [train.py:715] (7/8) Epoch 14, batch 1750, loss[loss=0.1336, simple_loss=0.2121, pruned_loss=0.02762, over 4817.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03064, over 973380.27 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:18:02,285 INFO [train.py:715] (7/8) Epoch 14, batch 1800, loss[loss=0.1233, simple_loss=0.1997, pruned_loss=0.02347, over 4805.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03041, over 973589.42 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:18:40,626 INFO [train.py:715] (7/8) Epoch 14, batch 1850, loss[loss=0.1343, simple_loss=0.2021, pruned_loss=0.03327, over 4837.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03017, over 972129.87 frames.], batch size: 30, lr: 1.59e-04 +2022-05-07 23:19:19,859 INFO [train.py:715] (7/8) Epoch 14, batch 1900, loss[loss=0.134, simple_loss=0.2087, pruned_loss=0.02966, over 4842.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03074, over 971398.70 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:19:59,663 INFO [train.py:715] (7/8) Epoch 14, batch 1950, loss[loss=0.1254, simple_loss=0.1992, pruned_loss=0.0258, over 4846.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2072, pruned_loss=0.03023, over 972042.32 frames.], batch size: 13, lr: 1.59e-04 +2022-05-07 23:20:39,804 INFO [train.py:715] (7/8) Epoch 14, batch 2000, loss[loss=0.1274, simple_loss=0.2069, pruned_loss=0.02401, over 4972.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2078, pruned_loss=0.03058, over 971512.81 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:21:19,092 INFO [train.py:715] (7/8) Epoch 14, batch 2050, loss[loss=0.1352, simple_loss=0.2132, pruned_loss=0.02862, over 4893.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2082, pruned_loss=0.03067, over 972036.18 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:21:58,531 INFO [train.py:715] (7/8) Epoch 14, batch 2100, loss[loss=0.1273, simple_loss=0.2121, pruned_loss=0.02124, 
over 4821.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2081, pruned_loss=0.03067, over 971893.14 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:22:38,245 INFO [train.py:715] (7/8) Epoch 14, batch 2150, loss[loss=0.161, simple_loss=0.2246, pruned_loss=0.04872, over 4856.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2074, pruned_loss=0.03052, over 972293.60 frames.], batch size: 20, lr: 1.59e-04 +2022-05-07 23:23:16,937 INFO [train.py:715] (7/8) Epoch 14, batch 2200, loss[loss=0.1359, simple_loss=0.2047, pruned_loss=0.03351, over 4854.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2076, pruned_loss=0.0307, over 972083.62 frames.], batch size: 32, lr: 1.59e-04 +2022-05-07 23:23:55,887 INFO [train.py:715] (7/8) Epoch 14, batch 2250, loss[loss=0.1196, simple_loss=0.1946, pruned_loss=0.02236, over 4862.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2083, pruned_loss=0.03135, over 973165.99 frames.], batch size: 20, lr: 1.59e-04 +2022-05-07 23:24:34,961 INFO [train.py:715] (7/8) Epoch 14, batch 2300, loss[loss=0.139, simple_loss=0.2084, pruned_loss=0.03485, over 4960.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2091, pruned_loss=0.03137, over 973257.73 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:25:14,125 INFO [train.py:715] (7/8) Epoch 14, batch 2350, loss[loss=0.167, simple_loss=0.2303, pruned_loss=0.05188, over 4779.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03089, over 972844.87 frames.], batch size: 17, lr: 1.59e-04 +2022-05-07 23:25:53,241 INFO [train.py:715] (7/8) Epoch 14, batch 2400, loss[loss=0.1508, simple_loss=0.2234, pruned_loss=0.03908, over 4850.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03106, over 972248.15 frames.], batch size: 38, lr: 1.59e-04 +2022-05-07 23:26:32,286 INFO [train.py:715] (7/8) Epoch 14, batch 2450, loss[loss=0.1451, simple_loss=0.2148, pruned_loss=0.03772, over 4804.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03116, over 972178.53 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:27:11,604 INFO [train.py:715] (7/8) Epoch 14, batch 2500, loss[loss=0.1558, simple_loss=0.2229, pruned_loss=0.04432, over 4957.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2102, pruned_loss=0.03147, over 972082.77 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:27:50,106 INFO [train.py:715] (7/8) Epoch 14, batch 2550, loss[loss=0.1289, simple_loss=0.1995, pruned_loss=0.02918, over 4818.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03108, over 972501.67 frames.], batch size: 27, lr: 1.59e-04 +2022-05-07 23:28:29,681 INFO [train.py:715] (7/8) Epoch 14, batch 2600, loss[loss=0.141, simple_loss=0.2108, pruned_loss=0.03563, over 4749.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2098, pruned_loss=0.03115, over 972897.14 frames.], batch size: 16, lr: 1.59e-04 +2022-05-07 23:29:09,140 INFO [train.py:715] (7/8) Epoch 14, batch 2650, loss[loss=0.1536, simple_loss=0.2278, pruned_loss=0.03973, over 4925.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2097, pruned_loss=0.03085, over 972224.21 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:29:48,490 INFO [train.py:715] (7/8) Epoch 14, batch 2700, loss[loss=0.1132, simple_loss=0.1954, pruned_loss=0.01551, over 4872.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.0308, over 972563.62 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:30:27,050 INFO [train.py:715] (7/8) Epoch 14, batch 2750, loss[loss=0.1377, simple_loss=0.2204, pruned_loss=0.02753, over 4930.00 frames.], 
tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03098, over 972028.08 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:31:06,236 INFO [train.py:715] (7/8) Epoch 14, batch 2800, loss[loss=0.1463, simple_loss=0.2224, pruned_loss=0.03512, over 4775.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03125, over 972590.96 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:31:45,908 INFO [train.py:715] (7/8) Epoch 14, batch 2850, loss[loss=0.108, simple_loss=0.1758, pruned_loss=0.02013, over 4748.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03143, over 972530.62 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:32:24,353 INFO [train.py:715] (7/8) Epoch 14, batch 2900, loss[loss=0.1487, simple_loss=0.2145, pruned_loss=0.04146, over 4921.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2091, pruned_loss=0.03139, over 971301.42 frames.], batch size: 29, lr: 1.59e-04 +2022-05-07 23:33:06,135 INFO [train.py:715] (7/8) Epoch 14, batch 2950, loss[loss=0.1287, simple_loss=0.2007, pruned_loss=0.02837, over 4961.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03103, over 971202.87 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:33:45,666 INFO [train.py:715] (7/8) Epoch 14, batch 3000, loss[loss=0.1383, simple_loss=0.2135, pruned_loss=0.03151, over 4820.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03104, over 972594.79 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:33:45,667 INFO [train.py:733] (7/8) Computing validation loss +2022-05-07 23:33:55,240 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1891, pruned_loss=0.01067, over 914524.00 frames. +2022-05-07 23:34:34,252 INFO [train.py:715] (7/8) Epoch 14, batch 3050, loss[loss=0.1284, simple_loss=0.2027, pruned_loss=0.02699, over 4852.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03133, over 972490.73 frames.], batch size: 20, lr: 1.59e-04 +2022-05-07 23:35:14,220 INFO [train.py:715] (7/8) Epoch 14, batch 3100, loss[loss=0.122, simple_loss=0.2038, pruned_loss=0.02014, over 4948.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03147, over 971886.81 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:35:53,769 INFO [train.py:715] (7/8) Epoch 14, batch 3150, loss[loss=0.1705, simple_loss=0.2298, pruned_loss=0.05566, over 4706.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2101, pruned_loss=0.03152, over 971579.19 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:36:33,462 INFO [train.py:715] (7/8) Epoch 14, batch 3200, loss[loss=0.1068, simple_loss=0.1792, pruned_loss=0.01719, over 4978.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2092, pruned_loss=0.03168, over 972626.34 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:37:14,487 INFO [train.py:715] (7/8) Epoch 14, batch 3250, loss[loss=0.1347, simple_loss=0.2015, pruned_loss=0.03397, over 4972.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03172, over 972154.00 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:37:54,310 INFO [train.py:715] (7/8) Epoch 14, batch 3300, loss[loss=0.1279, simple_loss=0.2007, pruned_loss=0.02756, over 4763.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03153, over 972760.03 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:38:34,436 INFO [train.py:715] (7/8) Epoch 14, batch 3350, loss[loss=0.1375, simple_loss=0.2119, pruned_loss=0.03151, over 4779.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03087, 
over 973141.72 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:39:15,380 INFO [train.py:715] (7/8) Epoch 14, batch 3400, loss[loss=0.1334, simple_loss=0.2136, pruned_loss=0.02661, over 4885.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03078, over 972083.97 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:39:56,034 INFO [train.py:715] (7/8) Epoch 14, batch 3450, loss[loss=0.1425, simple_loss=0.2229, pruned_loss=0.031, over 4911.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03098, over 973230.00 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 23:40:35,914 INFO [train.py:715] (7/8) Epoch 14, batch 3500, loss[loss=0.1429, simple_loss=0.2035, pruned_loss=0.04116, over 4843.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.0311, over 973280.05 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:41:15,984 INFO [train.py:715] (7/8) Epoch 14, batch 3550, loss[loss=0.119, simple_loss=0.1924, pruned_loss=0.02284, over 4980.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.03135, over 973173.13 frames.], batch size: 28, lr: 1.59e-04 +2022-05-07 23:41:56,121 INFO [train.py:715] (7/8) Epoch 14, batch 3600, loss[loss=0.1265, simple_loss=0.2017, pruned_loss=0.02566, over 4811.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03114, over 973235.28 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:42:36,130 INFO [train.py:715] (7/8) Epoch 14, batch 3650, loss[loss=0.1365, simple_loss=0.1998, pruned_loss=0.03661, over 4885.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2095, pruned_loss=0.03151, over 972528.24 frames.], batch size: 17, lr: 1.59e-04 +2022-05-07 23:43:16,034 INFO [train.py:715] (7/8) Epoch 14, batch 3700, loss[loss=0.1379, simple_loss=0.2103, pruned_loss=0.03274, over 4912.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03136, over 972669.06 frames.], batch size: 19, lr: 1.59e-04 +2022-05-07 23:43:56,764 INFO [train.py:715] (7/8) Epoch 14, batch 3750, loss[loss=0.1239, simple_loss=0.2015, pruned_loss=0.02313, over 4979.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03076, over 972623.37 frames.], batch size: 35, lr: 1.59e-04 +2022-05-07 23:44:36,925 INFO [train.py:715] (7/8) Epoch 14, batch 3800, loss[loss=0.1243, simple_loss=0.2151, pruned_loss=0.01673, over 4918.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2085, pruned_loss=0.03064, over 972760.33 frames.], batch size: 23, lr: 1.59e-04 +2022-05-07 23:45:16,166 INFO [train.py:715] (7/8) Epoch 14, batch 3850, loss[loss=0.1468, simple_loss=0.213, pruned_loss=0.04026, over 4877.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03081, over 973050.38 frames.], batch size: 39, lr: 1.59e-04 +2022-05-07 23:45:56,642 INFO [train.py:715] (7/8) Epoch 14, batch 3900, loss[loss=0.1174, simple_loss=0.1886, pruned_loss=0.02313, over 4983.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03052, over 972906.75 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:46:37,883 INFO [train.py:715] (7/8) Epoch 14, batch 3950, loss[loss=0.1469, simple_loss=0.2187, pruned_loss=0.03749, over 4978.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03099, over 972915.83 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:47:18,821 INFO [train.py:715] (7/8) Epoch 14, batch 4000, loss[loss=0.1222, simple_loss=0.1865, pruned_loss=0.0289, over 4825.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03097, over 973390.33 frames.], 
batch size: 13, lr: 1.59e-04 +2022-05-07 23:47:59,307 INFO [train.py:715] (7/8) Epoch 14, batch 4050, loss[loss=0.1216, simple_loss=0.1907, pruned_loss=0.02624, over 4718.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.0318, over 972859.27 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:48:40,125 INFO [train.py:715] (7/8) Epoch 14, batch 4100, loss[loss=0.165, simple_loss=0.2376, pruned_loss=0.04619, over 4870.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03155, over 972844.04 frames.], batch size: 39, lr: 1.59e-04 +2022-05-07 23:49:21,563 INFO [train.py:715] (7/8) Epoch 14, batch 4150, loss[loss=0.158, simple_loss=0.2382, pruned_loss=0.03887, over 4796.00 frames.], tot_loss[loss=0.1364, simple_loss=0.21, pruned_loss=0.03139, over 973316.29 frames.], batch size: 21, lr: 1.59e-04 +2022-05-07 23:50:02,216 INFO [train.py:715] (7/8) Epoch 14, batch 4200, loss[loss=0.13, simple_loss=0.2018, pruned_loss=0.0291, over 4869.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03087, over 972415.59 frames.], batch size: 20, lr: 1.59e-04 +2022-05-07 23:50:43,286 INFO [train.py:715] (7/8) Epoch 14, batch 4250, loss[loss=0.1507, simple_loss=0.2136, pruned_loss=0.04392, over 4763.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03092, over 972480.32 frames.], batch size: 12, lr: 1.59e-04 +2022-05-07 23:51:25,173 INFO [train.py:715] (7/8) Epoch 14, batch 4300, loss[loss=0.1422, simple_loss=0.2195, pruned_loss=0.03246, over 4976.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03135, over 972815.79 frames.], batch size: 24, lr: 1.59e-04 +2022-05-07 23:52:06,420 INFO [train.py:715] (7/8) Epoch 14, batch 4350, loss[loss=0.1262, simple_loss=0.2008, pruned_loss=0.02578, over 4812.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2104, pruned_loss=0.03143, over 972918.67 frames.], batch size: 26, lr: 1.59e-04 +2022-05-07 23:52:46,952 INFO [train.py:715] (7/8) Epoch 14, batch 4400, loss[loss=0.1234, simple_loss=0.195, pruned_loss=0.02596, over 4896.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03136, over 973304.90 frames.], batch size: 22, lr: 1.59e-04 +2022-05-07 23:53:27,638 INFO [train.py:715] (7/8) Epoch 14, batch 4450, loss[loss=0.1441, simple_loss=0.218, pruned_loss=0.0351, over 4834.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2104, pruned_loss=0.03102, over 972534.94 frames.], batch size: 15, lr: 1.59e-04 +2022-05-07 23:54:08,629 INFO [train.py:715] (7/8) Epoch 14, batch 4500, loss[loss=0.1455, simple_loss=0.2234, pruned_loss=0.03385, over 4984.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03095, over 971402.10 frames.], batch size: 25, lr: 1.59e-04 +2022-05-07 23:54:48,676 INFO [train.py:715] (7/8) Epoch 14, batch 4550, loss[loss=0.1302, simple_loss=0.205, pruned_loss=0.02775, over 4959.00 frames.], tot_loss[loss=0.137, simple_loss=0.2108, pruned_loss=0.03158, over 970733.58 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:55:27,629 INFO [train.py:715] (7/8) Epoch 14, batch 4600, loss[loss=0.1283, simple_loss=0.1985, pruned_loss=0.02903, over 4963.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03115, over 969767.72 frames.], batch size: 14, lr: 1.59e-04 +2022-05-07 23:56:08,465 INFO [train.py:715] (7/8) Epoch 14, batch 4650, loss[loss=0.1355, simple_loss=0.205, pruned_loss=0.03295, over 4773.00 frames.], tot_loss[loss=0.137, simple_loss=0.2111, pruned_loss=0.03142, over 970285.50 frames.], batch size: 17, lr: 1.59e-04 
+2022-05-07 23:56:48,195 INFO [train.py:715] (7/8) Epoch 14, batch 4700, loss[loss=0.136, simple_loss=0.2143, pruned_loss=0.02888, over 4902.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2106, pruned_loss=0.03121, over 970659.11 frames.], batch size: 18, lr: 1.59e-04 +2022-05-07 23:57:26,877 INFO [train.py:715] (7/8) Epoch 14, batch 4750, loss[loss=0.1292, simple_loss=0.2087, pruned_loss=0.02489, over 4768.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.0314, over 970800.47 frames.], batch size: 19, lr: 1.58e-04 +2022-05-07 23:58:06,243 INFO [train.py:715] (7/8) Epoch 14, batch 4800, loss[loss=0.1008, simple_loss=0.182, pruned_loss=0.009843, over 4881.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2088, pruned_loss=0.03131, over 972136.02 frames.], batch size: 22, lr: 1.58e-04 +2022-05-07 23:58:46,080 INFO [train.py:715] (7/8) Epoch 14, batch 4850, loss[loss=0.1197, simple_loss=0.1885, pruned_loss=0.02546, over 4855.00 frames.], tot_loss[loss=0.1352, simple_loss=0.208, pruned_loss=0.03123, over 971856.44 frames.], batch size: 20, lr: 1.58e-04 +2022-05-07 23:59:25,004 INFO [train.py:715] (7/8) Epoch 14, batch 4900, loss[loss=0.128, simple_loss=0.2036, pruned_loss=0.02625, over 4818.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2073, pruned_loss=0.03077, over 971522.58 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 00:00:04,152 INFO [train.py:715] (7/8) Epoch 14, batch 4950, loss[loss=0.1473, simple_loss=0.2149, pruned_loss=0.03982, over 4933.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2084, pruned_loss=0.03117, over 972240.62 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 00:00:44,229 INFO [train.py:715] (7/8) Epoch 14, batch 5000, loss[loss=0.1142, simple_loss=0.1839, pruned_loss=0.02232, over 4834.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2089, pruned_loss=0.03144, over 972709.54 frames.], batch size: 30, lr: 1.58e-04 +2022-05-08 00:01:23,509 INFO [train.py:715] (7/8) Epoch 14, batch 5050, loss[loss=0.1601, simple_loss=0.2289, pruned_loss=0.04563, over 4861.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2093, pruned_loss=0.03154, over 973017.09 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:02:02,198 INFO [train.py:715] (7/8) Epoch 14, batch 5100, loss[loss=0.1599, simple_loss=0.2359, pruned_loss=0.042, over 4744.00 frames.], tot_loss[loss=0.136, simple_loss=0.2093, pruned_loss=0.03133, over 972221.87 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:02:41,795 INFO [train.py:715] (7/8) Epoch 14, batch 5150, loss[loss=0.1607, simple_loss=0.224, pruned_loss=0.04864, over 4834.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03137, over 972596.22 frames.], batch size: 30, lr: 1.58e-04 +2022-05-08 00:03:21,425 INFO [train.py:715] (7/8) Epoch 14, batch 5200, loss[loss=0.1414, simple_loss=0.2267, pruned_loss=0.02807, over 4900.00 frames.], tot_loss[loss=0.1357, simple_loss=0.209, pruned_loss=0.03117, over 972097.81 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:03:59,950 INFO [train.py:715] (7/8) Epoch 14, batch 5250, loss[loss=0.1221, simple_loss=0.2048, pruned_loss=0.01976, over 4797.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03076, over 971929.31 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:04:38,442 INFO [train.py:715] (7/8) Epoch 14, batch 5300, loss[loss=0.1263, simple_loss=0.1965, pruned_loss=0.02806, over 4853.00 frames.], tot_loss[loss=0.1344, simple_loss=0.208, pruned_loss=0.03041, over 972733.79 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:05:17,624 INFO 
[train.py:715] (7/8) Epoch 14, batch 5350, loss[loss=0.1269, simple_loss=0.211, pruned_loss=0.02141, over 4981.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03051, over 972288.84 frames.], batch size: 28, lr: 1.58e-04 +2022-05-08 00:05:56,197 INFO [train.py:715] (7/8) Epoch 14, batch 5400, loss[loss=0.1339, simple_loss=0.2063, pruned_loss=0.03072, over 4817.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03072, over 971612.00 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:06:34,704 INFO [train.py:715] (7/8) Epoch 14, batch 5450, loss[loss=0.1255, simple_loss=0.2044, pruned_loss=0.0233, over 4757.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03097, over 971247.98 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:07:13,536 INFO [train.py:715] (7/8) Epoch 14, batch 5500, loss[loss=0.1207, simple_loss=0.1876, pruned_loss=0.02694, over 4892.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.03045, over 971077.98 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:07:53,213 INFO [train.py:715] (7/8) Epoch 14, batch 5550, loss[loss=0.1226, simple_loss=0.1968, pruned_loss=0.0242, over 4872.00 frames.], tot_loss[loss=0.135, simple_loss=0.2095, pruned_loss=0.03024, over 971538.40 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:08:31,526 INFO [train.py:715] (7/8) Epoch 14, batch 5600, loss[loss=0.144, simple_loss=0.228, pruned_loss=0.03001, over 4858.00 frames.], tot_loss[loss=0.135, simple_loss=0.2096, pruned_loss=0.03022, over 971768.31 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:09:10,031 INFO [train.py:715] (7/8) Epoch 14, batch 5650, loss[loss=0.144, simple_loss=0.2188, pruned_loss=0.03462, over 4916.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2103, pruned_loss=0.03074, over 972153.51 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:09:49,143 INFO [train.py:715] (7/8) Epoch 14, batch 5700, loss[loss=0.1208, simple_loss=0.2056, pruned_loss=0.01797, over 4808.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2097, pruned_loss=0.03033, over 972494.35 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:10:27,414 INFO [train.py:715] (7/8) Epoch 14, batch 5750, loss[loss=0.1366, simple_loss=0.215, pruned_loss=0.02912, over 4967.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03075, over 973678.11 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:11:05,796 INFO [train.py:715] (7/8) Epoch 14, batch 5800, loss[loss=0.1324, simple_loss=0.2131, pruned_loss=0.02589, over 4941.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2099, pruned_loss=0.03043, over 972845.16 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:11:44,415 INFO [train.py:715] (7/8) Epoch 14, batch 5850, loss[loss=0.1305, simple_loss=0.1957, pruned_loss=0.03265, over 4776.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2098, pruned_loss=0.03038, over 972521.69 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:12:23,190 INFO [train.py:715] (7/8) Epoch 14, batch 5900, loss[loss=0.1507, simple_loss=0.2144, pruned_loss=0.04354, over 4927.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03001, over 972281.28 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:13:02,950 INFO [train.py:715] (7/8) Epoch 14, batch 5950, loss[loss=0.1202, simple_loss=0.2057, pruned_loss=0.01738, over 4936.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2094, pruned_loss=0.03014, over 971667.46 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:13:42,646 INFO [train.py:715] (7/8) Epoch 14, 
batch 6000, loss[loss=0.1097, simple_loss=0.1893, pruned_loss=0.01502, over 4828.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03008, over 971783.00 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 00:13:42,647 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 00:13:52,504 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.105, simple_loss=0.1888, pruned_loss=0.01057, over 914524.00 frames. +2022-05-08 00:14:31,606 INFO [train.py:715] (7/8) Epoch 14, batch 6050, loss[loss=0.13, simple_loss=0.1986, pruned_loss=0.03068, over 4847.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03051, over 971460.22 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:15:10,781 INFO [train.py:715] (7/8) Epoch 14, batch 6100, loss[loss=0.1403, simple_loss=0.2166, pruned_loss=0.03196, over 4818.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03029, over 971382.34 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:15:50,782 INFO [train.py:715] (7/8) Epoch 14, batch 6150, loss[loss=0.1128, simple_loss=0.1831, pruned_loss=0.02123, over 4980.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03049, over 972559.93 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:16:30,391 INFO [train.py:715] (7/8) Epoch 14, batch 6200, loss[loss=0.1427, simple_loss=0.2257, pruned_loss=0.02986, over 4809.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03059, over 972715.80 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:17:10,263 INFO [train.py:715] (7/8) Epoch 14, batch 6250, loss[loss=0.1529, simple_loss=0.2294, pruned_loss=0.03821, over 4701.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03027, over 972142.77 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:17:49,639 INFO [train.py:715] (7/8) Epoch 14, batch 6300, loss[loss=0.144, simple_loss=0.2172, pruned_loss=0.03535, over 4809.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02998, over 972187.50 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:18:29,665 INFO [train.py:715] (7/8) Epoch 14, batch 6350, loss[loss=0.1261, simple_loss=0.2036, pruned_loss=0.02429, over 4878.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03031, over 972562.05 frames.], batch size: 22, lr: 1.58e-04 +2022-05-08 00:19:09,441 INFO [train.py:715] (7/8) Epoch 14, batch 6400, loss[loss=0.1213, simple_loss=0.1843, pruned_loss=0.0291, over 4811.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2098, pruned_loss=0.03052, over 972769.00 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:19:49,528 INFO [train.py:715] (7/8) Epoch 14, batch 6450, loss[loss=0.1518, simple_loss=0.2227, pruned_loss=0.04051, over 4773.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.03068, over 972519.51 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:20:29,517 INFO [train.py:715] (7/8) Epoch 14, batch 6500, loss[loss=0.1397, simple_loss=0.211, pruned_loss=0.0342, over 4762.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03062, over 972778.84 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:21:09,173 INFO [train.py:715] (7/8) Epoch 14, batch 6550, loss[loss=0.1153, simple_loss=0.175, pruned_loss=0.02782, over 4771.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03075, over 972504.42 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:21:49,052 INFO [train.py:715] (7/8) Epoch 14, batch 6600, loss[loss=0.1283, simple_loss=0.2028, pruned_loss=0.02688, over 
4960.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03036, over 972911.14 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:22:29,231 INFO [train.py:715] (7/8) Epoch 14, batch 6650, loss[loss=0.1463, simple_loss=0.2197, pruned_loss=0.03644, over 4810.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.0314, over 973240.59 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 00:23:08,960 INFO [train.py:715] (7/8) Epoch 14, batch 6700, loss[loss=0.1562, simple_loss=0.227, pruned_loss=0.04273, over 4936.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03135, over 972702.23 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:23:48,847 INFO [train.py:715] (7/8) Epoch 14, batch 6750, loss[loss=0.1321, simple_loss=0.2082, pruned_loss=0.02804, over 4906.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03139, over 973090.23 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:24:28,839 INFO [train.py:715] (7/8) Epoch 14, batch 6800, loss[loss=0.1596, simple_loss=0.2345, pruned_loss=0.04238, over 4899.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2105, pruned_loss=0.03147, over 973634.28 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:25:08,848 INFO [train.py:715] (7/8) Epoch 14, batch 6850, loss[loss=0.119, simple_loss=0.2007, pruned_loss=0.01865, over 4875.00 frames.], tot_loss[loss=0.1373, simple_loss=0.2109, pruned_loss=0.03187, over 974631.69 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:25:48,266 INFO [train.py:715] (7/8) Epoch 14, batch 6900, loss[loss=0.1187, simple_loss=0.1962, pruned_loss=0.02055, over 4758.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2111, pruned_loss=0.03184, over 974023.08 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:26:28,459 INFO [train.py:715] (7/8) Epoch 14, batch 6950, loss[loss=0.1357, simple_loss=0.2145, pruned_loss=0.02851, over 4889.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2103, pruned_loss=0.03097, over 973629.99 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:27:08,563 INFO [train.py:715] (7/8) Epoch 14, batch 7000, loss[loss=0.1164, simple_loss=0.1942, pruned_loss=0.01926, over 4971.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2098, pruned_loss=0.03055, over 972819.67 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 00:27:48,550 INFO [train.py:715] (7/8) Epoch 14, batch 7050, loss[loss=0.1319, simple_loss=0.2087, pruned_loss=0.02755, over 4908.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03034, over 972411.89 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:28:27,878 INFO [train.py:715] (7/8) Epoch 14, batch 7100, loss[loss=0.1115, simple_loss=0.1885, pruned_loss=0.01723, over 4770.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.0309, over 973034.04 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 00:29:07,968 INFO [train.py:715] (7/8) Epoch 14, batch 7150, loss[loss=0.1136, simple_loss=0.1905, pruned_loss=0.01833, over 4912.00 frames.], tot_loss[loss=0.136, simple_loss=0.2094, pruned_loss=0.03129, over 973189.29 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 00:29:48,170 INFO [train.py:715] (7/8) Epoch 14, batch 7200, loss[loss=0.1359, simple_loss=0.2106, pruned_loss=0.03064, over 4985.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2099, pruned_loss=0.03159, over 973505.04 frames.], batch size: 33, lr: 1.58e-04 +2022-05-08 00:30:28,016 INFO [train.py:715] (7/8) Epoch 14, batch 7250, loss[loss=0.1436, simple_loss=0.2132, pruned_loss=0.03694, over 4854.00 frames.], 
tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03104, over 973268.06 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:31:08,141 INFO [train.py:715] (7/8) Epoch 14, batch 7300, loss[loss=0.1319, simple_loss=0.2035, pruned_loss=0.03018, over 4986.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03098, over 972819.77 frames.], batch size: 35, lr: 1.58e-04 +2022-05-08 00:31:48,262 INFO [train.py:715] (7/8) Epoch 14, batch 7350, loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03058, over 4858.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03105, over 973241.83 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 00:32:28,615 INFO [train.py:715] (7/8) Epoch 14, batch 7400, loss[loss=0.1551, simple_loss=0.2335, pruned_loss=0.03837, over 4969.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03088, over 972985.09 frames.], batch size: 28, lr: 1.58e-04 +2022-05-08 00:33:08,061 INFO [train.py:715] (7/8) Epoch 14, batch 7450, loss[loss=0.1634, simple_loss=0.2276, pruned_loss=0.04958, over 4847.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03111, over 973264.08 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:33:47,754 INFO [train.py:715] (7/8) Epoch 14, batch 7500, loss[loss=0.127, simple_loss=0.2133, pruned_loss=0.02033, over 4827.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03081, over 972534.69 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 00:34:27,408 INFO [train.py:715] (7/8) Epoch 14, batch 7550, loss[loss=0.1506, simple_loss=0.232, pruned_loss=0.03457, over 4797.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03021, over 972488.73 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:35:06,412 INFO [train.py:715] (7/8) Epoch 14, batch 7600, loss[loss=0.1296, simple_loss=0.2038, pruned_loss=0.02777, over 4921.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03013, over 972870.41 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 00:35:46,368 INFO [train.py:715] (7/8) Epoch 14, batch 7650, loss[loss=0.1373, simple_loss=0.2121, pruned_loss=0.03124, over 4915.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02977, over 972817.74 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:36:25,271 INFO [train.py:715] (7/8) Epoch 14, batch 7700, loss[loss=0.1299, simple_loss=0.1947, pruned_loss=0.03253, over 4965.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02992, over 973700.58 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:37:05,577 INFO [train.py:715] (7/8) Epoch 14, batch 7750, loss[loss=0.1298, simple_loss=0.2073, pruned_loss=0.0261, over 4936.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03002, over 973076.68 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:37:44,382 INFO [train.py:715] (7/8) Epoch 14, batch 7800, loss[loss=0.1243, simple_loss=0.1975, pruned_loss=0.02557, over 4985.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2097, pruned_loss=0.03075, over 972652.30 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 00:38:23,473 INFO [train.py:715] (7/8) Epoch 14, batch 7850, loss[loss=0.1188, simple_loss=0.1851, pruned_loss=0.02629, over 4765.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.03142, over 972636.72 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:39:03,277 INFO [train.py:715] (7/8) Epoch 14, batch 7900, loss[loss=0.1252, simple_loss=0.2006, pruned_loss=0.02495, over 4842.00 frames.], tot_loss[loss=0.136, 
simple_loss=0.2097, pruned_loss=0.03118, over 973070.15 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 00:39:42,079 INFO [train.py:715] (7/8) Epoch 14, batch 7950, loss[loss=0.1609, simple_loss=0.2318, pruned_loss=0.04497, over 4982.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.031, over 973489.44 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:40:21,692 INFO [train.py:715] (7/8) Epoch 14, batch 8000, loss[loss=0.1643, simple_loss=0.2409, pruned_loss=0.0438, over 4784.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.0311, over 972707.44 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 00:41:00,526 INFO [train.py:715] (7/8) Epoch 14, batch 8050, loss[loss=0.1293, simple_loss=0.1966, pruned_loss=0.03094, over 4881.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03097, over 973414.03 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:41:40,055 INFO [train.py:715] (7/8) Epoch 14, batch 8100, loss[loss=0.1301, simple_loss=0.2088, pruned_loss=0.02572, over 4930.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03058, over 973840.21 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 00:42:18,788 INFO [train.py:715] (7/8) Epoch 14, batch 8150, loss[loss=0.1312, simple_loss=0.2063, pruned_loss=0.02802, over 4976.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03047, over 972855.75 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:42:58,273 INFO [train.py:715] (7/8) Epoch 14, batch 8200, loss[loss=0.1325, simple_loss=0.2101, pruned_loss=0.0275, over 4927.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02975, over 973822.28 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 00:43:37,714 INFO [train.py:715] (7/8) Epoch 14, batch 8250, loss[loss=0.1337, simple_loss=0.2242, pruned_loss=0.02154, over 4959.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03007, over 974335.88 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:44:17,182 INFO [train.py:715] (7/8) Epoch 14, batch 8300, loss[loss=0.1393, simple_loss=0.1975, pruned_loss=0.0406, over 4761.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03066, over 974891.49 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:44:56,128 INFO [train.py:715] (7/8) Epoch 14, batch 8350, loss[loss=0.1472, simple_loss=0.2231, pruned_loss=0.03561, over 4794.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03055, over 974086.77 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:45:35,325 INFO [train.py:715] (7/8) Epoch 14, batch 8400, loss[loss=0.1321, simple_loss=0.1978, pruned_loss=0.03317, over 4963.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03056, over 974131.22 frames.], batch size: 35, lr: 1.58e-04 +2022-05-08 00:46:14,805 INFO [train.py:715] (7/8) Epoch 14, batch 8450, loss[loss=0.1588, simple_loss=0.2164, pruned_loss=0.05063, over 4945.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2085, pruned_loss=0.03082, over 973978.19 frames.], batch size: 35, lr: 1.58e-04 +2022-05-08 00:46:53,359 INFO [train.py:715] (7/8) Epoch 14, batch 8500, loss[loss=0.146, simple_loss=0.2345, pruned_loss=0.02876, over 4790.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03058, over 973539.21 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 00:47:32,488 INFO [train.py:715] (7/8) Epoch 14, batch 8550, loss[loss=0.1234, simple_loss=0.1975, pruned_loss=0.0246, over 4813.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2085, 
pruned_loss=0.03065, over 972551.92 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 00:48:13,442 INFO [train.py:715] (7/8) Epoch 14, batch 8600, loss[loss=0.1341, simple_loss=0.2057, pruned_loss=0.03123, over 4840.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03046, over 972570.26 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:48:52,734 INFO [train.py:715] (7/8) Epoch 14, batch 8650, loss[loss=0.133, simple_loss=0.2058, pruned_loss=0.0301, over 4924.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03108, over 971610.93 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:49:34,159 INFO [train.py:715] (7/8) Epoch 14, batch 8700, loss[loss=0.1324, simple_loss=0.2035, pruned_loss=0.03068, over 4872.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2103, pruned_loss=0.0317, over 972183.16 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:50:13,529 INFO [train.py:715] (7/8) Epoch 14, batch 8750, loss[loss=0.1439, simple_loss=0.2179, pruned_loss=0.03493, over 4945.00 frames.], tot_loss[loss=0.137, simple_loss=0.2104, pruned_loss=0.03179, over 973016.64 frames.], batch size: 29, lr: 1.58e-04 +2022-05-08 00:50:53,246 INFO [train.py:715] (7/8) Epoch 14, batch 8800, loss[loss=0.1507, simple_loss=0.2302, pruned_loss=0.03558, over 4948.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2101, pruned_loss=0.03211, over 973509.35 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 00:51:32,825 INFO [train.py:715] (7/8) Epoch 14, batch 8850, loss[loss=0.1501, simple_loss=0.2251, pruned_loss=0.03761, over 4848.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03182, over 973149.49 frames.], batch size: 30, lr: 1.58e-04 +2022-05-08 00:52:13,345 INFO [train.py:715] (7/8) Epoch 14, batch 8900, loss[loss=0.1437, simple_loss=0.2176, pruned_loss=0.03487, over 4864.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03132, over 973533.63 frames.], batch size: 39, lr: 1.58e-04 +2022-05-08 00:52:53,215 INFO [train.py:715] (7/8) Epoch 14, batch 8950, loss[loss=0.115, simple_loss=0.198, pruned_loss=0.01604, over 4738.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03088, over 972734.09 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:53:33,016 INFO [train.py:715] (7/8) Epoch 14, batch 9000, loss[loss=0.1412, simple_loss=0.2099, pruned_loss=0.03619, over 4906.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03077, over 972386.30 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 00:53:33,017 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 00:53:47,941 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1052, simple_loss=0.189, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 00:54:27,486 INFO [train.py:715] (7/8) Epoch 14, batch 9050, loss[loss=0.1419, simple_loss=0.2189, pruned_loss=0.03244, over 4741.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.0305, over 972062.00 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:55:07,810 INFO [train.py:715] (7/8) Epoch 14, batch 9100, loss[loss=0.1504, simple_loss=0.2332, pruned_loss=0.03383, over 4767.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03046, over 972071.25 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 00:55:47,313 INFO [train.py:715] (7/8) Epoch 14, batch 9150, loss[loss=0.1277, simple_loss=0.1905, pruned_loss=0.0325, over 4872.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2086, pruned_loss=0.03107, over 971640.83 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:56:27,173 INFO [train.py:715] (7/8) Epoch 14, batch 9200, loss[loss=0.1606, simple_loss=0.2241, pruned_loss=0.04853, over 4859.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2079, pruned_loss=0.03082, over 972123.48 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 00:57:06,891 INFO [train.py:715] (7/8) Epoch 14, batch 9250, loss[loss=0.1224, simple_loss=0.211, pruned_loss=0.01695, over 4961.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03077, over 971600.63 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 00:57:46,607 INFO [train.py:715] (7/8) Epoch 14, batch 9300, loss[loss=0.1272, simple_loss=0.2057, pruned_loss=0.02438, over 4987.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2089, pruned_loss=0.03136, over 971920.54 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 00:58:26,528 INFO [train.py:715] (7/8) Epoch 14, batch 9350, loss[loss=0.1431, simple_loss=0.2195, pruned_loss=0.03333, over 4876.00 frames.], tot_loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.03128, over 972237.15 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 00:59:06,673 INFO [train.py:715] (7/8) Epoch 14, batch 9400, loss[loss=0.1701, simple_loss=0.241, pruned_loss=0.04956, over 4695.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2087, pruned_loss=0.0313, over 972639.34 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 00:59:46,295 INFO [train.py:715] (7/8) Epoch 14, batch 9450, loss[loss=0.1356, simple_loss=0.2169, pruned_loss=0.02713, over 4762.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2083, pruned_loss=0.03102, over 972617.85 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 01:00:26,052 INFO [train.py:715] (7/8) Epoch 14, batch 9500, loss[loss=0.1347, simple_loss=0.2198, pruned_loss=0.02479, over 4820.00 frames.], tot_loss[loss=0.134, simple_loss=0.2075, pruned_loss=0.03028, over 973219.45 frames.], batch size: 26, lr: 1.58e-04 +2022-05-08 01:01:05,842 INFO [train.py:715] (7/8) Epoch 14, batch 9550, loss[loss=0.1459, simple_loss=0.2132, pruned_loss=0.03932, over 4877.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03073, over 972618.69 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 01:01:46,002 INFO [train.py:715] (7/8) Epoch 14, batch 9600, loss[loss=0.1205, simple_loss=0.1848, pruned_loss=0.02811, over 4771.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03082, over 972780.05 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 01:02:25,430 INFO [train.py:715] (7/8) Epoch 14, batch 9650, loss[loss=0.129, simple_loss=0.2056, pruned_loss=0.02623, over 4852.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03105, over 973008.58 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 01:03:05,454 
INFO [train.py:715] (7/8) Epoch 14, batch 9700, loss[loss=0.1212, simple_loss=0.2008, pruned_loss=0.02084, over 4815.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03149, over 973011.81 frames.], batch size: 21, lr: 1.58e-04 +2022-05-08 01:03:45,041 INFO [train.py:715] (7/8) Epoch 14, batch 9750, loss[loss=0.1261, simple_loss=0.2017, pruned_loss=0.02525, over 4837.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.03157, over 972268.53 frames.], batch size: 13, lr: 1.58e-04 +2022-05-08 01:04:25,343 INFO [train.py:715] (7/8) Epoch 14, batch 9800, loss[loss=0.1638, simple_loss=0.237, pruned_loss=0.04529, over 4784.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2096, pruned_loss=0.03138, over 972362.34 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 01:05:04,569 INFO [train.py:715] (7/8) Epoch 14, batch 9850, loss[loss=0.1416, simple_loss=0.2213, pruned_loss=0.03091, over 4937.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2092, pruned_loss=0.03107, over 972870.51 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 01:05:44,639 INFO [train.py:715] (7/8) Epoch 14, batch 9900, loss[loss=0.117, simple_loss=0.1976, pruned_loss=0.01819, over 4873.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03072, over 972121.77 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 01:06:24,620 INFO [train.py:715] (7/8) Epoch 14, batch 9950, loss[loss=0.1481, simple_loss=0.2177, pruned_loss=0.03929, over 4895.00 frames.], tot_loss[loss=0.136, simple_loss=0.2103, pruned_loss=0.03088, over 971866.91 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 01:07:03,945 INFO [train.py:715] (7/8) Epoch 14, batch 10000, loss[loss=0.1265, simple_loss=0.2093, pruned_loss=0.02182, over 4938.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03094, over 972543.32 frames.], batch size: 23, lr: 1.58e-04 +2022-05-08 01:07:43,995 INFO [train.py:715] (7/8) Epoch 14, batch 10050, loss[loss=0.156, simple_loss=0.2333, pruned_loss=0.03935, over 4770.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03067, over 973027.55 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 01:08:23,517 INFO [train.py:715] (7/8) Epoch 14, batch 10100, loss[loss=0.13, simple_loss=0.2123, pruned_loss=0.0238, over 4773.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03038, over 973064.69 frames.], batch size: 18, lr: 1.58e-04 +2022-05-08 01:09:03,294 INFO [train.py:715] (7/8) Epoch 14, batch 10150, loss[loss=0.11, simple_loss=0.1843, pruned_loss=0.01779, over 4814.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03041, over 972508.63 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 01:09:42,487 INFO [train.py:715] (7/8) Epoch 14, batch 10200, loss[loss=0.1193, simple_loss=0.1955, pruned_loss=0.02157, over 4861.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03072, over 972270.03 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 01:10:22,733 INFO [train.py:715] (7/8) Epoch 14, batch 10250, loss[loss=0.1138, simple_loss=0.1854, pruned_loss=0.0211, over 4824.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03107, over 971316.75 frames.], batch size: 25, lr: 1.58e-04 +2022-05-08 01:11:02,459 INFO [train.py:715] (7/8) Epoch 14, batch 10300, loss[loss=0.1248, simple_loss=0.2061, pruned_loss=0.02171, over 4806.00 frames.], tot_loss[loss=0.1375, simple_loss=0.2109, pruned_loss=0.03202, over 971743.22 frames.], batch size: 12, lr: 1.58e-04 +2022-05-08 01:11:41,909 INFO [train.py:715] 
(7/8) Epoch 14, batch 10350, loss[loss=0.128, simple_loss=0.1973, pruned_loss=0.02939, over 4887.00 frames.], tot_loss[loss=0.137, simple_loss=0.2105, pruned_loss=0.03172, over 971798.27 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:12:22,111 INFO [train.py:715] (7/8) Epoch 14, batch 10400, loss[loss=0.1296, simple_loss=0.2058, pruned_loss=0.02675, over 4961.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03159, over 971985.10 frames.], batch size: 24, lr: 1.58e-04 +2022-05-08 01:13:01,501 INFO [train.py:715] (7/8) Epoch 14, batch 10450, loss[loss=0.1293, simple_loss=0.199, pruned_loss=0.02984, over 4766.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03088, over 971949.76 frames.], batch size: 19, lr: 1.58e-04 +2022-05-08 01:13:41,730 INFO [train.py:715] (7/8) Epoch 14, batch 10500, loss[loss=0.1534, simple_loss=0.237, pruned_loss=0.03488, over 4754.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03079, over 971476.12 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 01:14:21,002 INFO [train.py:715] (7/8) Epoch 14, batch 10550, loss[loss=0.1472, simple_loss=0.2184, pruned_loss=0.03796, over 4858.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03111, over 971965.89 frames.], batch size: 32, lr: 1.58e-04 +2022-05-08 01:15:01,262 INFO [train.py:715] (7/8) Epoch 14, batch 10600, loss[loss=0.1426, simple_loss=0.2162, pruned_loss=0.03443, over 4950.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03124, over 971666.57 frames.], batch size: 35, lr: 1.58e-04 +2022-05-08 01:15:40,588 INFO [train.py:715] (7/8) Epoch 14, batch 10650, loss[loss=0.1282, simple_loss=0.202, pruned_loss=0.02722, over 4961.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.0309, over 971568.04 frames.], batch size: 14, lr: 1.58e-04 +2022-05-08 01:16:19,714 INFO [train.py:715] (7/8) Epoch 14, batch 10700, loss[loss=0.1127, simple_loss=0.1878, pruned_loss=0.01882, over 4904.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03047, over 972162.27 frames.], batch size: 17, lr: 1.58e-04 +2022-05-08 01:16:58,896 INFO [train.py:715] (7/8) Epoch 14, batch 10750, loss[loss=0.1505, simple_loss=0.2315, pruned_loss=0.03473, over 4810.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03015, over 972658.15 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 01:17:38,323 INFO [train.py:715] (7/8) Epoch 14, batch 10800, loss[loss=0.1332, simple_loss=0.2058, pruned_loss=0.03035, over 4848.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02997, over 972879.67 frames.], batch size: 27, lr: 1.58e-04 +2022-05-08 01:18:17,863 INFO [train.py:715] (7/8) Epoch 14, batch 10850, loss[loss=0.116, simple_loss=0.1985, pruned_loss=0.01677, over 4849.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03034, over 973208.75 frames.], batch size: 15, lr: 1.58e-04 +2022-05-08 01:18:56,527 INFO [train.py:715] (7/8) Epoch 14, batch 10900, loss[loss=0.1424, simple_loss=0.2253, pruned_loss=0.02971, over 4871.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03057, over 972998.76 frames.], batch size: 20, lr: 1.58e-04 +2022-05-08 01:19:36,726 INFO [train.py:715] (7/8) Epoch 14, batch 10950, loss[loss=0.1533, simple_loss=0.2342, pruned_loss=0.03623, over 4881.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03029, over 973152.02 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:20:17,492 INFO [train.py:715] (7/8) Epoch 14, 
batch 11000, loss[loss=0.1296, simple_loss=0.2007, pruned_loss=0.02927, over 4748.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2089, pruned_loss=0.03028, over 973010.61 frames.], batch size: 16, lr: 1.58e-04 +2022-05-08 01:20:56,616 INFO [train.py:715] (7/8) Epoch 14, batch 11050, loss[loss=0.1235, simple_loss=0.19, pruned_loss=0.02849, over 4703.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03045, over 972517.53 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:21:37,659 INFO [train.py:715] (7/8) Epoch 14, batch 11100, loss[loss=0.1339, simple_loss=0.2013, pruned_loss=0.0333, over 4767.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.0306, over 971529.13 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:22:18,215 INFO [train.py:715] (7/8) Epoch 14, batch 11150, loss[loss=0.1038, simple_loss=0.1662, pruned_loss=0.02068, over 4989.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03012, over 971503.37 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 01:22:58,447 INFO [train.py:715] (7/8) Epoch 14, batch 11200, loss[loss=0.1519, simple_loss=0.2285, pruned_loss=0.03761, over 4980.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02999, over 972278.39 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 01:23:37,882 INFO [train.py:715] (7/8) Epoch 14, batch 11250, loss[loss=0.132, simple_loss=0.2103, pruned_loss=0.02686, over 4816.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03016, over 972073.55 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 01:24:18,307 INFO [train.py:715] (7/8) Epoch 14, batch 11300, loss[loss=0.1363, simple_loss=0.2124, pruned_loss=0.03015, over 4777.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02997, over 971618.69 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:24:58,567 INFO [train.py:715] (7/8) Epoch 14, batch 11350, loss[loss=0.1258, simple_loss=0.2008, pruned_loss=0.02536, over 4937.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03001, over 972710.12 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 01:25:37,743 INFO [train.py:715] (7/8) Epoch 14, batch 11400, loss[loss=0.1338, simple_loss=0.2126, pruned_loss=0.02748, over 4852.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02999, over 972658.28 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 01:26:18,731 INFO [train.py:715] (7/8) Epoch 14, batch 11450, loss[loss=0.1223, simple_loss=0.2001, pruned_loss=0.02223, over 4926.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02972, over 973082.39 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 01:26:59,106 INFO [train.py:715] (7/8) Epoch 14, batch 11500, loss[loss=0.1199, simple_loss=0.196, pruned_loss=0.02191, over 4891.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02989, over 973309.86 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 01:27:39,024 INFO [train.py:715] (7/8) Epoch 14, batch 11550, loss[loss=0.1715, simple_loss=0.2412, pruned_loss=0.05091, over 4894.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02992, over 972849.83 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:28:18,478 INFO [train.py:715] (7/8) Epoch 14, batch 11600, loss[loss=0.1304, simple_loss=0.2042, pruned_loss=0.02828, over 4808.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03019, over 972259.44 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 01:28:58,180 INFO [train.py:715] (7/8) Epoch 14, batch 11650, 
loss[loss=0.1207, simple_loss=0.2002, pruned_loss=0.02055, over 4910.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03009, over 972216.80 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 01:29:37,889 INFO [train.py:715] (7/8) Epoch 14, batch 11700, loss[loss=0.1347, simple_loss=0.204, pruned_loss=0.0327, over 4979.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03005, over 971956.56 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 01:30:17,156 INFO [train.py:715] (7/8) Epoch 14, batch 11750, loss[loss=0.1141, simple_loss=0.1866, pruned_loss=0.02081, over 4973.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02985, over 972864.57 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 01:30:56,860 INFO [train.py:715] (7/8) Epoch 14, batch 11800, loss[loss=0.1452, simple_loss=0.2238, pruned_loss=0.0333, over 4852.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03023, over 973077.75 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:31:35,986 INFO [train.py:715] (7/8) Epoch 14, batch 11850, loss[loss=0.1182, simple_loss=0.1894, pruned_loss=0.0235, over 4819.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03036, over 973724.69 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 01:32:14,893 INFO [train.py:715] (7/8) Epoch 14, batch 11900, loss[loss=0.1325, simple_loss=0.2144, pruned_loss=0.02527, over 4797.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.0304, over 972767.90 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 01:32:54,220 INFO [train.py:715] (7/8) Epoch 14, batch 11950, loss[loss=0.1448, simple_loss=0.2138, pruned_loss=0.03786, over 4785.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.0306, over 972571.62 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:33:33,588 INFO [train.py:715] (7/8) Epoch 14, batch 12000, loss[loss=0.127, simple_loss=0.2046, pruned_loss=0.02473, over 4751.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03021, over 972384.54 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:33:33,588 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 01:33:43,199 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1051, simple_loss=0.1889, pruned_loss=0.01067, over 914524.00 frames. 
+2022-05-08 01:34:22,500 INFO [train.py:715] (7/8) Epoch 14, batch 12050, loss[loss=0.1227, simple_loss=0.1972, pruned_loss=0.02406, over 4794.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03019, over 972361.13 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:35:01,858 INFO [train.py:715] (7/8) Epoch 14, batch 12100, loss[loss=0.1598, simple_loss=0.2291, pruned_loss=0.04524, over 4855.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03051, over 972557.40 frames.], batch size: 30, lr: 1.57e-04 +2022-05-08 01:35:41,280 INFO [train.py:715] (7/8) Epoch 14, batch 12150, loss[loss=0.1247, simple_loss=0.1987, pruned_loss=0.02538, over 4768.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03023, over 971861.70 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:36:20,622 INFO [train.py:715] (7/8) Epoch 14, batch 12200, loss[loss=0.1197, simple_loss=0.1921, pruned_loss=0.02362, over 4879.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03033, over 972958.55 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 01:37:00,486 INFO [train.py:715] (7/8) Epoch 14, batch 12250, loss[loss=0.15, simple_loss=0.2294, pruned_loss=0.03527, over 4864.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03035, over 973189.66 frames.], batch size: 38, lr: 1.57e-04 +2022-05-08 01:37:39,679 INFO [train.py:715] (7/8) Epoch 14, batch 12300, loss[loss=0.146, simple_loss=0.2096, pruned_loss=0.04123, over 4940.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0301, over 973228.53 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 01:38:19,197 INFO [train.py:715] (7/8) Epoch 14, batch 12350, loss[loss=0.1378, simple_loss=0.2163, pruned_loss=0.02967, over 4897.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03016, over 972586.85 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:38:58,801 INFO [train.py:715] (7/8) Epoch 14, batch 12400, loss[loss=0.1244, simple_loss=0.2037, pruned_loss=0.02255, over 4919.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02956, over 972909.17 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 01:39:37,831 INFO [train.py:715] (7/8) Epoch 14, batch 12450, loss[loss=0.143, simple_loss=0.2117, pruned_loss=0.0372, over 4808.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03019, over 972147.82 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 01:40:17,259 INFO [train.py:715] (7/8) Epoch 14, batch 12500, loss[loss=0.1244, simple_loss=0.1893, pruned_loss=0.02976, over 4989.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.0298, over 972970.48 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:40:57,009 INFO [train.py:715] (7/8) Epoch 14, batch 12550, loss[loss=0.1688, simple_loss=0.2384, pruned_loss=0.04961, over 4929.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03026, over 972473.99 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 01:41:36,647 INFO [train.py:715] (7/8) Epoch 14, batch 12600, loss[loss=0.1335, simple_loss=0.2023, pruned_loss=0.03232, over 4745.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.0306, over 972283.99 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:42:15,596 INFO [train.py:715] (7/8) Epoch 14, batch 12650, loss[loss=0.1334, simple_loss=0.2023, pruned_loss=0.03227, over 4757.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03037, over 972564.35 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 
01:42:55,479 INFO [train.py:715] (7/8) Epoch 14, batch 12700, loss[loss=0.1388, simple_loss=0.2174, pruned_loss=0.03008, over 4930.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2092, pruned_loss=0.03033, over 973086.49 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 01:43:35,534 INFO [train.py:715] (7/8) Epoch 14, batch 12750, loss[loss=0.1402, simple_loss=0.2216, pruned_loss=0.02934, over 4973.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03092, over 973580.96 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:44:15,513 INFO [train.py:715] (7/8) Epoch 14, batch 12800, loss[loss=0.1356, simple_loss=0.2078, pruned_loss=0.03174, over 4898.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03091, over 973355.28 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:44:55,318 INFO [train.py:715] (7/8) Epoch 14, batch 12850, loss[loss=0.1551, simple_loss=0.2262, pruned_loss=0.04198, over 4982.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03043, over 972770.90 frames.], batch size: 33, lr: 1.57e-04 +2022-05-08 01:45:35,525 INFO [train.py:715] (7/8) Epoch 14, batch 12900, loss[loss=0.1421, simple_loss=0.2127, pruned_loss=0.03578, over 4842.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03085, over 972954.66 frames.], batch size: 30, lr: 1.57e-04 +2022-05-08 01:46:15,875 INFO [train.py:715] (7/8) Epoch 14, batch 12950, loss[loss=0.1604, simple_loss=0.2364, pruned_loss=0.04227, over 4699.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03037, over 972986.82 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:46:55,837 INFO [train.py:715] (7/8) Epoch 14, batch 13000, loss[loss=0.1372, simple_loss=0.2048, pruned_loss=0.03483, over 4847.00 frames.], tot_loss[loss=0.1357, simple_loss=0.21, pruned_loss=0.0307, over 973049.45 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 01:47:36,084 INFO [train.py:715] (7/8) Epoch 14, batch 13050, loss[loss=0.1391, simple_loss=0.215, pruned_loss=0.0316, over 4935.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03088, over 973149.79 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 01:48:16,101 INFO [train.py:715] (7/8) Epoch 14, batch 13100, loss[loss=0.1443, simple_loss=0.2201, pruned_loss=0.03429, over 4809.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2098, pruned_loss=0.03064, over 973350.71 frames.], batch size: 26, lr: 1.57e-04 +2022-05-08 01:48:56,291 INFO [train.py:715] (7/8) Epoch 14, batch 13150, loss[loss=0.1088, simple_loss=0.1849, pruned_loss=0.01633, over 4983.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2108, pruned_loss=0.0312, over 973473.98 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 01:49:36,384 INFO [train.py:715] (7/8) Epoch 14, batch 13200, loss[loss=0.135, simple_loss=0.2011, pruned_loss=0.03446, over 4955.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2109, pruned_loss=0.0314, over 972733.31 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:50:16,586 INFO [train.py:715] (7/8) Epoch 14, batch 13250, loss[loss=0.1248, simple_loss=0.2033, pruned_loss=0.02314, over 4897.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03052, over 972379.45 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:50:56,843 INFO [train.py:715] (7/8) Epoch 14, batch 13300, loss[loss=0.1527, simple_loss=0.2157, pruned_loss=0.04486, over 4952.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03091, over 973247.17 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 01:51:36,429 
INFO [train.py:715] (7/8) Epoch 14, batch 13350, loss[loss=0.1512, simple_loss=0.2252, pruned_loss=0.03858, over 4903.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03098, over 974013.19 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:52:15,921 INFO [train.py:715] (7/8) Epoch 14, batch 13400, loss[loss=0.1178, simple_loss=0.1981, pruned_loss=0.01878, over 4904.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03071, over 973251.53 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:52:55,517 INFO [train.py:715] (7/8) Epoch 14, batch 13450, loss[loss=0.1273, simple_loss=0.1974, pruned_loss=0.02865, over 4739.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.03016, over 972666.45 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 01:53:35,079 INFO [train.py:715] (7/8) Epoch 14, batch 13500, loss[loss=0.1383, simple_loss=0.2123, pruned_loss=0.03211, over 4924.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03055, over 972727.68 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:54:14,284 INFO [train.py:715] (7/8) Epoch 14, batch 13550, loss[loss=0.1308, simple_loss=0.209, pruned_loss=0.02632, over 4977.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2093, pruned_loss=0.03019, over 972978.52 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 01:54:53,681 INFO [train.py:715] (7/8) Epoch 14, batch 13600, loss[loss=0.1141, simple_loss=0.1796, pruned_loss=0.02432, over 4807.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.0298, over 972988.54 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 01:55:32,973 INFO [train.py:715] (7/8) Epoch 14, batch 13650, loss[loss=0.1356, simple_loss=0.1987, pruned_loss=0.03619, over 4971.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.0302, over 973381.88 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 01:56:12,542 INFO [train.py:715] (7/8) Epoch 14, batch 13700, loss[loss=0.1212, simple_loss=0.1913, pruned_loss=0.0256, over 4845.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03029, over 972914.99 frames.], batch size: 30, lr: 1.57e-04 +2022-05-08 01:56:51,589 INFO [train.py:715] (7/8) Epoch 14, batch 13750, loss[loss=0.08706, simple_loss=0.1555, pruned_loss=0.009311, over 4793.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2068, pruned_loss=0.03, over 972111.20 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 01:57:30,927 INFO [train.py:715] (7/8) Epoch 14, batch 13800, loss[loss=0.1277, simple_loss=0.204, pruned_loss=0.02571, over 4916.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03056, over 972488.91 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 01:58:12,497 INFO [train.py:715] (7/8) Epoch 14, batch 13850, loss[loss=0.1516, simple_loss=0.2213, pruned_loss=0.04099, over 4883.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03039, over 972007.55 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 01:58:51,822 INFO [train.py:715] (7/8) Epoch 14, batch 13900, loss[loss=0.1316, simple_loss=0.2044, pruned_loss=0.02937, over 4789.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03041, over 971834.56 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 01:59:31,449 INFO [train.py:715] (7/8) Epoch 14, batch 13950, loss[loss=0.1259, simple_loss=0.2052, pruned_loss=0.02325, over 4950.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03047, over 972620.62 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:00:10,944 INFO 
[train.py:715] (7/8) Epoch 14, batch 14000, loss[loss=0.1368, simple_loss=0.2192, pruned_loss=0.02718, over 4877.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03045, over 972897.34 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 02:00:50,383 INFO [train.py:715] (7/8) Epoch 14, batch 14050, loss[loss=0.135, simple_loss=0.2039, pruned_loss=0.03299, over 4878.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03025, over 972930.16 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 02:01:30,047 INFO [train.py:715] (7/8) Epoch 14, batch 14100, loss[loss=0.1304, simple_loss=0.2064, pruned_loss=0.02722, over 4810.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03089, over 973058.91 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:02:09,598 INFO [train.py:715] (7/8) Epoch 14, batch 14150, loss[loss=0.1174, simple_loss=0.1953, pruned_loss=0.0198, over 4933.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2105, pruned_loss=0.03102, over 973071.23 frames.], batch size: 23, lr: 1.57e-04 +2022-05-08 02:02:49,099 INFO [train.py:715] (7/8) Epoch 14, batch 14200, loss[loss=0.144, simple_loss=0.2128, pruned_loss=0.03755, over 4980.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2105, pruned_loss=0.03099, over 972366.89 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:03:28,357 INFO [train.py:715] (7/8) Epoch 14, batch 14250, loss[loss=0.101, simple_loss=0.1799, pruned_loss=0.01108, over 4977.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03083, over 971897.21 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 02:04:08,208 INFO [train.py:715] (7/8) Epoch 14, batch 14300, loss[loss=0.1423, simple_loss=0.2167, pruned_loss=0.03396, over 4932.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03074, over 971754.93 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:04:47,399 INFO [train.py:715] (7/8) Epoch 14, batch 14350, loss[loss=0.1178, simple_loss=0.1969, pruned_loss=0.01929, over 4894.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03083, over 971768.69 frames.], batch size: 22, lr: 1.57e-04 +2022-05-08 02:05:26,854 INFO [train.py:715] (7/8) Epoch 14, batch 14400, loss[loss=0.1353, simple_loss=0.2131, pruned_loss=0.02882, over 4814.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03087, over 971638.17 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 02:06:06,347 INFO [train.py:715] (7/8) Epoch 14, batch 14450, loss[loss=0.09892, simple_loss=0.179, pruned_loss=0.009399, over 4793.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03056, over 971205.67 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 02:06:45,904 INFO [train.py:715] (7/8) Epoch 14, batch 14500, loss[loss=0.1428, simple_loss=0.2318, pruned_loss=0.02686, over 4807.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03054, over 972316.87 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:07:25,179 INFO [train.py:715] (7/8) Epoch 14, batch 14550, loss[loss=0.1265, simple_loss=0.2012, pruned_loss=0.02595, over 4948.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03049, over 972721.23 frames.], batch size: 21, lr: 1.57e-04 +2022-05-08 02:08:04,456 INFO [train.py:715] (7/8) Epoch 14, batch 14600, loss[loss=0.1341, simple_loss=0.2124, pruned_loss=0.02791, over 4927.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03004, over 973010.97 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:08:44,680 INFO 
[train.py:715] (7/8) Epoch 14, batch 14650, loss[loss=0.1723, simple_loss=0.2408, pruned_loss=0.05192, over 4844.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03012, over 972650.32 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:09:24,107 INFO [train.py:715] (7/8) Epoch 14, batch 14700, loss[loss=0.1635, simple_loss=0.2331, pruned_loss=0.04689, over 4968.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03007, over 972707.12 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:10:03,919 INFO [train.py:715] (7/8) Epoch 14, batch 14750, loss[loss=0.1498, simple_loss=0.2236, pruned_loss=0.03805, over 4922.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03021, over 972530.09 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:10:43,096 INFO [train.py:715] (7/8) Epoch 14, batch 14800, loss[loss=0.1729, simple_loss=0.2471, pruned_loss=0.04938, over 4703.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03074, over 972360.47 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:11:23,012 INFO [train.py:715] (7/8) Epoch 14, batch 14850, loss[loss=0.1662, simple_loss=0.2255, pruned_loss=0.05344, over 4773.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03072, over 972838.07 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:12:02,554 INFO [train.py:715] (7/8) Epoch 14, batch 14900, loss[loss=0.1254, simple_loss=0.2034, pruned_loss=0.02367, over 4876.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.0307, over 972076.74 frames.], batch size: 38, lr: 1.57e-04 +2022-05-08 02:12:42,002 INFO [train.py:715] (7/8) Epoch 14, batch 14950, loss[loss=0.1362, simple_loss=0.1991, pruned_loss=0.03665, over 4854.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03029, over 972057.62 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 02:13:22,061 INFO [train.py:715] (7/8) Epoch 14, batch 15000, loss[loss=0.1387, simple_loss=0.2172, pruned_loss=0.03008, over 4871.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03006, over 971557.00 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 02:13:22,061 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 02:13:31,707 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1889, pruned_loss=0.01079, over 914524.00 frames. 
+2022-05-08 02:14:12,555 INFO [train.py:715] (7/8) Epoch 14, batch 15050, loss[loss=0.1594, simple_loss=0.2248, pruned_loss=0.047, over 4970.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03043, over 971527.78 frames.], batch size: 31, lr: 1.57e-04 +2022-05-08 02:14:52,643 INFO [train.py:715] (7/8) Epoch 14, batch 15100, loss[loss=0.1519, simple_loss=0.226, pruned_loss=0.03885, over 4961.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03075, over 972179.43 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 02:15:33,287 INFO [train.py:715] (7/8) Epoch 14, batch 15150, loss[loss=0.135, simple_loss=0.2101, pruned_loss=0.02992, over 4928.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03057, over 971730.66 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:16:13,416 INFO [train.py:715] (7/8) Epoch 14, batch 15200, loss[loss=0.1254, simple_loss=0.1949, pruned_loss=0.02792, over 4972.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03092, over 972399.18 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:16:54,060 INFO [train.py:715] (7/8) Epoch 14, batch 15250, loss[loss=0.1333, simple_loss=0.1974, pruned_loss=0.03463, over 4839.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2096, pruned_loss=0.03168, over 972114.77 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:17:33,930 INFO [train.py:715] (7/8) Epoch 14, batch 15300, loss[loss=0.1822, simple_loss=0.2487, pruned_loss=0.05789, over 4910.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2101, pruned_loss=0.03187, over 972156.70 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 02:18:13,478 INFO [train.py:715] (7/8) Epoch 14, batch 15350, loss[loss=0.1228, simple_loss=0.1936, pruned_loss=0.02599, over 4973.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03116, over 972191.49 frames.], batch size: 31, lr: 1.57e-04 +2022-05-08 02:18:53,585 INFO [train.py:715] (7/8) Epoch 14, batch 15400, loss[loss=0.1185, simple_loss=0.1784, pruned_loss=0.02932, over 4745.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03087, over 972914.39 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 02:19:32,977 INFO [train.py:715] (7/8) Epoch 14, batch 15450, loss[loss=0.1724, simple_loss=0.2531, pruned_loss=0.04583, over 4898.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2095, pruned_loss=0.03098, over 972871.93 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 02:20:12,215 INFO [train.py:715] (7/8) Epoch 14, batch 15500, loss[loss=0.1369, simple_loss=0.2186, pruned_loss=0.02765, over 4795.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03087, over 973257.77 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:20:51,551 INFO [train.py:715] (7/8) Epoch 14, batch 15550, loss[loss=0.142, simple_loss=0.2157, pruned_loss=0.03421, over 4746.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03073, over 972908.55 frames.], batch size: 16, lr: 1.57e-04 +2022-05-08 02:21:31,494 INFO [train.py:715] (7/8) Epoch 14, batch 15600, loss[loss=0.1349, simple_loss=0.2159, pruned_loss=0.02697, over 4764.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2085, pruned_loss=0.03085, over 971967.67 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:22:10,938 INFO [train.py:715] (7/8) Epoch 14, batch 15650, loss[loss=0.1284, simple_loss=0.2024, pruned_loss=0.02722, over 4908.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03088, over 972186.07 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 
02:22:49,324 INFO [train.py:715] (7/8) Epoch 14, batch 15700, loss[loss=0.117, simple_loss=0.1974, pruned_loss=0.01833, over 4920.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03054, over 972380.29 frames.], batch size: 29, lr: 1.57e-04 +2022-05-08 02:23:29,534 INFO [train.py:715] (7/8) Epoch 14, batch 15750, loss[loss=0.1257, simple_loss=0.211, pruned_loss=0.02019, over 4929.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.0303, over 972354.97 frames.], batch size: 18, lr: 1.57e-04 +2022-05-08 02:24:09,062 INFO [train.py:715] (7/8) Epoch 14, batch 15800, loss[loss=0.1322, simple_loss=0.2106, pruned_loss=0.02689, over 4971.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03055, over 972417.10 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 02:24:48,300 INFO [train.py:715] (7/8) Epoch 14, batch 15850, loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03102, over 4957.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03041, over 973644.27 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 02:25:27,580 INFO [train.py:715] (7/8) Epoch 14, batch 15900, loss[loss=0.1151, simple_loss=0.1868, pruned_loss=0.02168, over 4971.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03096, over 973099.57 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 02:26:07,620 INFO [train.py:715] (7/8) Epoch 14, batch 15950, loss[loss=0.1183, simple_loss=0.1908, pruned_loss=0.02287, over 4795.00 frames.], tot_loss[loss=0.136, simple_loss=0.2097, pruned_loss=0.03113, over 972186.49 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:26:47,034 INFO [train.py:715] (7/8) Epoch 14, batch 16000, loss[loss=0.1381, simple_loss=0.2086, pruned_loss=0.03383, over 4890.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03097, over 972239.51 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:27:25,752 INFO [train.py:715] (7/8) Epoch 14, batch 16050, loss[loss=0.1503, simple_loss=0.225, pruned_loss=0.03781, over 4853.00 frames.], tot_loss[loss=0.1361, simple_loss=0.21, pruned_loss=0.03109, over 972398.40 frames.], batch size: 32, lr: 1.57e-04 +2022-05-08 02:28:04,470 INFO [train.py:715] (7/8) Epoch 14, batch 16100, loss[loss=0.1368, simple_loss=0.2128, pruned_loss=0.03045, over 4832.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2096, pruned_loss=0.03147, over 971333.88 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:28:42,588 INFO [train.py:715] (7/8) Epoch 14, batch 16150, loss[loss=0.1221, simple_loss=0.2014, pruned_loss=0.02138, over 4802.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2104, pruned_loss=0.03152, over 971535.44 frames.], batch size: 13, lr: 1.57e-04 +2022-05-08 02:29:20,835 INFO [train.py:715] (7/8) Epoch 14, batch 16200, loss[loss=0.1498, simple_loss=0.219, pruned_loss=0.04034, over 4834.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03076, over 972042.58 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:29:59,439 INFO [train.py:715] (7/8) Epoch 14, batch 16250, loss[loss=0.1134, simple_loss=0.1938, pruned_loss=0.01653, over 4978.00 frames.], tot_loss[loss=0.135, simple_loss=0.2088, pruned_loss=0.03054, over 972164.57 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:30:38,582 INFO [train.py:715] (7/8) Epoch 14, batch 16300, loss[loss=0.1974, simple_loss=0.2579, pruned_loss=0.06845, over 4887.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03017, over 972257.31 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:31:16,532 
INFO [train.py:715] (7/8) Epoch 14, batch 16350, loss[loss=0.1607, simple_loss=0.2375, pruned_loss=0.04193, over 4971.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.0304, over 971941.76 frames.], batch size: 24, lr: 1.57e-04 +2022-05-08 02:31:55,708 INFO [train.py:715] (7/8) Epoch 14, batch 16400, loss[loss=0.1497, simple_loss=0.2183, pruned_loss=0.04055, over 4959.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03094, over 972129.22 frames.], batch size: 35, lr: 1.57e-04 +2022-05-08 02:32:35,422 INFO [train.py:715] (7/8) Epoch 14, batch 16450, loss[loss=0.1361, simple_loss=0.2138, pruned_loss=0.02922, over 4866.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03072, over 972095.26 frames.], batch size: 38, lr: 1.57e-04 +2022-05-08 02:33:14,858 INFO [train.py:715] (7/8) Epoch 14, batch 16500, loss[loss=0.1295, simple_loss=0.1978, pruned_loss=0.03062, over 4955.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03046, over 971673.50 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:33:53,745 INFO [train.py:715] (7/8) Epoch 14, batch 16550, loss[loss=0.1544, simple_loss=0.23, pruned_loss=0.03936, over 4702.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03096, over 971458.04 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:34:34,122 INFO [train.py:715] (7/8) Epoch 14, batch 16600, loss[loss=0.1255, simple_loss=0.1952, pruned_loss=0.02792, over 4814.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2093, pruned_loss=0.03124, over 971844.45 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:35:13,401 INFO [train.py:715] (7/8) Epoch 14, batch 16650, loss[loss=0.1089, simple_loss=0.1929, pruned_loss=0.0124, over 4980.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03111, over 972730.87 frames.], batch size: 28, lr: 1.57e-04 +2022-05-08 02:35:55,022 INFO [train.py:715] (7/8) Epoch 14, batch 16700, loss[loss=0.1282, simple_loss=0.2045, pruned_loss=0.02591, over 4830.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03058, over 973094.25 frames.], batch size: 26, lr: 1.57e-04 +2022-05-08 02:36:34,908 INFO [train.py:715] (7/8) Epoch 14, batch 16750, loss[loss=0.1352, simple_loss=0.1998, pruned_loss=0.03529, over 4761.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03108, over 973565.54 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:37:15,257 INFO [train.py:715] (7/8) Epoch 14, batch 16800, loss[loss=0.1207, simple_loss=0.1934, pruned_loss=0.02405, over 4786.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2106, pruned_loss=0.03124, over 974417.53 frames.], batch size: 17, lr: 1.57e-04 +2022-05-08 02:37:54,769 INFO [train.py:715] (7/8) Epoch 14, batch 16850, loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02929, over 4856.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03131, over 974239.60 frames.], batch size: 20, lr: 1.57e-04 +2022-05-08 02:38:34,395 INFO [train.py:715] (7/8) Epoch 14, batch 16900, loss[loss=0.1114, simple_loss=0.1985, pruned_loss=0.0122, over 4807.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2107, pruned_loss=0.03126, over 974229.18 frames.], batch size: 26, lr: 1.57e-04 +2022-05-08 02:39:15,369 INFO [train.py:715] (7/8) Epoch 14, batch 16950, loss[loss=0.1396, simple_loss=0.2217, pruned_loss=0.02876, over 4819.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03097, over 973528.36 frames.], batch size: 27, lr: 1.57e-04 +2022-05-08 02:39:56,921 INFO 
[train.py:715] (7/8) Epoch 14, batch 17000, loss[loss=0.1385, simple_loss=0.2199, pruned_loss=0.02861, over 4848.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03104, over 973600.73 frames.], batch size: 30, lr: 1.57e-04 +2022-05-08 02:40:37,818 INFO [train.py:715] (7/8) Epoch 14, batch 17050, loss[loss=0.1475, simple_loss=0.2183, pruned_loss=0.03835, over 4976.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03075, over 973226.21 frames.], batch size: 14, lr: 1.57e-04 +2022-05-08 02:41:18,913 INFO [train.py:715] (7/8) Epoch 14, batch 17100, loss[loss=0.1325, simple_loss=0.2074, pruned_loss=0.02885, over 4690.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03076, over 973154.57 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:42:01,003 INFO [train.py:715] (7/8) Epoch 14, batch 17150, loss[loss=0.1718, simple_loss=0.245, pruned_loss=0.04929, over 4938.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03089, over 972920.65 frames.], batch size: 39, lr: 1.57e-04 +2022-05-08 02:42:41,747 INFO [train.py:715] (7/8) Epoch 14, batch 17200, loss[loss=0.1715, simple_loss=0.2423, pruned_loss=0.05038, over 4891.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2105, pruned_loss=0.03126, over 973845.28 frames.], batch size: 19, lr: 1.57e-04 +2022-05-08 02:43:22,728 INFO [train.py:715] (7/8) Epoch 14, batch 17250, loss[loss=0.1317, simple_loss=0.2022, pruned_loss=0.03059, over 4838.00 frames.], tot_loss[loss=0.1368, simple_loss=0.211, pruned_loss=0.03132, over 972778.98 frames.], batch size: 15, lr: 1.57e-04 +2022-05-08 02:44:04,205 INFO [train.py:715] (7/8) Epoch 14, batch 17300, loss[loss=0.1276, simple_loss=0.1935, pruned_loss=0.03091, over 4774.00 frames.], tot_loss[loss=0.1367, simple_loss=0.2106, pruned_loss=0.03142, over 972827.09 frames.], batch size: 12, lr: 1.57e-04 +2022-05-08 02:44:45,870 INFO [train.py:715] (7/8) Epoch 14, batch 17350, loss[loss=0.1068, simple_loss=0.1816, pruned_loss=0.01603, over 4803.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03136, over 972851.58 frames.], batch size: 25, lr: 1.57e-04 +2022-05-08 02:45:26,238 INFO [train.py:715] (7/8) Epoch 14, batch 17400, loss[loss=0.1146, simple_loss=0.1921, pruned_loss=0.01853, over 4798.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03139, over 971965.90 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 02:46:07,489 INFO [train.py:715] (7/8) Epoch 14, batch 17450, loss[loss=0.09349, simple_loss=0.1698, pruned_loss=0.008568, over 4918.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2103, pruned_loss=0.03104, over 971297.02 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 02:46:49,066 INFO [train.py:715] (7/8) Epoch 14, batch 17500, loss[loss=0.1517, simple_loss=0.2259, pruned_loss=0.03875, over 4971.00 frames.], tot_loss[loss=0.1359, simple_loss=0.21, pruned_loss=0.03087, over 971945.62 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 02:47:29,774 INFO [train.py:715] (7/8) Epoch 14, batch 17550, loss[loss=0.1115, simple_loss=0.1774, pruned_loss=0.02274, over 4928.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03057, over 972112.60 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 02:48:10,326 INFO [train.py:715] (7/8) Epoch 14, batch 17600, loss[loss=0.1083, simple_loss=0.1867, pruned_loss=0.01499, over 4810.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.03086, over 972651.28 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 02:48:52,043 INFO 
[train.py:715] (7/8) Epoch 14, batch 17650, loss[loss=0.1438, simple_loss=0.2197, pruned_loss=0.03397, over 4907.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2103, pruned_loss=0.03132, over 973230.20 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 02:49:33,166 INFO [train.py:715] (7/8) Epoch 14, batch 17700, loss[loss=0.1284, simple_loss=0.2123, pruned_loss=0.02222, over 4955.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.03114, over 973923.37 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 02:50:13,655 INFO [train.py:715] (7/8) Epoch 14, batch 17750, loss[loss=0.1124, simple_loss=0.1855, pruned_loss=0.01968, over 4978.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2108, pruned_loss=0.03148, over 973724.07 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 02:50:55,018 INFO [train.py:715] (7/8) Epoch 14, batch 17800, loss[loss=0.1287, simple_loss=0.212, pruned_loss=0.02275, over 4869.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03127, over 973543.11 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 02:51:35,999 INFO [train.py:715] (7/8) Epoch 14, batch 17850, loss[loss=0.1842, simple_loss=0.2463, pruned_loss=0.06101, over 4760.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2096, pruned_loss=0.03131, over 973368.08 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 02:52:16,750 INFO [train.py:715] (7/8) Epoch 14, batch 17900, loss[loss=0.1782, simple_loss=0.2333, pruned_loss=0.06151, over 4859.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2091, pruned_loss=0.03106, over 973522.15 frames.], batch size: 30, lr: 1.56e-04 +2022-05-08 02:52:57,208 INFO [train.py:715] (7/8) Epoch 14, batch 17950, loss[loss=0.1333, simple_loss=0.2119, pruned_loss=0.02732, over 4820.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03086, over 972742.05 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 02:53:38,600 INFO [train.py:715] (7/8) Epoch 14, batch 18000, loss[loss=0.1219, simple_loss=0.192, pruned_loss=0.02587, over 4867.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.03101, over 971596.28 frames.], batch size: 20, lr: 1.56e-04 +2022-05-08 02:53:38,600 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 02:53:48,448 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1889, pruned_loss=0.01075, over 914524.00 frames. 
+2022-05-08 02:54:29,836 INFO [train.py:715] (7/8) Epoch 14, batch 18050, loss[loss=0.1486, simple_loss=0.221, pruned_loss=0.03806, over 4698.00 frames.], tot_loss[loss=0.136, simple_loss=0.2095, pruned_loss=0.03127, over 971839.50 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 02:55:10,985 INFO [train.py:715] (7/8) Epoch 14, batch 18100, loss[loss=0.1652, simple_loss=0.2452, pruned_loss=0.04257, over 4886.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2094, pruned_loss=0.0309, over 971247.55 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 02:55:52,584 INFO [train.py:715] (7/8) Epoch 14, batch 18150, loss[loss=0.11, simple_loss=0.1838, pruned_loss=0.01808, over 4788.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.0307, over 970453.72 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 02:56:33,502 INFO [train.py:715] (7/8) Epoch 14, batch 18200, loss[loss=0.1767, simple_loss=0.2494, pruned_loss=0.05206, over 4941.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03075, over 971218.97 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 02:57:15,446 INFO [train.py:715] (7/8) Epoch 14, batch 18250, loss[loss=0.1294, simple_loss=0.2047, pruned_loss=0.02701, over 4905.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03048, over 971910.57 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 02:57:56,898 INFO [train.py:715] (7/8) Epoch 14, batch 18300, loss[loss=0.1422, simple_loss=0.2203, pruned_loss=0.03201, over 4817.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03025, over 971307.40 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 02:58:36,496 INFO [train.py:715] (7/8) Epoch 14, batch 18350, loss[loss=0.1374, simple_loss=0.2099, pruned_loss=0.0324, over 4986.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03033, over 971292.19 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 02:59:17,360 INFO [train.py:715] (7/8) Epoch 14, batch 18400, loss[loss=0.129, simple_loss=0.2194, pruned_loss=0.0193, over 4978.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03024, over 971232.76 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 02:59:57,998 INFO [train.py:715] (7/8) Epoch 14, batch 18450, loss[loss=0.1445, simple_loss=0.2228, pruned_loss=0.0331, over 4741.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.0304, over 970888.37 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:00:38,224 INFO [train.py:715] (7/8) Epoch 14, batch 18500, loss[loss=0.143, simple_loss=0.2209, pruned_loss=0.0326, over 4838.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03043, over 970610.73 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:01:18,700 INFO [train.py:715] (7/8) Epoch 14, batch 18550, loss[loss=0.1626, simple_loss=0.2305, pruned_loss=0.04736, over 4774.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03072, over 970447.16 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:01:59,560 INFO [train.py:715] (7/8) Epoch 14, batch 18600, loss[loss=0.1652, simple_loss=0.2393, pruned_loss=0.04554, over 4984.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03096, over 970645.90 frames.], batch size: 40, lr: 1.56e-04 +2022-05-08 03:02:39,867 INFO [train.py:715] (7/8) Epoch 14, batch 18650, loss[loss=0.1463, simple_loss=0.2241, pruned_loss=0.0342, over 4778.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.0311, over 970729.26 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 
03:03:20,566 INFO [train.py:715] (7/8) Epoch 14, batch 18700, loss[loss=0.1479, simple_loss=0.2202, pruned_loss=0.03781, over 4823.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03082, over 971498.49 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:04:01,159 INFO [train.py:715] (7/8) Epoch 14, batch 18750, loss[loss=0.1147, simple_loss=0.1853, pruned_loss=0.02203, over 4879.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03046, over 971197.18 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:04:41,122 INFO [train.py:715] (7/8) Epoch 14, batch 18800, loss[loss=0.1721, simple_loss=0.2548, pruned_loss=0.04466, over 4806.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2089, pruned_loss=0.03103, over 971951.13 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:05:21,083 INFO [train.py:715] (7/8) Epoch 14, batch 18850, loss[loss=0.1505, simple_loss=0.2326, pruned_loss=0.03416, over 4983.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.0308, over 972498.47 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:06:01,835 INFO [train.py:715] (7/8) Epoch 14, batch 18900, loss[loss=0.1453, simple_loss=0.2217, pruned_loss=0.03444, over 4788.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03104, over 972872.89 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:06:42,899 INFO [train.py:715] (7/8) Epoch 14, batch 18950, loss[loss=0.1319, simple_loss=0.198, pruned_loss=0.03293, over 4816.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2095, pruned_loss=0.03111, over 972888.32 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:07:23,144 INFO [train.py:715] (7/8) Epoch 14, batch 19000, loss[loss=0.2173, simple_loss=0.2721, pruned_loss=0.08125, over 4735.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2098, pruned_loss=0.03146, over 972407.93 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:08:04,068 INFO [train.py:715] (7/8) Epoch 14, batch 19050, loss[loss=0.1318, simple_loss=0.1958, pruned_loss=0.03391, over 4769.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03075, over 971926.65 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:08:45,082 INFO [train.py:715] (7/8) Epoch 14, batch 19100, loss[loss=0.1212, simple_loss=0.21, pruned_loss=0.01619, over 4740.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03021, over 971836.30 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:09:25,470 INFO [train.py:715] (7/8) Epoch 14, batch 19150, loss[loss=0.1029, simple_loss=0.1882, pruned_loss=0.008808, over 4786.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03006, over 971515.30 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:10:04,874 INFO [train.py:715] (7/8) Epoch 14, batch 19200, loss[loss=0.1289, simple_loss=0.203, pruned_loss=0.02745, over 4785.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02994, over 971846.96 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:10:45,986 INFO [train.py:715] (7/8) Epoch 14, batch 19250, loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03442, over 4928.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03041, over 972197.28 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:11:26,903 INFO [train.py:715] (7/8) Epoch 14, batch 19300, loss[loss=0.1483, simple_loss=0.2288, pruned_loss=0.03386, over 4820.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03048, over 972246.13 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 
03:12:06,955 INFO [train.py:715] (7/8) Epoch 14, batch 19350, loss[loss=0.1023, simple_loss=0.1755, pruned_loss=0.01452, over 4853.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.0306, over 972469.22 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:12:47,199 INFO [train.py:715] (7/8) Epoch 14, batch 19400, loss[loss=0.1669, simple_loss=0.2477, pruned_loss=0.04305, over 4798.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03068, over 972206.39 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:13:28,649 INFO [train.py:715] (7/8) Epoch 14, batch 19450, loss[loss=0.1542, simple_loss=0.2157, pruned_loss=0.04638, over 4965.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03076, over 971279.21 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:14:08,963 INFO [train.py:715] (7/8) Epoch 14, batch 19500, loss[loss=0.1214, simple_loss=0.1971, pruned_loss=0.02291, over 4779.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2094, pruned_loss=0.03059, over 971973.49 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:14:49,611 INFO [train.py:715] (7/8) Epoch 14, batch 19550, loss[loss=0.1704, simple_loss=0.2481, pruned_loss=0.04638, over 4702.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2101, pruned_loss=0.03048, over 972359.33 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:15:30,065 INFO [train.py:715] (7/8) Epoch 14, batch 19600, loss[loss=0.1174, simple_loss=0.201, pruned_loss=0.01691, over 4954.00 frames.], tot_loss[loss=0.136, simple_loss=0.2105, pruned_loss=0.03072, over 972744.08 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:16:11,001 INFO [train.py:715] (7/8) Epoch 14, batch 19650, loss[loss=0.1159, simple_loss=0.1883, pruned_loss=0.02171, over 4978.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2101, pruned_loss=0.03072, over 972147.59 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:16:51,977 INFO [train.py:715] (7/8) Epoch 14, batch 19700, loss[loss=0.1154, simple_loss=0.1851, pruned_loss=0.0229, over 4807.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2106, pruned_loss=0.03093, over 971980.82 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:17:32,734 INFO [train.py:715] (7/8) Epoch 14, batch 19750, loss[loss=0.08879, simple_loss=0.16, pruned_loss=0.008771, over 4772.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2104, pruned_loss=0.03061, over 971876.81 frames.], batch size: 12, lr: 1.56e-04 +2022-05-08 03:18:13,651 INFO [train.py:715] (7/8) Epoch 14, batch 19800, loss[loss=0.1742, simple_loss=0.2391, pruned_loss=0.05464, over 4848.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2104, pruned_loss=0.03118, over 972544.34 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:18:54,282 INFO [train.py:715] (7/8) Epoch 14, batch 19850, loss[loss=0.1084, simple_loss=0.1732, pruned_loss=0.02176, over 4946.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2103, pruned_loss=0.0311, over 972150.66 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:19:35,285 INFO [train.py:715] (7/8) Epoch 14, batch 19900, loss[loss=0.1493, simple_loss=0.2216, pruned_loss=0.03849, over 4811.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03102, over 971885.06 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:20:15,394 INFO [train.py:715] (7/8) Epoch 14, batch 19950, loss[loss=0.1078, simple_loss=0.1859, pruned_loss=0.01485, over 4962.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03028, over 972259.59 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 
03:20:55,693 INFO [train.py:715] (7/8) Epoch 14, batch 20000, loss[loss=0.1321, simple_loss=0.2056, pruned_loss=0.02926, over 4890.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03032, over 971438.25 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 03:21:35,500 INFO [train.py:715] (7/8) Epoch 14, batch 20050, loss[loss=0.1098, simple_loss=0.188, pruned_loss=0.01582, over 4966.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03044, over 972488.81 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 03:22:15,347 INFO [train.py:715] (7/8) Epoch 14, batch 20100, loss[loss=0.122, simple_loss=0.2022, pruned_loss=0.02089, over 4968.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03037, over 972937.12 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:22:55,791 INFO [train.py:715] (7/8) Epoch 14, batch 20150, loss[loss=0.1155, simple_loss=0.1927, pruned_loss=0.01918, over 4817.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03055, over 972147.71 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:23:35,885 INFO [train.py:715] (7/8) Epoch 14, batch 20200, loss[loss=0.1637, simple_loss=0.2331, pruned_loss=0.0471, over 4799.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.0302, over 972410.68 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:24:16,360 INFO [train.py:715] (7/8) Epoch 14, batch 20250, loss[loss=0.1389, simple_loss=0.2151, pruned_loss=0.03133, over 4940.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03069, over 972590.24 frames.], batch size: 39, lr: 1.56e-04 +2022-05-08 03:24:56,508 INFO [train.py:715] (7/8) Epoch 14, batch 20300, loss[loss=0.1416, simple_loss=0.2151, pruned_loss=0.03405, over 4853.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03087, over 971879.19 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:25:37,283 INFO [train.py:715] (7/8) Epoch 14, batch 20350, loss[loss=0.1141, simple_loss=0.1943, pruned_loss=0.017, over 4818.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.0306, over 972341.55 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 03:26:17,609 INFO [train.py:715] (7/8) Epoch 14, batch 20400, loss[loss=0.1429, simple_loss=0.228, pruned_loss=0.02894, over 4984.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03051, over 972824.51 frames.], batch size: 20, lr: 1.56e-04 +2022-05-08 03:26:58,061 INFO [train.py:715] (7/8) Epoch 14, batch 20450, loss[loss=0.1884, simple_loss=0.2471, pruned_loss=0.06485, over 4861.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.0311, over 972462.35 frames.], batch size: 38, lr: 1.56e-04 +2022-05-08 03:27:39,218 INFO [train.py:715] (7/8) Epoch 14, batch 20500, loss[loss=0.1282, simple_loss=0.2063, pruned_loss=0.02508, over 4834.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03091, over 972451.26 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:28:19,575 INFO [train.py:715] (7/8) Epoch 14, batch 20550, loss[loss=0.1235, simple_loss=0.2037, pruned_loss=0.02168, over 4984.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03126, over 972453.21 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:29:00,476 INFO [train.py:715] (7/8) Epoch 14, batch 20600, loss[loss=0.155, simple_loss=0.2284, pruned_loss=0.04081, over 4887.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2109, pruned_loss=0.031, over 973014.72 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:29:41,275 INFO 
[train.py:715] (7/8) Epoch 14, batch 20650, loss[loss=0.1796, simple_loss=0.2469, pruned_loss=0.05619, over 4828.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2104, pruned_loss=0.03094, over 972713.59 frames.], batch size: 30, lr: 1.56e-04 +2022-05-08 03:30:22,926 INFO [train.py:715] (7/8) Epoch 14, batch 20700, loss[loss=0.1616, simple_loss=0.2318, pruned_loss=0.04565, over 4925.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03066, over 973120.01 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:31:03,264 INFO [train.py:715] (7/8) Epoch 14, batch 20750, loss[loss=0.1594, simple_loss=0.2248, pruned_loss=0.04697, over 4891.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03066, over 973575.17 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 03:31:43,461 INFO [train.py:715] (7/8) Epoch 14, batch 20800, loss[loss=0.1205, simple_loss=0.1993, pruned_loss=0.02088, over 4976.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03025, over 974424.95 frames.], batch size: 24, lr: 1.56e-04 +2022-05-08 03:32:24,162 INFO [train.py:715] (7/8) Epoch 14, batch 20850, loss[loss=0.1232, simple_loss=0.2054, pruned_loss=0.02047, over 4970.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03042, over 974298.72 frames.], batch size: 39, lr: 1.56e-04 +2022-05-08 03:33:04,694 INFO [train.py:715] (7/8) Epoch 14, batch 20900, loss[loss=0.1213, simple_loss=0.1996, pruned_loss=0.02154, over 4831.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.03073, over 974080.08 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 03:33:45,374 INFO [train.py:715] (7/8) Epoch 14, batch 20950, loss[loss=0.1366, simple_loss=0.2148, pruned_loss=0.02924, over 4752.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03092, over 973149.98 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:34:25,921 INFO [train.py:715] (7/8) Epoch 14, batch 21000, loss[loss=0.1318, simple_loss=0.2041, pruned_loss=0.02974, over 4843.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03109, over 973324.32 frames.], batch size: 30, lr: 1.56e-04 +2022-05-08 03:34:25,922 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 03:34:37,000 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1051, simple_loss=0.1889, pruned_loss=0.0107, over 914524.00 frames. 
+2022-05-08 03:35:17,904 INFO [train.py:715] (7/8) Epoch 14, batch 21050, loss[loss=0.1716, simple_loss=0.2424, pruned_loss=0.05038, over 4857.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03102, over 973798.81 frames.], batch size: 30, lr: 1.56e-04 +2022-05-08 03:35:58,613 INFO [train.py:715] (7/8) Epoch 14, batch 21100, loss[loss=0.1678, simple_loss=0.2422, pruned_loss=0.04672, over 4923.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03079, over 973755.41 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:36:39,417 INFO [train.py:715] (7/8) Epoch 14, batch 21150, loss[loss=0.1373, simple_loss=0.207, pruned_loss=0.03385, over 4790.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03092, over 973266.16 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:37:18,917 INFO [train.py:715] (7/8) Epoch 14, batch 21200, loss[loss=0.1116, simple_loss=0.1911, pruned_loss=0.01606, over 4971.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.0309, over 973129.22 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:37:59,330 INFO [train.py:715] (7/8) Epoch 14, batch 21250, loss[loss=0.1711, simple_loss=0.2369, pruned_loss=0.05268, over 4773.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2093, pruned_loss=0.03097, over 973625.99 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:38:39,038 INFO [train.py:715] (7/8) Epoch 14, batch 21300, loss[loss=0.1445, simple_loss=0.2171, pruned_loss=0.03596, over 4638.00 frames.], tot_loss[loss=0.1365, simple_loss=0.2104, pruned_loss=0.03135, over 973228.24 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:39:17,955 INFO [train.py:715] (7/8) Epoch 14, batch 21350, loss[loss=0.1655, simple_loss=0.243, pruned_loss=0.04403, over 4782.00 frames.], tot_loss[loss=0.1369, simple_loss=0.2105, pruned_loss=0.03167, over 972270.19 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:39:58,397 INFO [train.py:715] (7/8) Epoch 14, batch 21400, loss[loss=0.1137, simple_loss=0.1878, pruned_loss=0.01983, over 4840.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2095, pruned_loss=0.03138, over 972206.63 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:40:38,645 INFO [train.py:715] (7/8) Epoch 14, batch 21450, loss[loss=0.1172, simple_loss=0.1912, pruned_loss=0.02156, over 4861.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03096, over 972683.18 frames.], batch size: 13, lr: 1.56e-04 +2022-05-08 03:41:18,066 INFO [train.py:715] (7/8) Epoch 14, batch 21500, loss[loss=0.1505, simple_loss=0.2201, pruned_loss=0.04046, over 4819.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.03088, over 973075.53 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:41:57,083 INFO [train.py:715] (7/8) Epoch 14, batch 21550, loss[loss=0.1556, simple_loss=0.2274, pruned_loss=0.04188, over 4961.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03059, over 973603.35 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:42:37,073 INFO [train.py:715] (7/8) Epoch 14, batch 21600, loss[loss=0.1329, simple_loss=0.2135, pruned_loss=0.02617, over 4773.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03045, over 972295.84 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:43:16,850 INFO [train.py:715] (7/8) Epoch 14, batch 21650, loss[loss=0.1294, simple_loss=0.2101, pruned_loss=0.02436, over 4803.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03017, over 973297.29 frames.], batch size: 21, lr: 1.56e-04 
+2022-05-08 03:43:55,952 INFO [train.py:715] (7/8) Epoch 14, batch 21700, loss[loss=0.1237, simple_loss=0.1987, pruned_loss=0.0243, over 4929.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2101, pruned_loss=0.03104, over 973551.80 frames.], batch size: 23, lr: 1.56e-04 +2022-05-08 03:44:36,370 INFO [train.py:715] (7/8) Epoch 14, batch 21750, loss[loss=0.1195, simple_loss=0.1947, pruned_loss=0.02218, over 4819.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03072, over 973011.35 frames.], batch size: 26, lr: 1.56e-04 +2022-05-08 03:45:16,752 INFO [train.py:715] (7/8) Epoch 14, batch 21800, loss[loss=0.115, simple_loss=0.1846, pruned_loss=0.02273, over 4898.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03101, over 973688.93 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:45:56,146 INFO [train.py:715] (7/8) Epoch 14, batch 21850, loss[loss=0.1171, simple_loss=0.195, pruned_loss=0.01961, over 4866.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03054, over 973572.71 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 03:46:35,756 INFO [train.py:715] (7/8) Epoch 14, batch 21900, loss[loss=0.1542, simple_loss=0.2195, pruned_loss=0.04448, over 4989.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03061, over 972715.06 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:47:16,028 INFO [train.py:715] (7/8) Epoch 14, batch 21950, loss[loss=0.1037, simple_loss=0.1836, pruned_loss=0.01185, over 4982.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.0305, over 971828.30 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 03:47:55,290 INFO [train.py:715] (7/8) Epoch 14, batch 22000, loss[loss=0.1429, simple_loss=0.224, pruned_loss=0.03086, over 4780.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03071, over 971718.82 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:48:34,010 INFO [train.py:715] (7/8) Epoch 14, batch 22050, loss[loss=0.1618, simple_loss=0.2267, pruned_loss=0.04848, over 4947.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03057, over 972047.55 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:49:14,111 INFO [train.py:715] (7/8) Epoch 14, batch 22100, loss[loss=0.134, simple_loss=0.2056, pruned_loss=0.03114, over 4870.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03053, over 971687.64 frames.], batch size: 32, lr: 1.56e-04 +2022-05-08 03:49:53,820 INFO [train.py:715] (7/8) Epoch 14, batch 22150, loss[loss=0.1462, simple_loss=0.2129, pruned_loss=0.03972, over 4980.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03087, over 971393.95 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:50:32,843 INFO [train.py:715] (7/8) Epoch 14, batch 22200, loss[loss=0.1435, simple_loss=0.216, pruned_loss=0.03557, over 4930.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03069, over 970992.83 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:51:12,588 INFO [train.py:715] (7/8) Epoch 14, batch 22250, loss[loss=0.14, simple_loss=0.2066, pruned_loss=0.03677, over 4869.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03067, over 971029.36 frames.], batch size: 20, lr: 1.56e-04 +2022-05-08 03:51:52,766 INFO [train.py:715] (7/8) Epoch 14, batch 22300, loss[loss=0.1424, simple_loss=0.2324, pruned_loss=0.02617, over 4903.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2089, pruned_loss=0.03091, over 971209.74 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 
03:52:32,254 INFO [train.py:715] (7/8) Epoch 14, batch 22350, loss[loss=0.1338, simple_loss=0.2043, pruned_loss=0.03167, over 4937.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.0309, over 971475.19 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 03:53:11,401 INFO [train.py:715] (7/8) Epoch 14, batch 22400, loss[loss=0.09596, simple_loss=0.1699, pruned_loss=0.01103, over 4911.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.03076, over 971161.84 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 03:53:51,753 INFO [train.py:715] (7/8) Epoch 14, batch 22450, loss[loss=0.1409, simple_loss=0.2127, pruned_loss=0.03458, over 4886.00 frames.], tot_loss[loss=0.136, simple_loss=0.2099, pruned_loss=0.03105, over 972273.90 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:54:31,165 INFO [train.py:715] (7/8) Epoch 14, batch 22500, loss[loss=0.141, simple_loss=0.2237, pruned_loss=0.02918, over 4908.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03077, over 972481.12 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 03:55:10,459 INFO [train.py:715] (7/8) Epoch 14, batch 22550, loss[loss=0.1225, simple_loss=0.1924, pruned_loss=0.02626, over 4810.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03047, over 971415.43 frames.], batch size: 25, lr: 1.56e-04 +2022-05-08 03:55:50,819 INFO [train.py:715] (7/8) Epoch 14, batch 22600, loss[loss=0.1245, simple_loss=0.2039, pruned_loss=0.02253, over 4944.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03061, over 970984.64 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 03:56:31,699 INFO [train.py:715] (7/8) Epoch 14, batch 22650, loss[loss=0.1633, simple_loss=0.2336, pruned_loss=0.0465, over 4942.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03092, over 971651.59 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 03:57:11,535 INFO [train.py:715] (7/8) Epoch 14, batch 22700, loss[loss=0.1089, simple_loss=0.1912, pruned_loss=0.01331, over 4760.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2097, pruned_loss=0.0308, over 971214.44 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 03:57:50,670 INFO [train.py:715] (7/8) Epoch 14, batch 22750, loss[loss=0.1287, simple_loss=0.1908, pruned_loss=0.03333, over 4868.00 frames.], tot_loss[loss=0.136, simple_loss=0.2101, pruned_loss=0.03089, over 972040.07 frames.], batch size: 20, lr: 1.56e-04 +2022-05-08 03:58:31,999 INFO [train.py:715] (7/8) Epoch 14, batch 22800, loss[loss=0.152, simple_loss=0.2351, pruned_loss=0.0344, over 4780.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2098, pruned_loss=0.03061, over 972220.67 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 03:59:12,918 INFO [train.py:715] (7/8) Epoch 14, batch 22850, loss[loss=0.1859, simple_loss=0.2315, pruned_loss=0.07014, over 4977.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2098, pruned_loss=0.03102, over 972082.53 frames.], batch size: 31, lr: 1.56e-04 +2022-05-08 03:59:53,209 INFO [train.py:715] (7/8) Epoch 14, batch 22900, loss[loss=0.1344, simple_loss=0.2113, pruned_loss=0.0287, over 4945.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03065, over 972649.89 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 04:00:33,081 INFO [train.py:715] (7/8) Epoch 14, batch 22950, loss[loss=0.1275, simple_loss=0.2123, pruned_loss=0.02138, over 4790.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03039, over 972006.96 frames.], batch size: 14, lr: 1.56e-04 +2022-05-08 04:01:13,587 
INFO [train.py:715] (7/8) Epoch 14, batch 23000, loss[loss=0.1418, simple_loss=0.2141, pruned_loss=0.0347, over 4914.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03049, over 971946.70 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 04:01:53,102 INFO [train.py:715] (7/8) Epoch 14, batch 23050, loss[loss=0.1474, simple_loss=0.2258, pruned_loss=0.03445, over 4915.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02997, over 971551.04 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 04:02:32,416 INFO [train.py:715] (7/8) Epoch 14, batch 23100, loss[loss=0.1453, simple_loss=0.2185, pruned_loss=0.03607, over 4772.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03019, over 971778.14 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 04:03:13,071 INFO [train.py:715] (7/8) Epoch 14, batch 23150, loss[loss=0.1243, simple_loss=0.2002, pruned_loss=0.02418, over 4700.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03066, over 971263.75 frames.], batch size: 15, lr: 1.56e-04 +2022-05-08 04:03:54,324 INFO [train.py:715] (7/8) Epoch 14, batch 23200, loss[loss=0.1682, simple_loss=0.251, pruned_loss=0.04267, over 4947.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03013, over 971930.13 frames.], batch size: 29, lr: 1.56e-04 +2022-05-08 04:04:33,067 INFO [train.py:715] (7/8) Epoch 14, batch 23250, loss[loss=0.1293, simple_loss=0.2025, pruned_loss=0.02803, over 4922.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.0301, over 971779.84 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 04:05:13,478 INFO [train.py:715] (7/8) Epoch 14, batch 23300, loss[loss=0.1427, simple_loss=0.2138, pruned_loss=0.03578, over 4798.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2089, pruned_loss=0.03078, over 972159.29 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 04:05:54,158 INFO [train.py:715] (7/8) Epoch 14, batch 23350, loss[loss=0.139, simple_loss=0.2188, pruned_loss=0.02963, over 4878.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03007, over 972187.05 frames.], batch size: 22, lr: 1.56e-04 +2022-05-08 04:06:33,757 INFO [train.py:715] (7/8) Epoch 14, batch 23400, loss[loss=0.1263, simple_loss=0.2055, pruned_loss=0.02352, over 4890.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02977, over 972429.58 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 04:07:12,806 INFO [train.py:715] (7/8) Epoch 14, batch 23450, loss[loss=0.1343, simple_loss=0.2143, pruned_loss=0.0271, over 4808.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02985, over 972542.59 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 04:07:53,399 INFO [train.py:715] (7/8) Epoch 14, batch 23500, loss[loss=0.1193, simple_loss=0.1985, pruned_loss=0.01999, over 4909.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03047, over 972395.03 frames.], batch size: 19, lr: 1.56e-04 +2022-05-08 04:08:34,058 INFO [train.py:715] (7/8) Epoch 14, batch 23550, loss[loss=0.1367, simple_loss=0.2126, pruned_loss=0.03037, over 4850.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03018, over 971853.33 frames.], batch size: 38, lr: 1.56e-04 +2022-05-08 04:09:13,317 INFO [train.py:715] (7/8) Epoch 14, batch 23600, loss[loss=0.1368, simple_loss=0.2101, pruned_loss=0.03177, over 4877.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03005, over 971537.12 frames.], batch size: 16, lr: 1.56e-04 +2022-05-08 04:09:52,598 INFO 
[train.py:715] (7/8) Epoch 14, batch 23650, loss[loss=0.1244, simple_loss=0.2012, pruned_loss=0.02383, over 4962.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03018, over 971901.98 frames.], batch size: 35, lr: 1.56e-04 +2022-05-08 04:10:32,140 INFO [train.py:715] (7/8) Epoch 14, batch 23700, loss[loss=0.1316, simple_loss=0.1999, pruned_loss=0.0317, over 4821.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03028, over 972128.42 frames.], batch size: 12, lr: 1.56e-04 +2022-05-08 04:11:11,203 INFO [train.py:715] (7/8) Epoch 14, batch 23750, loss[loss=0.1248, simple_loss=0.1937, pruned_loss=0.02796, over 4907.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03016, over 971647.37 frames.], batch size: 17, lr: 1.56e-04 +2022-05-08 04:11:50,485 INFO [train.py:715] (7/8) Epoch 14, batch 23800, loss[loss=0.1669, simple_loss=0.2438, pruned_loss=0.04504, over 4928.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03013, over 972134.28 frames.], batch size: 18, lr: 1.56e-04 +2022-05-08 04:12:30,659 INFO [train.py:715] (7/8) Epoch 14, batch 23850, loss[loss=0.1447, simple_loss=0.2233, pruned_loss=0.03309, over 4959.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03003, over 971987.76 frames.], batch size: 28, lr: 1.56e-04 +2022-05-08 04:13:10,491 INFO [train.py:715] (7/8) Epoch 14, batch 23900, loss[loss=0.1387, simple_loss=0.223, pruned_loss=0.02724, over 4802.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03066, over 971325.65 frames.], batch size: 21, lr: 1.56e-04 +2022-05-08 04:13:49,740 INFO [train.py:715] (7/8) Epoch 14, batch 23950, loss[loss=0.1243, simple_loss=0.1971, pruned_loss=0.02577, over 4954.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03001, over 971825.20 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 04:14:30,063 INFO [train.py:715] (7/8) Epoch 14, batch 24000, loss[loss=0.1201, simple_loss=0.2004, pruned_loss=0.0199, over 4895.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02967, over 972119.62 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:14:30,063 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 04:14:41,438 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1052, simple_loss=0.1889, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 04:15:21,386 INFO [train.py:715] (7/8) Epoch 14, batch 24050, loss[loss=0.1098, simple_loss=0.1812, pruned_loss=0.01915, over 4695.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02965, over 972329.15 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:16:02,435 INFO [train.py:715] (7/8) Epoch 14, batch 24100, loss[loss=0.1494, simple_loss=0.2213, pruned_loss=0.03873, over 4770.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02974, over 971599.16 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 04:16:41,514 INFO [train.py:715] (7/8) Epoch 14, batch 24150, loss[loss=0.1338, simple_loss=0.2127, pruned_loss=0.02748, over 4971.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03035, over 971867.13 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:17:21,104 INFO [train.py:715] (7/8) Epoch 14, batch 24200, loss[loss=0.115, simple_loss=0.1836, pruned_loss=0.02323, over 4694.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02955, over 971610.77 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:18:01,400 INFO [train.py:715] (7/8) Epoch 14, batch 24250, loss[loss=0.1879, simple_loss=0.2514, pruned_loss=0.06223, over 4697.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02972, over 972014.34 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:18:41,647 INFO [train.py:715] (7/8) Epoch 14, batch 24300, loss[loss=0.1177, simple_loss=0.1879, pruned_loss=0.02374, over 4831.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2073, pruned_loss=0.0301, over 971380.96 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:19:20,608 INFO [train.py:715] (7/8) Epoch 14, batch 24350, loss[loss=0.1406, simple_loss=0.2194, pruned_loss=0.03086, over 4838.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03026, over 971888.90 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 04:20:01,389 INFO [train.py:715] (7/8) Epoch 14, batch 24400, loss[loss=0.1285, simple_loss=0.1944, pruned_loss=0.03131, over 4863.00 frames.], tot_loss[loss=0.134, simple_loss=0.2075, pruned_loss=0.03029, over 971957.49 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 04:20:43,005 INFO [train.py:715] (7/8) Epoch 14, batch 24450, loss[loss=0.1303, simple_loss=0.1998, pruned_loss=0.03039, over 4883.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03013, over 971980.70 frames.], batch size: 22, lr: 1.55e-04 +2022-05-08 04:21:22,341 INFO [train.py:715] (7/8) Epoch 14, batch 24500, loss[loss=0.1756, simple_loss=0.2429, pruned_loss=0.05419, over 4886.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.0306, over 972039.59 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 04:22:02,601 INFO [train.py:715] (7/8) Epoch 14, batch 24550, loss[loss=0.1231, simple_loss=0.1929, pruned_loss=0.02663, over 4983.00 frames.], tot_loss[loss=0.1355, simple_loss=0.209, pruned_loss=0.03105, over 971857.41 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 04:22:43,755 INFO [train.py:715] (7/8) Epoch 14, batch 24600, loss[loss=0.1457, simple_loss=0.2247, pruned_loss=0.0333, over 4833.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2084, pruned_loss=0.03073, over 972316.97 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 04:23:25,378 INFO [train.py:715] (7/8) Epoch 14, batch 24650, loss[loss=0.1123, simple_loss=0.1841, pruned_loss=0.02029, over 4892.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03082, over 971935.51 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 
04:24:07,559 INFO [train.py:715] (7/8) Epoch 14, batch 24700, loss[loss=0.1205, simple_loss=0.1875, pruned_loss=0.02678, over 4853.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2086, pruned_loss=0.03095, over 971091.96 frames.], batch size: 34, lr: 1.55e-04 +2022-05-08 04:24:48,468 INFO [train.py:715] (7/8) Epoch 14, batch 24750, loss[loss=0.1412, simple_loss=0.2191, pruned_loss=0.03165, over 4976.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2097, pruned_loss=0.03148, over 971988.25 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:25:30,058 INFO [train.py:715] (7/8) Epoch 14, batch 24800, loss[loss=0.1233, simple_loss=0.1933, pruned_loss=0.02671, over 4979.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2091, pruned_loss=0.03117, over 972529.36 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 04:26:10,641 INFO [train.py:715] (7/8) Epoch 14, batch 24850, loss[loss=0.1204, simple_loss=0.1896, pruned_loss=0.02557, over 4925.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2084, pruned_loss=0.03057, over 972106.14 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:26:50,223 INFO [train.py:715] (7/8) Epoch 14, batch 24900, loss[loss=0.1328, simple_loss=0.1988, pruned_loss=0.03337, over 4797.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03023, over 972207.12 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 04:27:31,163 INFO [train.py:715] (7/8) Epoch 14, batch 24950, loss[loss=0.129, simple_loss=0.2078, pruned_loss=0.02511, over 4822.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03059, over 972507.99 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 04:28:12,055 INFO [train.py:715] (7/8) Epoch 14, batch 25000, loss[loss=0.1563, simple_loss=0.2327, pruned_loss=0.03995, over 4773.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.0303, over 972054.14 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:28:51,329 INFO [train.py:715] (7/8) Epoch 14, batch 25050, loss[loss=0.111, simple_loss=0.1901, pruned_loss=0.01598, over 4971.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03028, over 972291.15 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 04:29:32,183 INFO [train.py:715] (7/8) Epoch 14, batch 25100, loss[loss=0.1269, simple_loss=0.2022, pruned_loss=0.02578, over 4713.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03004, over 972109.89 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:30:13,138 INFO [train.py:715] (7/8) Epoch 14, batch 25150, loss[loss=0.1318, simple_loss=0.2024, pruned_loss=0.03058, over 4911.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03013, over 973619.23 frames.], batch size: 39, lr: 1.55e-04 +2022-05-08 04:30:53,336 INFO [train.py:715] (7/8) Epoch 14, batch 25200, loss[loss=0.1242, simple_loss=0.1954, pruned_loss=0.02651, over 4738.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03024, over 973332.31 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 04:31:31,966 INFO [train.py:715] (7/8) Epoch 14, batch 25250, loss[loss=0.1265, simple_loss=0.2015, pruned_loss=0.02572, over 4815.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03105, over 973378.70 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 04:32:12,613 INFO [train.py:715] (7/8) Epoch 14, batch 25300, loss[loss=0.1421, simple_loss=0.2156, pruned_loss=0.03435, over 4835.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2098, pruned_loss=0.03091, over 972964.02 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 
04:32:53,032 INFO [train.py:715] (7/8) Epoch 14, batch 25350, loss[loss=0.1252, simple_loss=0.1961, pruned_loss=0.02715, over 4975.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03078, over 973486.66 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 04:33:31,586 INFO [train.py:715] (7/8) Epoch 14, batch 25400, loss[loss=0.1176, simple_loss=0.189, pruned_loss=0.0231, over 4797.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2099, pruned_loss=0.03085, over 972835.35 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:34:11,987 INFO [train.py:715] (7/8) Epoch 14, batch 25450, loss[loss=0.1331, simple_loss=0.2129, pruned_loss=0.02667, over 4937.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2096, pruned_loss=0.03067, over 972585.83 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 04:34:52,385 INFO [train.py:715] (7/8) Epoch 14, batch 25500, loss[loss=0.1351, simple_loss=0.2016, pruned_loss=0.03428, over 4928.00 frames.], tot_loss[loss=0.136, simple_loss=0.21, pruned_loss=0.03097, over 972848.11 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 04:35:31,822 INFO [train.py:715] (7/8) Epoch 14, batch 25550, loss[loss=0.1324, simple_loss=0.2108, pruned_loss=0.02699, over 4937.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03056, over 972945.88 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 04:36:10,564 INFO [train.py:715] (7/8) Epoch 14, batch 25600, loss[loss=0.1284, simple_loss=0.2066, pruned_loss=0.02516, over 4737.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2097, pruned_loss=0.03056, over 972124.53 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 04:36:50,636 INFO [train.py:715] (7/8) Epoch 14, batch 25650, loss[loss=0.1166, simple_loss=0.1979, pruned_loss=0.01766, over 4789.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2098, pruned_loss=0.03057, over 972201.27 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:37:30,750 INFO [train.py:715] (7/8) Epoch 14, batch 25700, loss[loss=0.1314, simple_loss=0.2024, pruned_loss=0.03016, over 4831.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2095, pruned_loss=0.03033, over 972620.97 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 04:38:09,217 INFO [train.py:715] (7/8) Epoch 14, batch 25750, loss[loss=0.1549, simple_loss=0.2264, pruned_loss=0.04175, over 4878.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2099, pruned_loss=0.03024, over 972292.68 frames.], batch size: 39, lr: 1.55e-04 +2022-05-08 04:38:48,530 INFO [train.py:715] (7/8) Epoch 14, batch 25800, loss[loss=0.1706, simple_loss=0.238, pruned_loss=0.05159, over 4879.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2101, pruned_loss=0.03068, over 972928.38 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 04:39:28,744 INFO [train.py:715] (7/8) Epoch 14, batch 25850, loss[loss=0.123, simple_loss=0.2048, pruned_loss=0.02057, over 4945.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2104, pruned_loss=0.03102, over 972615.18 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 04:40:07,963 INFO [train.py:715] (7/8) Epoch 14, batch 25900, loss[loss=0.146, simple_loss=0.2186, pruned_loss=0.0367, over 4753.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2106, pruned_loss=0.03113, over 972986.19 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:40:46,743 INFO [train.py:715] (7/8) Epoch 14, batch 25950, loss[loss=0.1351, simple_loss=0.2111, pruned_loss=0.02951, over 4921.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2105, pruned_loss=0.03114, over 973079.42 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 04:41:26,886 
INFO [train.py:715] (7/8) Epoch 14, batch 26000, loss[loss=0.1447, simple_loss=0.2247, pruned_loss=0.03234, over 4746.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2102, pruned_loss=0.03054, over 972056.63 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 04:42:06,874 INFO [train.py:715] (7/8) Epoch 14, batch 26050, loss[loss=0.1236, simple_loss=0.2084, pruned_loss=0.01936, over 4815.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2098, pruned_loss=0.03047, over 972688.49 frames.], batch size: 26, lr: 1.55e-04 +2022-05-08 04:42:44,779 INFO [train.py:715] (7/8) Epoch 14, batch 26100, loss[loss=0.1302, simple_loss=0.208, pruned_loss=0.02619, over 4945.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03032, over 972396.99 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:43:24,719 INFO [train.py:715] (7/8) Epoch 14, batch 26150, loss[loss=0.1438, simple_loss=0.2168, pruned_loss=0.0354, over 4941.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.0303, over 972613.89 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 04:44:05,201 INFO [train.py:715] (7/8) Epoch 14, batch 26200, loss[loss=0.1321, simple_loss=0.1996, pruned_loss=0.0323, over 4975.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.03041, over 972250.59 frames.], batch size: 35, lr: 1.55e-04 +2022-05-08 04:44:44,004 INFO [train.py:715] (7/8) Epoch 14, batch 26250, loss[loss=0.1458, simple_loss=0.2206, pruned_loss=0.03556, over 4920.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03029, over 971881.29 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:45:23,188 INFO [train.py:715] (7/8) Epoch 14, batch 26300, loss[loss=0.1251, simple_loss=0.2021, pruned_loss=0.02408, over 4852.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03056, over 972613.47 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:46:03,665 INFO [train.py:715] (7/8) Epoch 14, batch 26350, loss[loss=0.1142, simple_loss=0.1849, pruned_loss=0.02168, over 4928.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03013, over 971650.07 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 04:46:43,191 INFO [train.py:715] (7/8) Epoch 14, batch 26400, loss[loss=0.1331, simple_loss=0.2052, pruned_loss=0.03057, over 4703.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03029, over 971540.99 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:47:21,827 INFO [train.py:715] (7/8) Epoch 14, batch 26450, loss[loss=0.1283, simple_loss=0.213, pruned_loss=0.02185, over 4812.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.0299, over 972057.99 frames.], batch size: 27, lr: 1.55e-04 +2022-05-08 04:48:02,188 INFO [train.py:715] (7/8) Epoch 14, batch 26500, loss[loss=0.1333, simple_loss=0.2026, pruned_loss=0.03197, over 4989.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.0298, over 972987.22 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 04:48:42,603 INFO [train.py:715] (7/8) Epoch 14, batch 26550, loss[loss=0.1194, simple_loss=0.1892, pruned_loss=0.02487, over 4770.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02985, over 972846.81 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 04:49:21,898 INFO [train.py:715] (7/8) Epoch 14, batch 26600, loss[loss=0.1514, simple_loss=0.226, pruned_loss=0.03842, over 4843.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02978, over 972815.90 frames.], batch size: 34, lr: 1.55e-04 +2022-05-08 04:50:00,871 INFO 
[train.py:715] (7/8) Epoch 14, batch 26650, loss[loss=0.1323, simple_loss=0.2133, pruned_loss=0.02561, over 4917.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03024, over 973273.44 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 04:50:41,181 INFO [train.py:715] (7/8) Epoch 14, batch 26700, loss[loss=0.1241, simple_loss=0.2031, pruned_loss=0.02254, over 4978.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02982, over 972479.72 frames.], batch size: 31, lr: 1.55e-04 +2022-05-08 04:51:21,681 INFO [train.py:715] (7/8) Epoch 14, batch 26750, loss[loss=0.1282, simple_loss=0.2082, pruned_loss=0.02415, over 4844.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02971, over 972500.76 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 04:52:00,703 INFO [train.py:715] (7/8) Epoch 14, batch 26800, loss[loss=0.1268, simple_loss=0.1986, pruned_loss=0.02753, over 4703.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.0297, over 972141.56 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:52:40,484 INFO [train.py:715] (7/8) Epoch 14, batch 26850, loss[loss=0.1151, simple_loss=0.1983, pruned_loss=0.01598, over 4863.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02989, over 972034.51 frames.], batch size: 22, lr: 1.55e-04 +2022-05-08 04:53:20,915 INFO [train.py:715] (7/8) Epoch 14, batch 26900, loss[loss=0.1155, simple_loss=0.1882, pruned_loss=0.0214, over 4865.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03004, over 971982.92 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:54:00,762 INFO [train.py:715] (7/8) Epoch 14, batch 26950, loss[loss=0.184, simple_loss=0.2484, pruned_loss=0.05983, over 4987.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03018, over 972317.12 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 04:54:39,968 INFO [train.py:715] (7/8) Epoch 14, batch 27000, loss[loss=0.1353, simple_loss=0.2034, pruned_loss=0.03364, over 4836.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03015, over 972044.31 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 04:54:39,969 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 04:54:49,614 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1049, simple_loss=0.1886, pruned_loss=0.01053, over 914524.00 frames. 
+2022-05-08 04:55:29,149 INFO [train.py:715] (7/8) Epoch 14, batch 27050, loss[loss=0.1349, simple_loss=0.2198, pruned_loss=0.02498, over 4872.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03003, over 972466.47 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:56:09,803 INFO [train.py:715] (7/8) Epoch 14, batch 27100, loss[loss=0.1132, simple_loss=0.1934, pruned_loss=0.0165, over 4990.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03028, over 972041.18 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 04:56:50,330 INFO [train.py:715] (7/8) Epoch 14, batch 27150, loss[loss=0.1324, simple_loss=0.2057, pruned_loss=0.02953, over 4876.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03029, over 972357.80 frames.], batch size: 13, lr: 1.55e-04 +2022-05-08 04:57:29,051 INFO [train.py:715] (7/8) Epoch 14, batch 27200, loss[loss=0.1333, simple_loss=0.2124, pruned_loss=0.02715, over 4843.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03056, over 972786.83 frames.], batch size: 20, lr: 1.55e-04 +2022-05-08 04:58:08,439 INFO [train.py:715] (7/8) Epoch 14, batch 27250, loss[loss=0.1329, simple_loss=0.2136, pruned_loss=0.0261, over 4987.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03085, over 973370.71 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 04:58:48,576 INFO [train.py:715] (7/8) Epoch 14, batch 27300, loss[loss=0.1213, simple_loss=0.1938, pruned_loss=0.02441, over 4801.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03046, over 973308.82 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 04:59:28,196 INFO [train.py:715] (7/8) Epoch 14, batch 27350, loss[loss=0.118, simple_loss=0.1888, pruned_loss=0.02361, over 4852.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03041, over 973167.72 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 05:00:06,593 INFO [train.py:715] (7/8) Epoch 14, batch 27400, loss[loss=0.1086, simple_loss=0.1869, pruned_loss=0.01516, over 4938.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03012, over 974338.15 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 05:00:46,867 INFO [train.py:715] (7/8) Epoch 14, batch 27450, loss[loss=0.1294, simple_loss=0.2106, pruned_loss=0.02405, over 4941.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03034, over 973993.32 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 05:01:26,701 INFO [train.py:715] (7/8) Epoch 14, batch 27500, loss[loss=0.1493, simple_loss=0.2266, pruned_loss=0.03597, over 4841.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03029, over 974754.66 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:02:05,454 INFO [train.py:715] (7/8) Epoch 14, batch 27550, loss[loss=0.1299, simple_loss=0.1972, pruned_loss=0.03132, over 4882.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03045, over 974404.05 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:02:45,160 INFO [train.py:715] (7/8) Epoch 14, batch 27600, loss[loss=0.0958, simple_loss=0.1716, pruned_loss=0.009984, over 4803.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.03053, over 972967.23 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 05:03:25,492 INFO [train.py:715] (7/8) Epoch 14, batch 27650, loss[loss=0.127, simple_loss=0.2007, pruned_loss=0.0267, over 4969.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03043, over 972551.28 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 
05:04:04,759 INFO [train.py:715] (7/8) Epoch 14, batch 27700, loss[loss=0.1196, simple_loss=0.2015, pruned_loss=0.01886, over 4960.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03074, over 972722.93 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 05:04:43,282 INFO [train.py:715] (7/8) Epoch 14, batch 27750, loss[loss=0.1432, simple_loss=0.2077, pruned_loss=0.03936, over 4969.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03038, over 971967.63 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:05:23,450 INFO [train.py:715] (7/8) Epoch 14, batch 27800, loss[loss=0.129, simple_loss=0.2108, pruned_loss=0.02355, over 4982.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03056, over 971605.38 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:06:03,191 INFO [train.py:715] (7/8) Epoch 14, batch 27850, loss[loss=0.1206, simple_loss=0.1946, pruned_loss=0.02328, over 4905.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.0305, over 972219.21 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:06:41,706 INFO [train.py:715] (7/8) Epoch 14, batch 27900, loss[loss=0.1397, simple_loss=0.2195, pruned_loss=0.02999, over 4808.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03043, over 972373.02 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 05:07:21,730 INFO [train.py:715] (7/8) Epoch 14, batch 27950, loss[loss=0.1378, simple_loss=0.1993, pruned_loss=0.0382, over 4754.00 frames.], tot_loss[loss=0.1356, simple_loss=0.209, pruned_loss=0.03112, over 971983.60 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:08:01,580 INFO [train.py:715] (7/8) Epoch 14, batch 28000, loss[loss=0.1407, simple_loss=0.2185, pruned_loss=0.03148, over 4913.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03067, over 972013.75 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 05:08:40,623 INFO [train.py:715] (7/8) Epoch 14, batch 28050, loss[loss=0.1319, simple_loss=0.204, pruned_loss=0.0299, over 4888.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03066, over 971883.01 frames.], batch size: 22, lr: 1.55e-04 +2022-05-08 05:09:19,682 INFO [train.py:715] (7/8) Epoch 14, batch 28100, loss[loss=0.1492, simple_loss=0.2247, pruned_loss=0.03686, over 4754.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03079, over 972525.48 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:10:00,238 INFO [train.py:715] (7/8) Epoch 14, batch 28150, loss[loss=0.15, simple_loss=0.2221, pruned_loss=0.039, over 4735.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.0312, over 971917.63 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:10:39,944 INFO [train.py:715] (7/8) Epoch 14, batch 28200, loss[loss=0.1434, simple_loss=0.2134, pruned_loss=0.03672, over 4897.00 frames.], tot_loss[loss=0.1362, simple_loss=0.21, pruned_loss=0.0312, over 971807.47 frames.], batch size: 39, lr: 1.55e-04 +2022-05-08 05:11:17,985 INFO [train.py:715] (7/8) Epoch 14, batch 28250, loss[loss=0.1255, simple_loss=0.1986, pruned_loss=0.02624, over 4979.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2101, pruned_loss=0.03127, over 972442.61 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:11:58,126 INFO [train.py:715] (7/8) Epoch 14, batch 28300, loss[loss=0.119, simple_loss=0.1961, pruned_loss=0.02093, over 4758.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2112, pruned_loss=0.0318, over 971748.26 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 05:12:38,005 INFO 
[train.py:715] (7/8) Epoch 14, batch 28350, loss[loss=0.1438, simple_loss=0.2225, pruned_loss=0.0326, over 4861.00 frames.], tot_loss[loss=0.1377, simple_loss=0.2113, pruned_loss=0.03205, over 972847.32 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 05:13:16,550 INFO [train.py:715] (7/8) Epoch 14, batch 28400, loss[loss=0.1111, simple_loss=0.1906, pruned_loss=0.01576, over 4973.00 frames.], tot_loss[loss=0.1374, simple_loss=0.2113, pruned_loss=0.03178, over 972117.07 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 05:13:56,136 INFO [train.py:715] (7/8) Epoch 14, batch 28450, loss[loss=0.1618, simple_loss=0.2202, pruned_loss=0.05175, over 4926.00 frames.], tot_loss[loss=0.1371, simple_loss=0.2109, pruned_loss=0.03165, over 972251.96 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 05:14:36,404 INFO [train.py:715] (7/8) Epoch 14, batch 28500, loss[loss=0.1549, simple_loss=0.2395, pruned_loss=0.03514, over 4980.00 frames.], tot_loss[loss=0.1366, simple_loss=0.2105, pruned_loss=0.03138, over 971921.14 frames.], batch size: 28, lr: 1.55e-04 +2022-05-08 05:15:15,666 INFO [train.py:715] (7/8) Epoch 14, batch 28550, loss[loss=0.1495, simple_loss=0.2236, pruned_loss=0.03771, over 4780.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2101, pruned_loss=0.03108, over 972468.10 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:15:54,184 INFO [train.py:715] (7/8) Epoch 14, batch 28600, loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03189, over 4743.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2102, pruned_loss=0.03117, over 971474.94 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:16:34,513 INFO [train.py:715] (7/8) Epoch 14, batch 28650, loss[loss=0.1185, simple_loss=0.1884, pruned_loss=0.02431, over 4753.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2096, pruned_loss=0.0309, over 971123.33 frames.], batch size: 12, lr: 1.55e-04 +2022-05-08 05:17:14,559 INFO [train.py:715] (7/8) Epoch 14, batch 28700, loss[loss=0.1133, simple_loss=0.1781, pruned_loss=0.02422, over 4981.00 frames.], tot_loss[loss=0.136, simple_loss=0.2096, pruned_loss=0.03123, over 972251.07 frames.], batch size: 14, lr: 1.55e-04 +2022-05-08 05:17:52,654 INFO [train.py:715] (7/8) Epoch 14, batch 28750, loss[loss=0.1112, simple_loss=0.1861, pruned_loss=0.0182, over 4778.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2092, pruned_loss=0.0313, over 971868.56 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 05:18:32,374 INFO [train.py:715] (7/8) Epoch 14, batch 28800, loss[loss=0.125, simple_loss=0.2073, pruned_loss=0.02132, over 4820.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2098, pruned_loss=0.03139, over 971846.02 frames.], batch size: 27, lr: 1.55e-04 +2022-05-08 05:19:12,486 INFO [train.py:715] (7/8) Epoch 14, batch 28850, loss[loss=0.1239, simple_loss=0.2098, pruned_loss=0.01904, over 4958.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03118, over 971853.67 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:19:52,375 INFO [train.py:715] (7/8) Epoch 14, batch 28900, loss[loss=0.1231, simple_loss=0.2046, pruned_loss=0.02074, over 4921.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2082, pruned_loss=0.03101, over 971532.32 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 05:20:30,227 INFO [train.py:715] (7/8) Epoch 14, batch 28950, loss[loss=0.1251, simple_loss=0.2093, pruned_loss=0.02043, over 4937.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2088, pruned_loss=0.03123, over 971642.36 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 05:21:10,701 INFO 
[train.py:715] (7/8) Epoch 14, batch 29000, loss[loss=0.1293, simple_loss=0.2062, pruned_loss=0.02621, over 4799.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2081, pruned_loss=0.0308, over 971752.83 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:21:50,338 INFO [train.py:715] (7/8) Epoch 14, batch 29050, loss[loss=0.147, simple_loss=0.2302, pruned_loss=0.03194, over 4764.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.0308, over 971767.92 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:22:29,107 INFO [train.py:715] (7/8) Epoch 14, batch 29100, loss[loss=0.1476, simple_loss=0.2167, pruned_loss=0.03926, over 4795.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03061, over 971214.74 frames.], batch size: 18, lr: 1.55e-04 +2022-05-08 05:23:08,496 INFO [train.py:715] (7/8) Epoch 14, batch 29150, loss[loss=0.1503, simple_loss=0.2213, pruned_loss=0.03969, over 4896.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03076, over 971023.15 frames.], batch size: 39, lr: 1.55e-04 +2022-05-08 05:23:48,535 INFO [train.py:715] (7/8) Epoch 14, batch 29200, loss[loss=0.1363, simple_loss=0.2037, pruned_loss=0.03447, over 4957.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03098, over 970982.53 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:24:28,396 INFO [train.py:715] (7/8) Epoch 14, batch 29250, loss[loss=0.1593, simple_loss=0.2188, pruned_loss=0.04996, over 4748.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03102, over 970535.40 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:25:06,492 INFO [train.py:715] (7/8) Epoch 14, batch 29300, loss[loss=0.1287, simple_loss=0.2047, pruned_loss=0.02639, over 4762.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03083, over 970294.06 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:25:46,611 INFO [train.py:715] (7/8) Epoch 14, batch 29350, loss[loss=0.1364, simple_loss=0.211, pruned_loss=0.03086, over 4956.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03023, over 971266.23 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 05:26:26,521 INFO [train.py:715] (7/8) Epoch 14, batch 29400, loss[loss=0.1342, simple_loss=0.1958, pruned_loss=0.03627, over 4917.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03017, over 971588.83 frames.], batch size: 32, lr: 1.55e-04 +2022-05-08 05:27:05,394 INFO [train.py:715] (7/8) Epoch 14, batch 29450, loss[loss=0.1432, simple_loss=0.2016, pruned_loss=0.04234, over 4710.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03066, over 971642.74 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:27:45,244 INFO [train.py:715] (7/8) Epoch 14, batch 29500, loss[loss=0.1154, simple_loss=0.1937, pruned_loss=0.01858, over 4926.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2074, pruned_loss=0.03016, over 971566.20 frames.], batch size: 21, lr: 1.55e-04 +2022-05-08 05:28:25,581 INFO [train.py:715] (7/8) Epoch 14, batch 29550, loss[loss=0.1596, simple_loss=0.2213, pruned_loss=0.04897, over 4720.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02976, over 972183.97 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:29:05,391 INFO [train.py:715] (7/8) Epoch 14, batch 29600, loss[loss=0.1147, simple_loss=0.1952, pruned_loss=0.01714, over 4903.00 frames.], tot_loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.0296, over 972563.26 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:29:44,396 INFO [train.py:715] 
(7/8) Epoch 14, batch 29650, loss[loss=0.1421, simple_loss=0.2052, pruned_loss=0.03951, over 4953.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02981, over 972853.25 frames.], batch size: 24, lr: 1.55e-04 +2022-05-08 05:30:25,190 INFO [train.py:715] (7/8) Epoch 14, batch 29700, loss[loss=0.1365, simple_loss=0.2065, pruned_loss=0.0332, over 4840.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03013, over 972143.41 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 05:31:06,283 INFO [train.py:715] (7/8) Epoch 14, batch 29750, loss[loss=0.1413, simple_loss=0.2139, pruned_loss=0.03436, over 4976.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03029, over 971754.51 frames.], batch size: 31, lr: 1.55e-04 +2022-05-08 05:31:45,875 INFO [train.py:715] (7/8) Epoch 14, batch 29800, loss[loss=0.1396, simple_loss=0.2149, pruned_loss=0.03217, over 4748.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.0303, over 972790.24 frames.], batch size: 16, lr: 1.55e-04 +2022-05-08 05:32:26,697 INFO [train.py:715] (7/8) Epoch 14, batch 29850, loss[loss=0.133, simple_loss=0.2092, pruned_loss=0.0284, over 4812.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02979, over 973424.89 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 05:33:06,683 INFO [train.py:715] (7/8) Epoch 14, batch 29900, loss[loss=0.1056, simple_loss=0.1851, pruned_loss=0.01305, over 4933.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.0301, over 973101.39 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 05:33:46,337 INFO [train.py:715] (7/8) Epoch 14, batch 29950, loss[loss=0.1005, simple_loss=0.1704, pruned_loss=0.01532, over 4948.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03019, over 972877.92 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 05:34:25,101 INFO [train.py:715] (7/8) Epoch 14, batch 30000, loss[loss=0.1389, simple_loss=0.2199, pruned_loss=0.0289, over 4928.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2094, pruned_loss=0.03041, over 972370.76 frames.], batch size: 29, lr: 1.55e-04 +2022-05-08 05:34:25,101 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 05:34:42,243 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1052, simple_loss=0.189, pruned_loss=0.01075, over 914524.00 frames. 
+2022-05-08 05:35:21,215 INFO [train.py:715] (7/8) Epoch 14, batch 30050, loss[loss=0.1233, simple_loss=0.1972, pruned_loss=0.02472, over 4777.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2094, pruned_loss=0.03046, over 971011.24 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:36:01,186 INFO [train.py:715] (7/8) Epoch 14, batch 30100, loss[loss=0.1428, simple_loss=0.2177, pruned_loss=0.03393, over 4755.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2092, pruned_loss=0.03054, over 971611.49 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:36:42,309 INFO [train.py:715] (7/8) Epoch 14, batch 30150, loss[loss=0.1269, simple_loss=0.1914, pruned_loss=0.03122, over 4918.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03112, over 971563.81 frames.], batch size: 23, lr: 1.55e-04 +2022-05-08 05:37:21,245 INFO [train.py:715] (7/8) Epoch 14, batch 30200, loss[loss=0.1501, simple_loss=0.2077, pruned_loss=0.04631, over 4766.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2094, pruned_loss=0.03115, over 971644.52 frames.], batch size: 19, lr: 1.55e-04 +2022-05-08 05:38:01,177 INFO [train.py:715] (7/8) Epoch 14, batch 30250, loss[loss=0.1202, simple_loss=0.1958, pruned_loss=0.02231, over 4821.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2099, pruned_loss=0.03127, over 971585.84 frames.], batch size: 27, lr: 1.55e-04 +2022-05-08 05:38:41,858 INFO [train.py:715] (7/8) Epoch 14, batch 30300, loss[loss=0.15, simple_loss=0.2246, pruned_loss=0.03772, over 4868.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03083, over 971960.04 frames.], batch size: 30, lr: 1.55e-04 +2022-05-08 05:39:21,370 INFO [train.py:715] (7/8) Epoch 14, batch 30350, loss[loss=0.1264, simple_loss=0.1921, pruned_loss=0.03031, over 4772.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2081, pruned_loss=0.03062, over 972225.96 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:40:00,594 INFO [train.py:715] (7/8) Epoch 14, batch 30400, loss[loss=0.1444, simple_loss=0.2107, pruned_loss=0.03901, over 4690.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03031, over 971966.53 frames.], batch size: 15, lr: 1.55e-04 +2022-05-08 05:40:40,497 INFO [train.py:715] (7/8) Epoch 14, batch 30450, loss[loss=0.1339, simple_loss=0.2112, pruned_loss=0.02834, over 4980.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02991, over 972380.99 frames.], batch size: 25, lr: 1.55e-04 +2022-05-08 05:41:20,822 INFO [train.py:715] (7/8) Epoch 14, batch 30500, loss[loss=0.1356, simple_loss=0.2053, pruned_loss=0.03292, over 4782.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02995, over 973084.04 frames.], batch size: 17, lr: 1.55e-04 +2022-05-08 05:41:59,761 INFO [train.py:715] (7/8) Epoch 14, batch 30550, loss[loss=0.1002, simple_loss=0.176, pruned_loss=0.0122, over 4799.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02972, over 972933.09 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 05:42:39,646 INFO [train.py:715] (7/8) Epoch 14, batch 30600, loss[loss=0.1342, simple_loss=0.2116, pruned_loss=0.02838, over 4873.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02989, over 973089.95 frames.], batch size: 22, lr: 1.54e-04 +2022-05-08 05:43:20,417 INFO [train.py:715] (7/8) Epoch 14, batch 30650, loss[loss=0.1236, simple_loss=0.2023, pruned_loss=0.02245, over 4820.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02965, over 972799.75 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 
05:43:59,993 INFO [train.py:715] (7/8) Epoch 14, batch 30700, loss[loss=0.1402, simple_loss=0.2146, pruned_loss=0.03292, over 4948.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.02999, over 973022.47 frames.], batch size: 29, lr: 1.54e-04 +2022-05-08 05:44:39,758 INFO [train.py:715] (7/8) Epoch 14, batch 30750, loss[loss=0.1163, simple_loss=0.1934, pruned_loss=0.01959, over 4891.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02986, over 972771.30 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 05:45:19,657 INFO [train.py:715] (7/8) Epoch 14, batch 30800, loss[loss=0.154, simple_loss=0.2177, pruned_loss=0.04509, over 4701.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03035, over 972826.16 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 05:46:00,463 INFO [train.py:715] (7/8) Epoch 14, batch 30850, loss[loss=0.1257, simple_loss=0.2076, pruned_loss=0.02191, over 4988.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03065, over 972676.32 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 05:46:39,514 INFO [train.py:715] (7/8) Epoch 14, batch 30900, loss[loss=0.113, simple_loss=0.1902, pruned_loss=0.01791, over 4876.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03048, over 972903.40 frames.], batch size: 22, lr: 1.54e-04 +2022-05-08 05:47:18,043 INFO [train.py:715] (7/8) Epoch 14, batch 30950, loss[loss=0.1143, simple_loss=0.1921, pruned_loss=0.01827, over 4860.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03048, over 972830.79 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 05:47:57,806 INFO [train.py:715] (7/8) Epoch 14, batch 31000, loss[loss=0.1352, simple_loss=0.2122, pruned_loss=0.02906, over 4812.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.0305, over 972340.62 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 05:48:37,484 INFO [train.py:715] (7/8) Epoch 14, batch 31050, loss[loss=0.1453, simple_loss=0.2243, pruned_loss=0.03319, over 4703.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03039, over 972812.07 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 05:49:17,856 INFO [train.py:715] (7/8) Epoch 14, batch 31100, loss[loss=0.1259, simple_loss=0.2068, pruned_loss=0.02246, over 4831.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03055, over 972439.39 frames.], batch size: 27, lr: 1.54e-04 +2022-05-08 05:49:58,980 INFO [train.py:715] (7/8) Epoch 14, batch 31150, loss[loss=0.1357, simple_loss=0.2103, pruned_loss=0.03056, over 4901.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2095, pruned_loss=0.03104, over 971984.85 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 05:50:40,132 INFO [train.py:715] (7/8) Epoch 14, batch 31200, loss[loss=0.1511, simple_loss=0.2213, pruned_loss=0.04043, over 4983.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2097, pruned_loss=0.03095, over 972112.39 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 05:51:19,911 INFO [train.py:715] (7/8) Epoch 14, batch 31250, loss[loss=0.1321, simple_loss=0.1984, pruned_loss=0.03289, over 4806.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03077, over 972241.44 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 05:52:00,316 INFO [train.py:715] (7/8) Epoch 14, batch 31300, loss[loss=0.1432, simple_loss=0.2181, pruned_loss=0.03416, over 4898.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03024, over 971123.03 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 
05:52:41,157 INFO [train.py:715] (7/8) Epoch 14, batch 31350, loss[loss=0.1152, simple_loss=0.1966, pruned_loss=0.01696, over 4800.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02991, over 970714.65 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 05:53:21,050 INFO [train.py:715] (7/8) Epoch 14, batch 31400, loss[loss=0.1172, simple_loss=0.196, pruned_loss=0.01917, over 4791.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02954, over 970772.46 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 05:54:00,723 INFO [train.py:715] (7/8) Epoch 14, batch 31450, loss[loss=0.1426, simple_loss=0.2245, pruned_loss=0.03036, over 4832.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02978, over 971512.10 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 05:54:40,752 INFO [train.py:715] (7/8) Epoch 14, batch 31500, loss[loss=0.1323, simple_loss=0.2099, pruned_loss=0.02739, over 4935.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02995, over 972124.47 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 05:55:21,340 INFO [train.py:715] (7/8) Epoch 14, batch 31550, loss[loss=0.1307, simple_loss=0.2022, pruned_loss=0.02956, over 4695.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03003, over 972339.97 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 05:56:01,192 INFO [train.py:715] (7/8) Epoch 14, batch 31600, loss[loss=0.1158, simple_loss=0.1871, pruned_loss=0.02227, over 4812.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03038, over 973131.45 frames.], batch size: 12, lr: 1.54e-04 +2022-05-08 05:56:40,703 INFO [train.py:715] (7/8) Epoch 14, batch 31650, loss[loss=0.1244, simple_loss=0.2051, pruned_loss=0.02179, over 4978.00 frames.], tot_loss[loss=0.135, simple_loss=0.2094, pruned_loss=0.03031, over 974685.96 frames.], batch size: 28, lr: 1.54e-04 +2022-05-08 05:57:21,078 INFO [train.py:715] (7/8) Epoch 14, batch 31700, loss[loss=0.1114, simple_loss=0.1865, pruned_loss=0.0181, over 4938.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2095, pruned_loss=0.03021, over 974024.05 frames.], batch size: 29, lr: 1.54e-04 +2022-05-08 05:58:00,668 INFO [train.py:715] (7/8) Epoch 14, batch 31750, loss[loss=0.1377, simple_loss=0.2163, pruned_loss=0.02959, over 4819.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03034, over 974139.59 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 05:58:40,579 INFO [train.py:715] (7/8) Epoch 14, batch 31800, loss[loss=0.1249, simple_loss=0.2061, pruned_loss=0.02188, over 4895.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02987, over 974274.36 frames.], batch size: 22, lr: 1.54e-04 +2022-05-08 05:59:20,885 INFO [train.py:715] (7/8) Epoch 14, batch 31850, loss[loss=0.1146, simple_loss=0.1974, pruned_loss=0.01584, over 4943.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02946, over 974748.83 frames.], batch size: 29, lr: 1.54e-04 +2022-05-08 06:00:01,591 INFO [train.py:715] (7/8) Epoch 14, batch 31900, loss[loss=0.1411, simple_loss=0.2121, pruned_loss=0.03501, over 4895.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02978, over 974805.95 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:00:40,986 INFO [train.py:715] (7/8) Epoch 14, batch 31950, loss[loss=0.1553, simple_loss=0.236, pruned_loss=0.03736, over 4750.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03052, over 974955.62 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:01:20,568 
INFO [train.py:715] (7/8) Epoch 14, batch 32000, loss[loss=0.1291, simple_loss=0.1993, pruned_loss=0.0295, over 4916.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03026, over 974671.43 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:02:01,142 INFO [train.py:715] (7/8) Epoch 14, batch 32050, loss[loss=0.1287, simple_loss=0.2028, pruned_loss=0.02726, over 4780.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03028, over 974217.15 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:02:40,619 INFO [train.py:715] (7/8) Epoch 14, batch 32100, loss[loss=0.1424, simple_loss=0.2179, pruned_loss=0.03344, over 4804.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.03084, over 973560.34 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:03:20,383 INFO [train.py:715] (7/8) Epoch 14, batch 32150, loss[loss=0.1322, simple_loss=0.1973, pruned_loss=0.03357, over 4940.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2076, pruned_loss=0.03055, over 973171.51 frames.], batch size: 23, lr: 1.54e-04 +2022-05-08 06:04:00,812 INFO [train.py:715] (7/8) Epoch 14, batch 32200, loss[loss=0.1602, simple_loss=0.2242, pruned_loss=0.04811, over 4852.00 frames.], tot_loss[loss=0.135, simple_loss=0.2082, pruned_loss=0.03086, over 972138.79 frames.], batch size: 20, lr: 1.54e-04 +2022-05-08 06:04:41,249 INFO [train.py:715] (7/8) Epoch 14, batch 32250, loss[loss=0.1188, simple_loss=0.1977, pruned_loss=0.01995, over 4831.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03074, over 972092.83 frames.], batch size: 25, lr: 1.54e-04 +2022-05-08 06:05:20,520 INFO [train.py:715] (7/8) Epoch 14, batch 32300, loss[loss=0.1189, simple_loss=0.2016, pruned_loss=0.01812, over 4981.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2091, pruned_loss=0.03087, over 972005.31 frames.], batch size: 28, lr: 1.54e-04 +2022-05-08 06:06:00,152 INFO [train.py:715] (7/8) Epoch 14, batch 32350, loss[loss=0.1485, simple_loss=0.2217, pruned_loss=0.03766, over 4792.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03077, over 971583.57 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 06:06:40,262 INFO [train.py:715] (7/8) Epoch 14, batch 32400, loss[loss=0.1514, simple_loss=0.2226, pruned_loss=0.04015, over 4967.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2102, pruned_loss=0.03165, over 971929.17 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:07:19,954 INFO [train.py:715] (7/8) Epoch 14, batch 32450, loss[loss=0.1396, simple_loss=0.2127, pruned_loss=0.03329, over 4969.00 frames.], tot_loss[loss=0.1372, simple_loss=0.2109, pruned_loss=0.0318, over 972523.94 frames.], batch size: 24, lr: 1.54e-04 +2022-05-08 06:07:59,621 INFO [train.py:715] (7/8) Epoch 14, batch 32500, loss[loss=0.1451, simple_loss=0.2092, pruned_loss=0.04053, over 4932.00 frames.], tot_loss[loss=0.1368, simple_loss=0.2104, pruned_loss=0.03164, over 973378.68 frames.], batch size: 29, lr: 1.54e-04 +2022-05-08 06:08:39,996 INFO [train.py:715] (7/8) Epoch 14, batch 32550, loss[loss=0.1366, simple_loss=0.2159, pruned_loss=0.02867, over 4763.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2102, pruned_loss=0.03109, over 972881.47 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:09:20,735 INFO [train.py:715] (7/8) Epoch 14, batch 32600, loss[loss=0.1439, simple_loss=0.2199, pruned_loss=0.03395, over 4661.00 frames.], tot_loss[loss=0.136, simple_loss=0.2098, pruned_loss=0.03111, over 972648.58 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 06:10:00,314 INFO 
[train.py:715] (7/8) Epoch 14, batch 32650, loss[loss=0.1664, simple_loss=0.2343, pruned_loss=0.04927, over 4982.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2094, pruned_loss=0.03101, over 972195.27 frames.], batch size: 31, lr: 1.54e-04 +2022-05-08 06:10:43,612 INFO [train.py:715] (7/8) Epoch 14, batch 32700, loss[loss=0.1311, simple_loss=0.2116, pruned_loss=0.02529, over 4785.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03065, over 972225.95 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:11:24,773 INFO [train.py:715] (7/8) Epoch 14, batch 32750, loss[loss=0.125, simple_loss=0.1934, pruned_loss=0.02827, over 4877.00 frames.], tot_loss[loss=0.135, simple_loss=0.2082, pruned_loss=0.03089, over 972327.96 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 06:12:05,090 INFO [train.py:715] (7/8) Epoch 14, batch 32800, loss[loss=0.1406, simple_loss=0.2078, pruned_loss=0.0367, over 4899.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03088, over 972726.56 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:12:45,517 INFO [train.py:715] (7/8) Epoch 14, batch 32850, loss[loss=0.1208, simple_loss=0.1941, pruned_loss=0.0237, over 4778.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03055, over 972492.29 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:13:26,811 INFO [train.py:715] (7/8) Epoch 14, batch 32900, loss[loss=0.1466, simple_loss=0.2137, pruned_loss=0.03979, over 4957.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03052, over 973241.15 frames.], batch size: 35, lr: 1.54e-04 +2022-05-08 06:14:07,971 INFO [train.py:715] (7/8) Epoch 14, batch 32950, loss[loss=0.1419, simple_loss=0.2253, pruned_loss=0.02923, over 4688.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03019, over 972379.28 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:14:47,673 INFO [train.py:715] (7/8) Epoch 14, batch 33000, loss[loss=0.1374, simple_loss=0.2141, pruned_loss=0.03031, over 4956.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03035, over 972519.24 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 06:14:47,674 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 06:15:25,560 INFO [train.py:742] (7/8) Epoch 14, validation: loss=0.1051, simple_loss=0.1889, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-08 06:16:05,301 INFO [train.py:715] (7/8) Epoch 14, batch 33050, loss[loss=0.1426, simple_loss=0.2109, pruned_loss=0.03716, over 4978.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.03007, over 973542.50 frames.], batch size: 35, lr: 1.54e-04 +2022-05-08 06:16:46,147 INFO [train.py:715] (7/8) Epoch 14, batch 33100, loss[loss=0.125, simple_loss=0.208, pruned_loss=0.02096, over 4937.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2097, pruned_loss=0.03054, over 973211.63 frames.], batch size: 29, lr: 1.54e-04 +2022-05-08 06:17:27,373 INFO [train.py:715] (7/8) Epoch 14, batch 33150, loss[loss=0.1433, simple_loss=0.2161, pruned_loss=0.03531, over 4739.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02988, over 973004.87 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:18:07,432 INFO [train.py:715] (7/8) Epoch 14, batch 33200, loss[loss=0.1437, simple_loss=0.217, pruned_loss=0.03517, over 4744.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03018, over 972478.60 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:18:47,787 INFO [train.py:715] (7/8) Epoch 14, batch 33250, loss[loss=0.1361, simple_loss=0.2152, pruned_loss=0.02851, over 4869.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2096, pruned_loss=0.03077, over 972315.55 frames.], batch size: 20, lr: 1.54e-04 +2022-05-08 06:19:28,529 INFO [train.py:715] (7/8) Epoch 14, batch 33300, loss[loss=0.1285, simple_loss=0.1991, pruned_loss=0.02891, over 4758.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03037, over 972752.84 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:20:09,726 INFO [train.py:715] (7/8) Epoch 14, batch 33350, loss[loss=0.114, simple_loss=0.1967, pruned_loss=0.01563, over 4748.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03022, over 971928.84 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:20:49,901 INFO [train.py:715] (7/8) Epoch 14, batch 33400, loss[loss=0.134, simple_loss=0.1892, pruned_loss=0.0394, over 4804.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2096, pruned_loss=0.03027, over 972419.97 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:21:30,273 INFO [train.py:715] (7/8) Epoch 14, batch 33450, loss[loss=0.1332, simple_loss=0.2018, pruned_loss=0.03236, over 4637.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2101, pruned_loss=0.03084, over 971927.51 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:22:11,513 INFO [train.py:715] (7/8) Epoch 14, batch 33500, loss[loss=0.123, simple_loss=0.2019, pruned_loss=0.02202, over 4759.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2102, pruned_loss=0.0308, over 971253.65 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:22:51,812 INFO [train.py:715] (7/8) Epoch 14, batch 33550, loss[loss=0.1297, simple_loss=0.2055, pruned_loss=0.02697, over 4892.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2093, pruned_loss=0.03063, over 971121.38 frames.], batch size: 22, lr: 1.54e-04 +2022-05-08 06:23:33,015 INFO [train.py:715] (7/8) Epoch 14, batch 33600, loss[loss=0.1288, simple_loss=0.2004, pruned_loss=0.02864, over 4861.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.02998, over 971291.79 frames.], batch size: 20, lr: 1.54e-04 +2022-05-08 06:24:14,054 INFO [train.py:715] (7/8) Epoch 14, batch 33650, loss[loss=0.1116, simple_loss=0.19, pruned_loss=0.0166, over 4786.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03004, over 970997.87 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 
06:24:54,975 INFO [train.py:715] (7/8) Epoch 14, batch 33700, loss[loss=0.1491, simple_loss=0.2216, pruned_loss=0.03833, over 4984.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02986, over 970806.90 frames.], batch size: 28, lr: 1.54e-04 +2022-05-08 06:25:35,120 INFO [train.py:715] (7/8) Epoch 14, batch 33750, loss[loss=0.1245, simple_loss=0.1915, pruned_loss=0.02876, over 4738.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03001, over 970972.63 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:26:15,688 INFO [train.py:715] (7/8) Epoch 14, batch 33800, loss[loss=0.1447, simple_loss=0.2163, pruned_loss=0.03651, over 4968.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03011, over 971612.52 frames.], batch size: 35, lr: 1.54e-04 +2022-05-08 06:26:56,943 INFO [train.py:715] (7/8) Epoch 14, batch 33850, loss[loss=0.1588, simple_loss=0.216, pruned_loss=0.05077, over 4767.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03011, over 972003.38 frames.], batch size: 14, lr: 1.54e-04 +2022-05-08 06:27:37,014 INFO [train.py:715] (7/8) Epoch 14, batch 33900, loss[loss=0.1432, simple_loss=0.213, pruned_loss=0.03672, over 4929.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.0296, over 972591.58 frames.], batch size: 39, lr: 1.54e-04 +2022-05-08 06:28:17,554 INFO [train.py:715] (7/8) Epoch 14, batch 33950, loss[loss=0.1121, simple_loss=0.1858, pruned_loss=0.01924, over 4823.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02966, over 972591.23 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:28:58,274 INFO [train.py:715] (7/8) Epoch 14, batch 34000, loss[loss=0.1224, simple_loss=0.2088, pruned_loss=0.01797, over 4918.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03013, over 972976.98 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:29:39,257 INFO [train.py:715] (7/8) Epoch 14, batch 34050, loss[loss=0.1115, simple_loss=0.1783, pruned_loss=0.02238, over 4850.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03014, over 973189.34 frames.], batch size: 13, lr: 1.54e-04 +2022-05-08 06:30:19,214 INFO [train.py:715] (7/8) Epoch 14, batch 34100, loss[loss=0.129, simple_loss=0.1982, pruned_loss=0.0299, over 4916.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03026, over 972959.99 frames.], batch size: 18, lr: 1.54e-04 +2022-05-08 06:30:59,701 INFO [train.py:715] (7/8) Epoch 14, batch 34150, loss[loss=0.1359, simple_loss=0.2077, pruned_loss=0.03209, over 4749.00 frames.], tot_loss[loss=0.1352, simple_loss=0.209, pruned_loss=0.0307, over 972393.79 frames.], batch size: 16, lr: 1.54e-04 +2022-05-08 06:31:40,135 INFO [train.py:715] (7/8) Epoch 14, batch 34200, loss[loss=0.1442, simple_loss=0.2225, pruned_loss=0.033, over 4815.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2092, pruned_loss=0.03092, over 971658.33 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:32:20,302 INFO [train.py:715] (7/8) Epoch 14, batch 34250, loss[loss=0.1173, simple_loss=0.2053, pruned_loss=0.01464, over 4824.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03069, over 971288.17 frames.], batch size: 26, lr: 1.54e-04 +2022-05-08 06:33:00,840 INFO [train.py:715] (7/8) Epoch 14, batch 34300, loss[loss=0.1232, simple_loss=0.2096, pruned_loss=0.01838, over 4922.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03011, over 971320.54 frames.], batch size: 39, lr: 1.54e-04 +2022-05-08 06:33:41,488 
INFO [train.py:715] (7/8) Epoch 14, batch 34350, loss[loss=0.1305, simple_loss=0.2066, pruned_loss=0.02718, over 4808.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.0305, over 971285.89 frames.], batch size: 26, lr: 1.54e-04 +2022-05-08 06:34:22,187 INFO [train.py:715] (7/8) Epoch 14, batch 34400, loss[loss=0.1624, simple_loss=0.2237, pruned_loss=0.05057, over 4856.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.0309, over 972224.66 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 06:35:01,775 INFO [train.py:715] (7/8) Epoch 14, batch 34450, loss[loss=0.1123, simple_loss=0.1903, pruned_loss=0.01712, over 4945.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2081, pruned_loss=0.03067, over 972217.36 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:35:42,632 INFO [train.py:715] (7/8) Epoch 14, batch 34500, loss[loss=0.169, simple_loss=0.2296, pruned_loss=0.05426, over 4833.00 frames.], tot_loss[loss=0.135, simple_loss=0.2083, pruned_loss=0.03082, over 972042.65 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 06:36:23,338 INFO [train.py:715] (7/8) Epoch 14, batch 34550, loss[loss=0.1373, simple_loss=0.2144, pruned_loss=0.0301, over 4811.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03077, over 971995.64 frames.], batch size: 27, lr: 1.54e-04 +2022-05-08 06:37:03,435 INFO [train.py:715] (7/8) Epoch 14, batch 34600, loss[loss=0.1255, simple_loss=0.1864, pruned_loss=0.03232, over 4875.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03073, over 971405.35 frames.], batch size: 32, lr: 1.54e-04 +2022-05-08 06:37:43,655 INFO [train.py:715] (7/8) Epoch 14, batch 34650, loss[loss=0.1281, simple_loss=0.2036, pruned_loss=0.02627, over 4758.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.03068, over 971854.76 frames.], batch size: 17, lr: 1.54e-04 +2022-05-08 06:38:24,401 INFO [train.py:715] (7/8) Epoch 14, batch 34700, loss[loss=0.1393, simple_loss=0.2126, pruned_loss=0.03295, over 4691.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2082, pruned_loss=0.03065, over 972252.35 frames.], batch size: 15, lr: 1.54e-04 +2022-05-08 06:39:03,272 INFO [train.py:715] (7/8) Epoch 14, batch 34750, loss[loss=0.133, simple_loss=0.2028, pruned_loss=0.03158, over 4934.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03053, over 972511.46 frames.], batch size: 21, lr: 1.54e-04 +2022-05-08 06:39:40,049 INFO [train.py:715] (7/8) Epoch 14, batch 34800, loss[loss=0.1725, simple_loss=0.2488, pruned_loss=0.04805, over 4896.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03025, over 972451.84 frames.], batch size: 19, lr: 1.54e-04 +2022-05-08 06:40:33,624 INFO [train.py:715] (7/8) Epoch 15, batch 0, loss[loss=0.1298, simple_loss=0.2077, pruned_loss=0.02591, over 4819.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2077, pruned_loss=0.02591, over 4819.00 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 06:41:12,925 INFO [train.py:715] (7/8) Epoch 15, batch 50, loss[loss=0.1388, simple_loss=0.213, pruned_loss=0.03228, over 4849.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2068, pruned_loss=0.03016, over 219300.37 frames.], batch size: 20, lr: 1.49e-04 +2022-05-08 06:41:54,171 INFO [train.py:715] (7/8) Epoch 15, batch 100, loss[loss=0.1251, simple_loss=0.1971, pruned_loss=0.02652, over 4759.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03041, over 386947.95 frames.], batch size: 19, lr: 1.49e-04 +2022-05-08 06:42:35,662 INFO [train.py:715] (7/8) 
Epoch 15, batch 150, loss[loss=0.1578, simple_loss=0.2278, pruned_loss=0.04386, over 4871.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02979, over 517376.44 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 06:43:15,922 INFO [train.py:715] (7/8) Epoch 15, batch 200, loss[loss=0.1378, simple_loss=0.1971, pruned_loss=0.03925, over 4858.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.0301, over 619062.99 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 06:43:56,391 INFO [train.py:715] (7/8) Epoch 15, batch 250, loss[loss=0.1419, simple_loss=0.2239, pruned_loss=0.02996, over 4824.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.02999, over 697239.28 frames.], batch size: 26, lr: 1.49e-04 +2022-05-08 06:44:37,772 INFO [train.py:715] (7/8) Epoch 15, batch 300, loss[loss=0.1262, simple_loss=0.1972, pruned_loss=0.0276, over 4728.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2074, pruned_loss=0.03024, over 757669.77 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 06:45:18,789 INFO [train.py:715] (7/8) Epoch 15, batch 350, loss[loss=0.1124, simple_loss=0.1776, pruned_loss=0.02356, over 4828.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2076, pruned_loss=0.03052, over 804955.70 frames.], batch size: 13, lr: 1.49e-04 +2022-05-08 06:45:58,478 INFO [train.py:715] (7/8) Epoch 15, batch 400, loss[loss=0.1331, simple_loss=0.206, pruned_loss=0.03011, over 4805.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03046, over 841537.11 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 06:46:39,371 INFO [train.py:715] (7/8) Epoch 15, batch 450, loss[loss=0.143, simple_loss=0.2182, pruned_loss=0.0339, over 4805.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2091, pruned_loss=0.03096, over 871109.30 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 06:47:20,105 INFO [train.py:715] (7/8) Epoch 15, batch 500, loss[loss=0.1168, simple_loss=0.192, pruned_loss=0.02081, over 4768.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03089, over 894239.31 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:48:00,515 INFO [train.py:715] (7/8) Epoch 15, batch 550, loss[loss=0.1477, simple_loss=0.2211, pruned_loss=0.03722, over 4874.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03052, over 911487.68 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 06:48:40,069 INFO [train.py:715] (7/8) Epoch 15, batch 600, loss[loss=0.1085, simple_loss=0.1819, pruned_loss=0.01749, over 4795.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2075, pruned_loss=0.03048, over 925003.28 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 06:49:21,147 INFO [train.py:715] (7/8) Epoch 15, batch 650, loss[loss=0.1801, simple_loss=0.2487, pruned_loss=0.05579, over 4975.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2092, pruned_loss=0.031, over 936040.85 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 06:50:01,510 INFO [train.py:715] (7/8) Epoch 15, batch 700, loss[loss=0.1199, simple_loss=0.1912, pruned_loss=0.02432, over 4783.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03081, over 943811.83 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 06:50:41,532 INFO [train.py:715] (7/8) Epoch 15, batch 750, loss[loss=0.1286, simple_loss=0.1977, pruned_loss=0.02974, over 4951.00 frames.], tot_loss[loss=0.1364, simple_loss=0.2099, pruned_loss=0.03148, over 950878.65 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 06:51:22,009 INFO [train.py:715] (7/8) Epoch 15, batch 800, loss[loss=0.1658, 
simple_loss=0.2364, pruned_loss=0.04758, over 4943.00 frames.], tot_loss[loss=0.1362, simple_loss=0.2097, pruned_loss=0.03132, over 955927.26 frames.], batch size: 39, lr: 1.49e-04 +2022-05-08 06:52:02,791 INFO [train.py:715] (7/8) Epoch 15, batch 850, loss[loss=0.1121, simple_loss=0.1888, pruned_loss=0.01767, over 4858.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.03071, over 959935.72 frames.], batch size: 13, lr: 1.49e-04 +2022-05-08 06:52:43,867 INFO [train.py:715] (7/8) Epoch 15, batch 900, loss[loss=0.1226, simple_loss=0.2075, pruned_loss=0.01881, over 4938.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03001, over 962277.16 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 06:53:23,532 INFO [train.py:715] (7/8) Epoch 15, batch 950, loss[loss=0.118, simple_loss=0.1928, pruned_loss=0.02158, over 4912.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02976, over 964376.04 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 06:54:04,078 INFO [train.py:715] (7/8) Epoch 15, batch 1000, loss[loss=0.152, simple_loss=0.2186, pruned_loss=0.04268, over 4983.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.0307, over 967026.20 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 06:54:44,305 INFO [train.py:715] (7/8) Epoch 15, batch 1050, loss[loss=0.1507, simple_loss=0.2225, pruned_loss=0.03942, over 4893.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03085, over 968728.53 frames.], batch size: 19, lr: 1.49e-04 +2022-05-08 06:55:23,592 INFO [train.py:715] (7/8) Epoch 15, batch 1100, loss[loss=0.1643, simple_loss=0.2385, pruned_loss=0.04511, over 4781.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03056, over 969721.66 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 06:56:04,692 INFO [train.py:715] (7/8) Epoch 15, batch 1150, loss[loss=0.1755, simple_loss=0.2346, pruned_loss=0.05817, over 4919.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.0308, over 970674.52 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 06:56:45,830 INFO [train.py:715] (7/8) Epoch 15, batch 1200, loss[loss=0.1025, simple_loss=0.1687, pruned_loss=0.01822, over 4690.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03007, over 970267.89 frames.], batch size: 12, lr: 1.49e-04 +2022-05-08 06:57:26,543 INFO [train.py:715] (7/8) Epoch 15, batch 1250, loss[loss=0.1765, simple_loss=0.2345, pruned_loss=0.0593, over 4758.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03012, over 971036.66 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 06:58:06,007 INFO [train.py:715] (7/8) Epoch 15, batch 1300, loss[loss=0.1265, simple_loss=0.2088, pruned_loss=0.02214, over 4822.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02983, over 971758.98 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 06:58:46,691 INFO [train.py:715] (7/8) Epoch 15, batch 1350, loss[loss=0.137, simple_loss=0.2128, pruned_loss=0.03061, over 4866.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02996, over 971723.81 frames.], batch size: 20, lr: 1.49e-04 +2022-05-08 06:59:27,348 INFO [train.py:715] (7/8) Epoch 15, batch 1400, loss[loss=0.1534, simple_loss=0.2295, pruned_loss=0.03862, over 4786.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03014, over 972040.04 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:00:07,540 INFO [train.py:715] (7/8) Epoch 15, batch 1450, loss[loss=0.1273, simple_loss=0.1994, 
pruned_loss=0.02765, over 4932.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03004, over 972067.92 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 07:00:47,345 INFO [train.py:715] (7/8) Epoch 15, batch 1500, loss[loss=0.1288, simple_loss=0.199, pruned_loss=0.02933, over 4796.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03008, over 972305.72 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:01:28,520 INFO [train.py:715] (7/8) Epoch 15, batch 1550, loss[loss=0.134, simple_loss=0.2097, pruned_loss=0.02913, over 4957.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03043, over 971572.36 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:02:08,747 INFO [train.py:715] (7/8) Epoch 15, batch 1600, loss[loss=0.132, simple_loss=0.2115, pruned_loss=0.02621, over 4937.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03028, over 971953.53 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 07:02:47,780 INFO [train.py:715] (7/8) Epoch 15, batch 1650, loss[loss=0.1313, simple_loss=0.2084, pruned_loss=0.0271, over 4922.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2089, pruned_loss=0.02997, over 972459.22 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 07:03:28,307 INFO [train.py:715] (7/8) Epoch 15, batch 1700, loss[loss=0.1344, simple_loss=0.2117, pruned_loss=0.02855, over 4817.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02975, over 972170.49 frames.], batch size: 27, lr: 1.49e-04 +2022-05-08 07:04:08,889 INFO [train.py:715] (7/8) Epoch 15, batch 1750, loss[loss=0.1393, simple_loss=0.2121, pruned_loss=0.03324, over 4917.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02974, over 972544.18 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 07:04:48,968 INFO [train.py:715] (7/8) Epoch 15, batch 1800, loss[loss=0.103, simple_loss=0.1861, pruned_loss=0.009994, over 4947.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02963, over 973009.34 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 07:05:28,947 INFO [train.py:715] (7/8) Epoch 15, batch 1850, loss[loss=0.1242, simple_loss=0.1977, pruned_loss=0.02537, over 4894.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02997, over 972962.57 frames.], batch size: 18, lr: 1.49e-04 +2022-05-08 07:06:09,788 INFO [train.py:715] (7/8) Epoch 15, batch 1900, loss[loss=0.1241, simple_loss=0.2027, pruned_loss=0.02269, over 4804.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.03, over 972695.45 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 07:06:50,240 INFO [train.py:715] (7/8) Epoch 15, batch 1950, loss[loss=0.138, simple_loss=0.2194, pruned_loss=0.02826, over 4884.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02984, over 973285.48 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 07:07:29,416 INFO [train.py:715] (7/8) Epoch 15, batch 2000, loss[loss=0.1433, simple_loss=0.2151, pruned_loss=0.03581, over 4855.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03036, over 972658.54 frames.], batch size: 30, lr: 1.49e-04 +2022-05-08 07:08:10,506 INFO [train.py:715] (7/8) Epoch 15, batch 2050, loss[loss=0.1353, simple_loss=0.2147, pruned_loss=0.0279, over 4884.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2093, pruned_loss=0.03025, over 972226.40 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 07:08:50,819 INFO [train.py:715] (7/8) Epoch 15, batch 2100, loss[loss=0.1403, simple_loss=0.2149, pruned_loss=0.03286, over 
4807.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02992, over 972029.91 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 07:09:30,728 INFO [train.py:715] (7/8) Epoch 15, batch 2150, loss[loss=0.1535, simple_loss=0.2265, pruned_loss=0.04022, over 4916.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03013, over 972828.65 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:10:10,981 INFO [train.py:715] (7/8) Epoch 15, batch 2200, loss[loss=0.1452, simple_loss=0.2331, pruned_loss=0.02865, over 4943.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02997, over 973663.50 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:10:51,408 INFO [train.py:715] (7/8) Epoch 15, batch 2250, loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02907, over 4874.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02993, over 972980.38 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 07:11:31,553 INFO [train.py:715] (7/8) Epoch 15, batch 2300, loss[loss=0.1325, simple_loss=0.2092, pruned_loss=0.02791, over 4913.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02963, over 972014.11 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:12:11,049 INFO [train.py:715] (7/8) Epoch 15, batch 2350, loss[loss=0.1289, simple_loss=0.207, pruned_loss=0.02538, over 4839.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.0297, over 972931.37 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:12:51,321 INFO [train.py:715] (7/8) Epoch 15, batch 2400, loss[loss=0.1183, simple_loss=0.1923, pruned_loss=0.02216, over 4884.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03057, over 972282.10 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 07:13:31,564 INFO [train.py:715] (7/8) Epoch 15, batch 2450, loss[loss=0.159, simple_loss=0.2275, pruned_loss=0.04531, over 4974.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03026, over 973291.00 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:14:11,492 INFO [train.py:715] (7/8) Epoch 15, batch 2500, loss[loss=0.1423, simple_loss=0.2117, pruned_loss=0.0364, over 4886.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.0303, over 972561.31 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 07:14:50,610 INFO [train.py:715] (7/8) Epoch 15, batch 2550, loss[loss=0.1438, simple_loss=0.2262, pruned_loss=0.03066, over 4807.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2089, pruned_loss=0.0308, over 972531.27 frames.], batch size: 25, lr: 1.49e-04 +2022-05-08 07:15:31,417 INFO [train.py:715] (7/8) Epoch 15, batch 2600, loss[loss=0.144, simple_loss=0.2252, pruned_loss=0.03137, over 4812.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03063, over 973438.61 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:16:12,108 INFO [train.py:715] (7/8) Epoch 15, batch 2650, loss[loss=0.1115, simple_loss=0.192, pruned_loss=0.01549, over 4980.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02994, over 973167.19 frames.], batch size: 28, lr: 1.49e-04 +2022-05-08 07:16:51,600 INFO [train.py:715] (7/8) Epoch 15, batch 2700, loss[loss=0.1149, simple_loss=0.1855, pruned_loss=0.02212, over 4856.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03025, over 973487.42 frames.], batch size: 20, lr: 1.49e-04 +2022-05-08 07:17:33,120 INFO [train.py:715] (7/8) Epoch 15, batch 2750, loss[loss=0.1454, simple_loss=0.2247, pruned_loss=0.03302, over 4881.00 frames.], 
tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03033, over 972890.51 frames.], batch size: 22, lr: 1.49e-04 +2022-05-08 07:18:14,191 INFO [train.py:715] (7/8) Epoch 15, batch 2800, loss[loss=0.1465, simple_loss=0.2195, pruned_loss=0.03675, over 4771.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2092, pruned_loss=0.03082, over 973012.14 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 07:18:54,892 INFO [train.py:715] (7/8) Epoch 15, batch 2850, loss[loss=0.1622, simple_loss=0.2455, pruned_loss=0.0395, over 4789.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03025, over 973048.51 frames.], batch size: 21, lr: 1.49e-04 +2022-05-08 07:19:34,219 INFO [train.py:715] (7/8) Epoch 15, batch 2900, loss[loss=0.137, simple_loss=0.2127, pruned_loss=0.03064, over 4878.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.03001, over 972585.01 frames.], batch size: 16, lr: 1.49e-04 +2022-05-08 07:20:14,835 INFO [train.py:715] (7/8) Epoch 15, batch 2950, loss[loss=0.1154, simple_loss=0.1819, pruned_loss=0.02448, over 4976.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03006, over 971672.10 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 07:20:55,621 INFO [train.py:715] (7/8) Epoch 15, batch 3000, loss[loss=0.1323, simple_loss=0.1995, pruned_loss=0.03255, over 4853.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03024, over 971112.13 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 07:20:55,622 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 07:21:13,097 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1049, simple_loss=0.1887, pruned_loss=0.01057, over 914524.00 frames. +2022-05-08 07:21:54,025 INFO [train.py:715] (7/8) Epoch 15, batch 3050, loss[loss=0.1501, simple_loss=0.2258, pruned_loss=0.03721, over 4761.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02981, over 971996.68 frames.], batch size: 14, lr: 1.49e-04 +2022-05-08 07:22:33,942 INFO [train.py:715] (7/8) Epoch 15, batch 3100, loss[loss=0.1093, simple_loss=0.184, pruned_loss=0.01729, over 4925.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02984, over 971778.75 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:23:14,660 INFO [train.py:715] (7/8) Epoch 15, batch 3150, loss[loss=0.1214, simple_loss=0.2, pruned_loss=0.02134, over 4825.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.0299, over 970978.98 frames.], batch size: 15, lr: 1.49e-04 +2022-05-08 07:23:55,205 INFO [train.py:715] (7/8) Epoch 15, batch 3200, loss[loss=0.149, simple_loss=0.2284, pruned_loss=0.03477, over 4961.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2083, pruned_loss=0.02953, over 972117.80 frames.], batch size: 39, lr: 1.49e-04 +2022-05-08 07:24:35,385 INFO [train.py:715] (7/8) Epoch 15, batch 3250, loss[loss=0.1409, simple_loss=0.2297, pruned_loss=0.026, over 4928.00 frames.], tot_loss[loss=0.134, simple_loss=0.2089, pruned_loss=0.02958, over 972529.02 frames.], batch size: 23, lr: 1.49e-04 +2022-05-08 07:25:15,327 INFO [train.py:715] (7/8) Epoch 15, batch 3300, loss[loss=0.1214, simple_loss=0.1915, pruned_loss=0.02567, over 4827.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03009, over 971934.20 frames.], batch size: 26, lr: 1.49e-04 +2022-05-08 07:25:56,115 INFO [train.py:715] (7/8) Epoch 15, batch 3350, loss[loss=0.1257, simple_loss=0.1931, pruned_loss=0.02915, over 4862.00 frames.], tot_loss[loss=0.1346, simple_loss=0.209, pruned_loss=0.03005, over 
972462.36 frames.], batch size: 30, lr: 1.49e-04 +2022-05-08 07:26:36,444 INFO [train.py:715] (7/8) Epoch 15, batch 3400, loss[loss=0.1365, simple_loss=0.2091, pruned_loss=0.03195, over 4975.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02972, over 972291.50 frames.], batch size: 24, lr: 1.49e-04 +2022-05-08 07:27:16,667 INFO [train.py:715] (7/8) Epoch 15, batch 3450, loss[loss=0.1156, simple_loss=0.1965, pruned_loss=0.01739, over 4916.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2086, pruned_loss=0.02961, over 971819.33 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:27:56,932 INFO [train.py:715] (7/8) Epoch 15, batch 3500, loss[loss=0.1229, simple_loss=0.2053, pruned_loss=0.02024, over 4874.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2082, pruned_loss=0.02943, over 971742.85 frames.], batch size: 32, lr: 1.49e-04 +2022-05-08 07:28:37,349 INFO [train.py:715] (7/8) Epoch 15, batch 3550, loss[loss=0.1464, simple_loss=0.2203, pruned_loss=0.03623, over 4905.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2083, pruned_loss=0.02947, over 970753.81 frames.], batch size: 29, lr: 1.49e-04 +2022-05-08 07:29:17,844 INFO [train.py:715] (7/8) Epoch 15, batch 3600, loss[loss=0.1345, simple_loss=0.2033, pruned_loss=0.0329, over 4906.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03015, over 970678.34 frames.], batch size: 17, lr: 1.49e-04 +2022-05-08 07:29:57,642 INFO [train.py:715] (7/8) Epoch 15, batch 3650, loss[loss=0.1213, simple_loss=0.1941, pruned_loss=0.02423, over 4781.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03006, over 970226.02 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 07:30:38,283 INFO [train.py:715] (7/8) Epoch 15, batch 3700, loss[loss=0.1628, simple_loss=0.2371, pruned_loss=0.04421, over 4924.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02965, over 971691.88 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 07:31:19,135 INFO [train.py:715] (7/8) Epoch 15, batch 3750, loss[loss=0.1605, simple_loss=0.2342, pruned_loss=0.04346, over 4862.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02925, over 971673.29 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 07:31:58,800 INFO [train.py:715] (7/8) Epoch 15, batch 3800, loss[loss=0.1521, simple_loss=0.2257, pruned_loss=0.03927, over 4928.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02945, over 971973.73 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 07:32:38,804 INFO [train.py:715] (7/8) Epoch 15, batch 3850, loss[loss=0.1224, simple_loss=0.1943, pruned_loss=0.02529, over 4804.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02907, over 972169.62 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 07:33:19,085 INFO [train.py:715] (7/8) Epoch 15, batch 3900, loss[loss=0.1458, simple_loss=0.2081, pruned_loss=0.04176, over 4978.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02961, over 972005.15 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 07:33:58,254 INFO [train.py:715] (7/8) Epoch 15, batch 3950, loss[loss=0.138, simple_loss=0.2184, pruned_loss=0.02882, over 4947.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03019, over 972324.25 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 07:34:37,994 INFO [train.py:715] (7/8) Epoch 15, batch 4000, loss[loss=0.1352, simple_loss=0.1972, pruned_loss=0.0366, over 4852.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02994, over 973172.07 frames.], 
batch size: 20, lr: 1.48e-04 +2022-05-08 07:35:17,773 INFO [train.py:715] (7/8) Epoch 15, batch 4050, loss[loss=0.1473, simple_loss=0.2318, pruned_loss=0.03137, over 4754.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02971, over 973523.18 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:35:58,783 INFO [train.py:715] (7/8) Epoch 15, batch 4100, loss[loss=0.132, simple_loss=0.1983, pruned_loss=0.0328, over 4938.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02959, over 972974.75 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 07:36:37,614 INFO [train.py:715] (7/8) Epoch 15, batch 4150, loss[loss=0.12, simple_loss=0.2031, pruned_loss=0.01842, over 4931.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02967, over 973655.20 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 07:37:17,779 INFO [train.py:715] (7/8) Epoch 15, batch 4200, loss[loss=0.1259, simple_loss=0.2067, pruned_loss=0.02254, over 4819.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02986, over 972918.14 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 07:37:58,204 INFO [train.py:715] (7/8) Epoch 15, batch 4250, loss[loss=0.1291, simple_loss=0.2012, pruned_loss=0.02851, over 4873.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03039, over 972706.52 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 07:38:38,206 INFO [train.py:715] (7/8) Epoch 15, batch 4300, loss[loss=0.1707, simple_loss=0.2468, pruned_loss=0.04727, over 4736.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03023, over 972480.37 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:39:18,228 INFO [train.py:715] (7/8) Epoch 15, batch 4350, loss[loss=0.1408, simple_loss=0.2109, pruned_loss=0.03537, over 4899.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.02995, over 972646.42 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 07:39:58,272 INFO [train.py:715] (7/8) Epoch 15, batch 4400, loss[loss=0.1399, simple_loss=0.2135, pruned_loss=0.03316, over 4900.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02977, over 973017.95 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 07:40:38,808 INFO [train.py:715] (7/8) Epoch 15, batch 4450, loss[loss=0.1401, simple_loss=0.2087, pruned_loss=0.03573, over 4966.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03046, over 972494.69 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:41:18,472 INFO [train.py:715] (7/8) Epoch 15, batch 4500, loss[loss=0.1147, simple_loss=0.1996, pruned_loss=0.01493, over 4899.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03036, over 972264.66 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 07:41:58,885 INFO [train.py:715] (7/8) Epoch 15, batch 4550, loss[loss=0.1446, simple_loss=0.2253, pruned_loss=0.03197, over 4894.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03014, over 973114.89 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 07:42:39,503 INFO [train.py:715] (7/8) Epoch 15, batch 4600, loss[loss=0.1798, simple_loss=0.2341, pruned_loss=0.06275, over 4962.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03001, over 972914.95 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 07:43:19,672 INFO [train.py:715] (7/8) Epoch 15, batch 4650, loss[loss=0.1207, simple_loss=0.1967, pruned_loss=0.02238, over 4810.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03018, over 973208.03 frames.], batch size: 21, lr: 
1.48e-04 +2022-05-08 07:43:59,058 INFO [train.py:715] (7/8) Epoch 15, batch 4700, loss[loss=0.1434, simple_loss=0.2156, pruned_loss=0.03559, over 4690.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03024, over 973613.70 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:44:39,327 INFO [train.py:715] (7/8) Epoch 15, batch 4750, loss[loss=0.1412, simple_loss=0.2289, pruned_loss=0.0268, over 4890.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03047, over 973672.26 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 07:45:20,568 INFO [train.py:715] (7/8) Epoch 15, batch 4800, loss[loss=0.1799, simple_loss=0.2587, pruned_loss=0.05048, over 4919.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03011, over 974102.25 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 07:46:00,537 INFO [train.py:715] (7/8) Epoch 15, batch 4850, loss[loss=0.1239, simple_loss=0.1978, pruned_loss=0.02501, over 4931.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03001, over 973718.43 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 07:46:41,241 INFO [train.py:715] (7/8) Epoch 15, batch 4900, loss[loss=0.1329, simple_loss=0.2015, pruned_loss=0.03213, over 4858.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03034, over 973155.76 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 07:47:21,682 INFO [train.py:715] (7/8) Epoch 15, batch 4950, loss[loss=0.1204, simple_loss=0.1838, pruned_loss=0.02847, over 4838.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2068, pruned_loss=0.02989, over 973006.21 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 07:48:02,262 INFO [train.py:715] (7/8) Epoch 15, batch 5000, loss[loss=0.1441, simple_loss=0.2214, pruned_loss=0.03341, over 4955.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2071, pruned_loss=0.03005, over 973195.28 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:48:41,751 INFO [train.py:715] (7/8) Epoch 15, batch 5050, loss[loss=0.1329, simple_loss=0.2121, pruned_loss=0.02691, over 4823.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03034, over 973869.20 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 07:49:21,836 INFO [train.py:715] (7/8) Epoch 15, batch 5100, loss[loss=0.1441, simple_loss=0.2178, pruned_loss=0.03517, over 4979.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03075, over 974653.96 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 07:50:02,145 INFO [train.py:715] (7/8) Epoch 15, batch 5150, loss[loss=0.1384, simple_loss=0.2179, pruned_loss=0.0295, over 4751.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03025, over 974256.12 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 07:50:42,065 INFO [train.py:715] (7/8) Epoch 15, batch 5200, loss[loss=0.1452, simple_loss=0.2264, pruned_loss=0.03195, over 4851.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03023, over 974332.08 frames.], batch size: 30, lr: 1.48e-04 +2022-05-08 07:51:22,088 INFO [train.py:715] (7/8) Epoch 15, batch 5250, loss[loss=0.1336, simple_loss=0.2064, pruned_loss=0.03038, over 4911.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03003, over 973537.57 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 07:52:03,620 INFO [train.py:715] (7/8) Epoch 15, batch 5300, loss[loss=0.1312, simple_loss=0.211, pruned_loss=0.0257, over 4910.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03028, over 973861.32 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 
07:52:45,861 INFO [train.py:715] (7/8) Epoch 15, batch 5350, loss[loss=0.1347, simple_loss=0.2174, pruned_loss=0.02595, over 4824.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03032, over 973157.44 frames.], batch size: 27, lr: 1.48e-04 +2022-05-08 07:53:26,861 INFO [train.py:715] (7/8) Epoch 15, batch 5400, loss[loss=0.1579, simple_loss=0.2347, pruned_loss=0.04053, over 4974.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03015, over 973266.17 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:54:08,832 INFO [train.py:715] (7/8) Epoch 15, batch 5450, loss[loss=0.1265, simple_loss=0.1918, pruned_loss=0.03058, over 4738.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.0304, over 972504.73 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 07:54:50,465 INFO [train.py:715] (7/8) Epoch 15, batch 5500, loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02861, over 4767.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2096, pruned_loss=0.03046, over 972242.85 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 07:55:32,152 INFO [train.py:715] (7/8) Epoch 15, batch 5550, loss[loss=0.1447, simple_loss=0.2165, pruned_loss=0.03647, over 4954.00 frames.], tot_loss[loss=0.1346, simple_loss=0.209, pruned_loss=0.03013, over 973140.65 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 07:56:12,916 INFO [train.py:715] (7/8) Epoch 15, batch 5600, loss[loss=0.1688, simple_loss=0.2429, pruned_loss=0.0474, over 4828.00 frames.], tot_loss[loss=0.135, simple_loss=0.2094, pruned_loss=0.03031, over 973819.90 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 07:56:54,784 INFO [train.py:715] (7/8) Epoch 15, batch 5650, loss[loss=0.1185, simple_loss=0.201, pruned_loss=0.01803, over 4888.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03043, over 972062.21 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 07:57:37,295 INFO [train.py:715] (7/8) Epoch 15, batch 5700, loss[loss=0.1166, simple_loss=0.1926, pruned_loss=0.02028, over 4939.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02975, over 972618.23 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 07:58:18,519 INFO [train.py:715] (7/8) Epoch 15, batch 5750, loss[loss=0.1486, simple_loss=0.2318, pruned_loss=0.03266, over 4977.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02972, over 972826.98 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 07:58:59,971 INFO [train.py:715] (7/8) Epoch 15, batch 5800, loss[loss=0.1282, simple_loss=0.2044, pruned_loss=0.02598, over 4825.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03001, over 973029.10 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 07:59:41,227 INFO [train.py:715] (7/8) Epoch 15, batch 5850, loss[loss=0.1207, simple_loss=0.1998, pruned_loss=0.02082, over 4937.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02976, over 973079.46 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:00:25,528 INFO [train.py:715] (7/8) Epoch 15, batch 5900, loss[loss=0.1209, simple_loss=0.1988, pruned_loss=0.02149, over 4821.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02961, over 972355.57 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 08:01:06,121 INFO [train.py:715] (7/8) Epoch 15, batch 5950, loss[loss=0.1323, simple_loss=0.203, pruned_loss=0.03078, over 4973.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02974, over 972553.05 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:01:47,586 INFO 
[train.py:715] (7/8) Epoch 15, batch 6000, loss[loss=0.1337, simple_loss=0.2041, pruned_loss=0.03163, over 4971.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2062, pruned_loss=0.02941, over 971848.40 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:01:47,587 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 08:01:57,158 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01077, over 914524.00 frames. +2022-05-08 08:02:38,328 INFO [train.py:715] (7/8) Epoch 15, batch 6050, loss[loss=0.1418, simple_loss=0.2181, pruned_loss=0.03278, over 4943.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02987, over 971540.06 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 08:03:20,377 INFO [train.py:715] (7/8) Epoch 15, batch 6100, loss[loss=0.1339, simple_loss=0.2169, pruned_loss=0.02545, over 4705.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03024, over 971687.38 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:04:00,135 INFO [train.py:715] (7/8) Epoch 15, batch 6150, loss[loss=0.1509, simple_loss=0.2235, pruned_loss=0.0392, over 4969.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2083, pruned_loss=0.03039, over 971662.05 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:04:41,010 INFO [train.py:715] (7/8) Epoch 15, batch 6200, loss[loss=0.1191, simple_loss=0.1942, pruned_loss=0.022, over 4851.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03033, over 972281.45 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:05:20,640 INFO [train.py:715] (7/8) Epoch 15, batch 6250, loss[loss=0.1093, simple_loss=0.1921, pruned_loss=0.01324, over 4939.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02979, over 972025.90 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:06:01,396 INFO [train.py:715] (7/8) Epoch 15, batch 6300, loss[loss=0.1461, simple_loss=0.2286, pruned_loss=0.03179, over 4767.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02968, over 971862.22 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:06:41,234 INFO [train.py:715] (7/8) Epoch 15, batch 6350, loss[loss=0.1358, simple_loss=0.2185, pruned_loss=0.02655, over 4753.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02973, over 971610.97 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:07:21,275 INFO [train.py:715] (7/8) Epoch 15, batch 6400, loss[loss=0.1221, simple_loss=0.1919, pruned_loss=0.02621, over 4978.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02969, over 971984.53 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:08:01,848 INFO [train.py:715] (7/8) Epoch 15, batch 6450, loss[loss=0.1486, simple_loss=0.2258, pruned_loss=0.03571, over 4914.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02947, over 971706.02 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:08:41,389 INFO [train.py:715] (7/8) Epoch 15, batch 6500, loss[loss=0.1239, simple_loss=0.1902, pruned_loss=0.02885, over 4849.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.0296, over 971384.13 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 08:09:21,820 INFO [train.py:715] (7/8) Epoch 15, batch 6550, loss[loss=0.1652, simple_loss=0.2504, pruned_loss=0.04002, over 4902.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02999, over 970909.35 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:10:02,133 INFO [train.py:715] (7/8) Epoch 15, batch 6600, loss[loss=0.115, 
simple_loss=0.1921, pruned_loss=0.01898, over 4808.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02963, over 971309.18 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 08:10:42,817 INFO [train.py:715] (7/8) Epoch 15, batch 6650, loss[loss=0.1348, simple_loss=0.2005, pruned_loss=0.03457, over 4965.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02988, over 971683.97 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:11:22,259 INFO [train.py:715] (7/8) Epoch 15, batch 6700, loss[loss=0.1241, simple_loss=0.2056, pruned_loss=0.02128, over 4810.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02989, over 972122.55 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:12:02,761 INFO [train.py:715] (7/8) Epoch 15, batch 6750, loss[loss=0.1251, simple_loss=0.196, pruned_loss=0.02708, over 4813.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02973, over 972273.01 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 08:12:44,118 INFO [train.py:715] (7/8) Epoch 15, batch 6800, loss[loss=0.1432, simple_loss=0.2205, pruned_loss=0.03296, over 4911.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.0298, over 971800.62 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:13:23,951 INFO [train.py:715] (7/8) Epoch 15, batch 6850, loss[loss=0.1367, simple_loss=0.2139, pruned_loss=0.02971, over 4811.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03014, over 971703.41 frames.], batch size: 27, lr: 1.48e-04 +2022-05-08 08:14:03,536 INFO [train.py:715] (7/8) Epoch 15, batch 6900, loss[loss=0.133, simple_loss=0.1939, pruned_loss=0.03603, over 4833.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2088, pruned_loss=0.03046, over 971951.75 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:14:44,362 INFO [train.py:715] (7/8) Epoch 15, batch 6950, loss[loss=0.1484, simple_loss=0.2098, pruned_loss=0.04347, over 4979.00 frames.], tot_loss[loss=0.1361, simple_loss=0.2102, pruned_loss=0.03106, over 973074.85 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:15:24,998 INFO [train.py:715] (7/8) Epoch 15, batch 7000, loss[loss=0.1636, simple_loss=0.2511, pruned_loss=0.0381, over 4959.00 frames.], tot_loss[loss=0.1363, simple_loss=0.2104, pruned_loss=0.0311, over 972979.61 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:16:03,964 INFO [train.py:715] (7/8) Epoch 15, batch 7050, loss[loss=0.1194, simple_loss=0.1947, pruned_loss=0.02205, over 4939.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03057, over 973301.40 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:16:44,710 INFO [train.py:715] (7/8) Epoch 15, batch 7100, loss[loss=0.1433, simple_loss=0.2243, pruned_loss=0.03119, over 4989.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03043, over 972420.18 frames.], batch size: 26, lr: 1.48e-04 +2022-05-08 08:17:25,233 INFO [train.py:715] (7/8) Epoch 15, batch 7150, loss[loss=0.1317, simple_loss=0.203, pruned_loss=0.03015, over 4769.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03023, over 972198.36 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:18:05,135 INFO [train.py:715] (7/8) Epoch 15, batch 7200, loss[loss=0.1425, simple_loss=0.2149, pruned_loss=0.03504, over 4876.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02986, over 971879.95 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:18:44,342 INFO [train.py:715] (7/8) Epoch 15, batch 7250, loss[loss=0.1211, simple_loss=0.1931, 
pruned_loss=0.02455, over 4764.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02985, over 971906.90 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:19:25,090 INFO [train.py:715] (7/8) Epoch 15, batch 7300, loss[loss=0.1675, simple_loss=0.2323, pruned_loss=0.05138, over 4976.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03016, over 972711.90 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:20:06,076 INFO [train.py:715] (7/8) Epoch 15, batch 7350, loss[loss=0.1446, simple_loss=0.2286, pruned_loss=0.03033, over 4926.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03006, over 973299.69 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:20:45,520 INFO [train.py:715] (7/8) Epoch 15, batch 7400, loss[loss=0.1234, simple_loss=0.2054, pruned_loss=0.02065, over 4935.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2079, pruned_loss=0.03042, over 973458.52 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:21:25,990 INFO [train.py:715] (7/8) Epoch 15, batch 7450, loss[loss=0.1199, simple_loss=0.206, pruned_loss=0.0169, over 4844.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2083, pruned_loss=0.03077, over 973390.05 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:22:06,381 INFO [train.py:715] (7/8) Epoch 15, batch 7500, loss[loss=0.1304, simple_loss=0.204, pruned_loss=0.02842, over 4907.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2083, pruned_loss=0.03061, over 973076.07 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:22:46,670 INFO [train.py:715] (7/8) Epoch 15, batch 7550, loss[loss=0.153, simple_loss=0.2321, pruned_loss=0.03695, over 4807.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2075, pruned_loss=0.03046, over 972651.41 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 08:23:25,907 INFO [train.py:715] (7/8) Epoch 15, batch 7600, loss[loss=0.1673, simple_loss=0.2393, pruned_loss=0.04769, over 4809.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2078, pruned_loss=0.03035, over 973087.70 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:24:05,911 INFO [train.py:715] (7/8) Epoch 15, batch 7650, loss[loss=0.1537, simple_loss=0.2352, pruned_loss=0.03615, over 4880.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03046, over 972779.24 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:24:45,956 INFO [train.py:715] (7/8) Epoch 15, batch 7700, loss[loss=0.1126, simple_loss=0.1919, pruned_loss=0.01663, over 4784.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03013, over 972051.34 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:25:24,896 INFO [train.py:715] (7/8) Epoch 15, batch 7750, loss[loss=0.09843, simple_loss=0.1722, pruned_loss=0.01231, over 4980.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.0302, over 972445.15 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:26:04,532 INFO [train.py:715] (7/8) Epoch 15, batch 7800, loss[loss=0.1592, simple_loss=0.2359, pruned_loss=0.04121, over 4789.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.0306, over 971414.06 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:26:43,785 INFO [train.py:715] (7/8) Epoch 15, batch 7850, loss[loss=0.1394, simple_loss=0.209, pruned_loss=0.03495, over 4766.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03092, over 971370.03 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:27:23,764 INFO [train.py:715] (7/8) Epoch 15, batch 7900, loss[loss=0.105, simple_loss=0.1823, pruned_loss=0.01382, over 
4928.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2082, pruned_loss=0.03076, over 971545.05 frames.], batch size: 29, lr: 1.48e-04 +2022-05-08 08:28:01,920 INFO [train.py:715] (7/8) Epoch 15, batch 7950, loss[loss=0.1267, simple_loss=0.2033, pruned_loss=0.02508, over 4835.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2079, pruned_loss=0.03045, over 972007.25 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:28:41,234 INFO [train.py:715] (7/8) Epoch 15, batch 8000, loss[loss=0.1399, simple_loss=0.2038, pruned_loss=0.03798, over 4771.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.03014, over 971954.00 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 08:29:20,796 INFO [train.py:715] (7/8) Epoch 15, batch 8050, loss[loss=0.1313, simple_loss=0.2119, pruned_loss=0.0253, over 4938.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03029, over 972897.13 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:29:59,767 INFO [train.py:715] (7/8) Epoch 15, batch 8100, loss[loss=0.1637, simple_loss=0.232, pruned_loss=0.04771, over 4767.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.0301, over 972826.43 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:30:38,766 INFO [train.py:715] (7/8) Epoch 15, batch 8150, loss[loss=0.1406, simple_loss=0.2157, pruned_loss=0.03273, over 4922.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02978, over 972039.73 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:31:18,841 INFO [train.py:715] (7/8) Epoch 15, batch 8200, loss[loss=0.1347, simple_loss=0.2125, pruned_loss=0.02845, over 4859.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03028, over 971790.67 frames.], batch size: 13, lr: 1.48e-04 +2022-05-08 08:31:57,570 INFO [train.py:715] (7/8) Epoch 15, batch 8250, loss[loss=0.1364, simple_loss=0.2134, pruned_loss=0.02973, over 4870.00 frames.], tot_loss[loss=0.1344, simple_loss=0.208, pruned_loss=0.03036, over 972658.10 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 08:32:36,550 INFO [train.py:715] (7/8) Epoch 15, batch 8300, loss[loss=0.1592, simple_loss=0.2448, pruned_loss=0.03683, over 4905.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2093, pruned_loss=0.0306, over 973164.57 frames.], batch size: 39, lr: 1.48e-04 +2022-05-08 08:33:15,771 INFO [train.py:715] (7/8) Epoch 15, batch 8350, loss[loss=0.1301, simple_loss=0.2232, pruned_loss=0.01849, over 4984.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.0307, over 973880.60 frames.], batch size: 25, lr: 1.48e-04 +2022-05-08 08:33:55,968 INFO [train.py:715] (7/8) Epoch 15, batch 8400, loss[loss=0.1398, simple_loss=0.2113, pruned_loss=0.03417, over 4974.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03033, over 974355.69 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:34:35,511 INFO [train.py:715] (7/8) Epoch 15, batch 8450, loss[loss=0.1502, simple_loss=0.2289, pruned_loss=0.03581, over 4777.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02992, over 974272.62 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:35:14,652 INFO [train.py:715] (7/8) Epoch 15, batch 8500, loss[loss=0.136, simple_loss=0.2191, pruned_loss=0.02651, over 4933.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03041, over 973557.22 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:35:54,836 INFO [train.py:715] (7/8) Epoch 15, batch 8550, loss[loss=0.1481, simple_loss=0.2249, pruned_loss=0.03569, over 4781.00 frames.], 
tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03042, over 974227.38 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:36:33,509 INFO [train.py:715] (7/8) Epoch 15, batch 8600, loss[loss=0.1301, simple_loss=0.1986, pruned_loss=0.03082, over 4860.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03055, over 974049.58 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:37:12,323 INFO [train.py:715] (7/8) Epoch 15, batch 8650, loss[loss=0.1175, simple_loss=0.1957, pruned_loss=0.01965, over 4889.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2091, pruned_loss=0.03066, over 973915.50 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 08:37:51,173 INFO [train.py:715] (7/8) Epoch 15, batch 8700, loss[loss=0.1186, simple_loss=0.1926, pruned_loss=0.02227, over 4793.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2094, pruned_loss=0.03068, over 973635.72 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:38:30,426 INFO [train.py:715] (7/8) Epoch 15, batch 8750, loss[loss=0.126, simple_loss=0.1987, pruned_loss=0.02669, over 4968.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03052, over 973693.97 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 08:39:08,913 INFO [train.py:715] (7/8) Epoch 15, batch 8800, loss[loss=0.1346, simple_loss=0.2066, pruned_loss=0.03125, over 4792.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2097, pruned_loss=0.03081, over 973807.85 frames.], batch size: 24, lr: 1.48e-04 +2022-05-08 08:39:47,404 INFO [train.py:715] (7/8) Epoch 15, batch 8850, loss[loss=0.115, simple_loss=0.1921, pruned_loss=0.01893, over 4987.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03041, over 973773.13 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:40:26,831 INFO [train.py:715] (7/8) Epoch 15, batch 8900, loss[loss=0.1273, simple_loss=0.1908, pruned_loss=0.03192, over 4796.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2098, pruned_loss=0.03065, over 973858.23 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:41:06,365 INFO [train.py:715] (7/8) Epoch 15, batch 8950, loss[loss=0.1667, simple_loss=0.2406, pruned_loss=0.04638, over 4705.00 frames.], tot_loss[loss=0.1359, simple_loss=0.2096, pruned_loss=0.03112, over 973136.23 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:41:45,472 INFO [train.py:715] (7/8) Epoch 15, batch 9000, loss[loss=0.1299, simple_loss=0.214, pruned_loss=0.02287, over 4948.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03058, over 973180.71 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:41:45,472 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 08:42:05,029 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 08:42:44,052 INFO [train.py:715] (7/8) Epoch 15, batch 9050, loss[loss=0.1489, simple_loss=0.2214, pruned_loss=0.03818, over 4850.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03017, over 972503.91 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:43:23,565 INFO [train.py:715] (7/8) Epoch 15, batch 9100, loss[loss=0.1245, simple_loss=0.1906, pruned_loss=0.02917, over 4934.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03045, over 972659.49 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:44:03,262 INFO [train.py:715] (7/8) Epoch 15, batch 9150, loss[loss=0.1339, simple_loss=0.2033, pruned_loss=0.03223, over 4893.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2085, pruned_loss=0.03084, over 972727.32 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 08:44:42,059 INFO [train.py:715] (7/8) Epoch 15, batch 9200, loss[loss=0.189, simple_loss=0.2685, pruned_loss=0.05473, over 4785.00 frames.], tot_loss[loss=0.1354, simple_loss=0.209, pruned_loss=0.03089, over 972202.64 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 08:45:21,338 INFO [train.py:715] (7/8) Epoch 15, batch 9250, loss[loss=0.128, simple_loss=0.2002, pruned_loss=0.02785, over 4973.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03036, over 973209.51 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:46:01,214 INFO [train.py:715] (7/8) Epoch 15, batch 9300, loss[loss=0.1552, simple_loss=0.2243, pruned_loss=0.04299, over 4940.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03053, over 972691.01 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:46:41,140 INFO [train.py:715] (7/8) Epoch 15, batch 9350, loss[loss=0.1416, simple_loss=0.2189, pruned_loss=0.03216, over 4754.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.0307, over 972437.37 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:47:19,983 INFO [train.py:715] (7/8) Epoch 15, batch 9400, loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02899, over 4855.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.0302, over 971731.92 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:47:59,296 INFO [train.py:715] (7/8) Epoch 15, batch 9450, loss[loss=0.1506, simple_loss=0.2345, pruned_loss=0.03336, over 4848.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03047, over 972100.46 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:48:38,583 INFO [train.py:715] (7/8) Epoch 15, batch 9500, loss[loss=0.1243, simple_loss=0.2005, pruned_loss=0.02402, over 4894.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2075, pruned_loss=0.03017, over 971702.06 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:49:16,969 INFO [train.py:715] (7/8) Epoch 15, batch 9550, loss[loss=0.1343, simple_loss=0.2003, pruned_loss=0.03408, over 4783.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03024, over 971209.75 frames.], batch size: 17, lr: 1.48e-04 +2022-05-08 08:49:56,304 INFO [train.py:715] (7/8) Epoch 15, batch 9600, loss[loss=0.1201, simple_loss=0.1996, pruned_loss=0.02032, over 4919.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2077, pruned_loss=0.03021, over 971580.19 frames.], batch size: 18, lr: 1.48e-04 +2022-05-08 08:50:35,896 INFO [train.py:715] (7/8) Epoch 15, batch 9650, loss[loss=0.131, simple_loss=0.1981, pruned_loss=0.03199, over 4984.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.03, over 971092.18 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:51:15,446 INFO 
[train.py:715] (7/8) Epoch 15, batch 9700, loss[loss=0.1383, simple_loss=0.2099, pruned_loss=0.03339, over 4824.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02997, over 972660.36 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:51:53,981 INFO [train.py:715] (7/8) Epoch 15, batch 9750, loss[loss=0.1231, simple_loss=0.2027, pruned_loss=0.02173, over 4918.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02995, over 972804.71 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:52:33,223 INFO [train.py:715] (7/8) Epoch 15, batch 9800, loss[loss=0.1293, simple_loss=0.2032, pruned_loss=0.02768, over 4767.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.03041, over 972674.61 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:53:12,410 INFO [train.py:715] (7/8) Epoch 15, batch 9850, loss[loss=0.1188, simple_loss=0.1899, pruned_loss=0.02388, over 4843.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03038, over 973007.86 frames.], batch size: 20, lr: 1.48e-04 +2022-05-08 08:53:50,990 INFO [train.py:715] (7/8) Epoch 15, batch 9900, loss[loss=0.1506, simple_loss=0.2239, pruned_loss=0.03867, over 4875.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2095, pruned_loss=0.03076, over 973140.80 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 08:54:30,415 INFO [train.py:715] (7/8) Epoch 15, batch 9950, loss[loss=0.1369, simple_loss=0.2179, pruned_loss=0.02796, over 4900.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.03001, over 973867.61 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:55:09,387 INFO [train.py:715] (7/8) Epoch 15, batch 10000, loss[loss=0.1268, simple_loss=0.2012, pruned_loss=0.02625, over 4942.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.03003, over 974453.28 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 08:55:48,601 INFO [train.py:715] (7/8) Epoch 15, batch 10050, loss[loss=0.1234, simple_loss=0.2004, pruned_loss=0.02319, over 4690.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02997, over 974319.36 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:56:26,959 INFO [train.py:715] (7/8) Epoch 15, batch 10100, loss[loss=0.1471, simple_loss=0.2134, pruned_loss=0.04037, over 4953.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02972, over 974273.03 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 08:57:05,757 INFO [train.py:715] (7/8) Epoch 15, batch 10150, loss[loss=0.1338, simple_loss=0.2195, pruned_loss=0.02403, over 4903.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02941, over 972638.45 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:57:45,600 INFO [train.py:715] (7/8) Epoch 15, batch 10200, loss[loss=0.1299, simple_loss=0.2109, pruned_loss=0.02451, over 4908.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03012, over 973807.42 frames.], batch size: 19, lr: 1.48e-04 +2022-05-08 08:58:23,926 INFO [train.py:715] (7/8) Epoch 15, batch 10250, loss[loss=0.1299, simple_loss=0.2014, pruned_loss=0.02915, over 4874.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03034, over 973382.37 frames.], batch size: 16, lr: 1.48e-04 +2022-05-08 08:59:03,220 INFO [train.py:715] (7/8) Epoch 15, batch 10300, loss[loss=0.1435, simple_loss=0.2099, pruned_loss=0.03858, over 4967.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03014, over 974198.08 frames.], batch size: 21, lr: 1.48e-04 +2022-05-08 08:59:42,682 INFO [train.py:715] 
(7/8) Epoch 15, batch 10350, loss[loss=0.1372, simple_loss=0.2133, pruned_loss=0.0305, over 4963.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.0301, over 973432.55 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 09:00:21,805 INFO [train.py:715] (7/8) Epoch 15, batch 10400, loss[loss=0.1307, simple_loss=0.1962, pruned_loss=0.03263, over 4960.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03022, over 973030.57 frames.], batch size: 15, lr: 1.48e-04 +2022-05-08 09:00:59,822 INFO [train.py:715] (7/8) Epoch 15, batch 10450, loss[loss=0.1267, simple_loss=0.1985, pruned_loss=0.02743, over 4983.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03035, over 972747.96 frames.], batch size: 14, lr: 1.48e-04 +2022-05-08 09:01:38,804 INFO [train.py:715] (7/8) Epoch 15, batch 10500, loss[loss=0.1395, simple_loss=0.2172, pruned_loss=0.03093, over 4856.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2087, pruned_loss=0.03094, over 971606.58 frames.], batch size: 32, lr: 1.48e-04 +2022-05-08 09:02:18,519 INFO [train.py:715] (7/8) Epoch 15, batch 10550, loss[loss=0.1345, simple_loss=0.2234, pruned_loss=0.02274, over 4875.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03075, over 971216.36 frames.], batch size: 22, lr: 1.48e-04 +2022-05-08 09:02:56,679 INFO [train.py:715] (7/8) Epoch 15, batch 10600, loss[loss=0.1378, simple_loss=0.2173, pruned_loss=0.02916, over 4942.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03065, over 971766.53 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 09:03:35,332 INFO [train.py:715] (7/8) Epoch 15, batch 10650, loss[loss=0.1352, simple_loss=0.2112, pruned_loss=0.02961, over 4945.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.03041, over 972256.79 frames.], batch size: 35, lr: 1.48e-04 +2022-05-08 09:04:14,416 INFO [train.py:715] (7/8) Epoch 15, batch 10700, loss[loss=0.135, simple_loss=0.2037, pruned_loss=0.03312, over 4767.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03038, over 972384.52 frames.], batch size: 12, lr: 1.48e-04 +2022-05-08 09:04:53,626 INFO [train.py:715] (7/8) Epoch 15, batch 10750, loss[loss=0.1176, simple_loss=0.1855, pruned_loss=0.02485, over 4936.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03066, over 972864.76 frames.], batch size: 23, lr: 1.48e-04 +2022-05-08 09:05:31,576 INFO [train.py:715] (7/8) Epoch 15, batch 10800, loss[loss=0.1345, simple_loss=0.2113, pruned_loss=0.02879, over 4895.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03039, over 973311.97 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:06:11,097 INFO [train.py:715] (7/8) Epoch 15, batch 10850, loss[loss=0.1313, simple_loss=0.2027, pruned_loss=0.02999, over 4828.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03016, over 973606.61 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:06:50,385 INFO [train.py:715] (7/8) Epoch 15, batch 10900, loss[loss=0.1512, simple_loss=0.2375, pruned_loss=0.03249, over 4754.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02983, over 974319.58 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:07:28,755 INFO [train.py:715] (7/8) Epoch 15, batch 10950, loss[loss=0.1232, simple_loss=0.1981, pruned_loss=0.0242, over 4780.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02969, over 973704.76 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:08:06,754 INFO [train.py:715] (7/8) 
Epoch 15, batch 11000, loss[loss=0.1332, simple_loss=0.2092, pruned_loss=0.02859, over 4977.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02969, over 973505.24 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:08:45,832 INFO [train.py:715] (7/8) Epoch 15, batch 11050, loss[loss=0.1196, simple_loss=0.1954, pruned_loss=0.02186, over 4778.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02987, over 972595.69 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 09:09:25,315 INFO [train.py:715] (7/8) Epoch 15, batch 11100, loss[loss=0.1142, simple_loss=0.1895, pruned_loss=0.01941, over 4908.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02971, over 971763.90 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:10:03,226 INFO [train.py:715] (7/8) Epoch 15, batch 11150, loss[loss=0.1424, simple_loss=0.204, pruned_loss=0.04039, over 4818.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02947, over 970895.98 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:10:41,861 INFO [train.py:715] (7/8) Epoch 15, batch 11200, loss[loss=0.1318, simple_loss=0.202, pruned_loss=0.03075, over 4890.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02955, over 970921.22 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:11:20,815 INFO [train.py:715] (7/8) Epoch 15, batch 11250, loss[loss=0.1175, simple_loss=0.1873, pruned_loss=0.02385, over 4771.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.0301, over 971217.08 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:11:59,331 INFO [train.py:715] (7/8) Epoch 15, batch 11300, loss[loss=0.1224, simple_loss=0.2043, pruned_loss=0.0203, over 4910.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03016, over 971791.79 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:12:37,835 INFO [train.py:715] (7/8) Epoch 15, batch 11350, loss[loss=0.1293, simple_loss=0.2129, pruned_loss=0.0228, over 4815.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2091, pruned_loss=0.0307, over 972503.48 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 09:13:17,188 INFO [train.py:715] (7/8) Epoch 15, batch 11400, loss[loss=0.1366, simple_loss=0.2129, pruned_loss=0.03009, over 4781.00 frames.], tot_loss[loss=0.135, simple_loss=0.2086, pruned_loss=0.0307, over 972093.95 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:13:55,483 INFO [train.py:715] (7/8) Epoch 15, batch 11450, loss[loss=0.1377, simple_loss=0.2103, pruned_loss=0.03252, over 4828.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 972445.57 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:14:34,170 INFO [train.py:715] (7/8) Epoch 15, batch 11500, loss[loss=0.1457, simple_loss=0.221, pruned_loss=0.03524, over 4914.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03015, over 972053.79 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:15:13,131 INFO [train.py:715] (7/8) Epoch 15, batch 11550, loss[loss=0.1382, simple_loss=0.2088, pruned_loss=0.03378, over 4977.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03019, over 972808.96 frames.], batch size: 28, lr: 1.47e-04 +2022-05-08 09:15:52,416 INFO [train.py:715] (7/8) Epoch 15, batch 11600, loss[loss=0.1215, simple_loss=0.1919, pruned_loss=0.02553, over 4686.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03034, over 973799.95 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:16:30,744 INFO [train.py:715] (7/8) Epoch 15, batch 
11650, loss[loss=0.139, simple_loss=0.2033, pruned_loss=0.03733, over 4865.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03043, over 974156.71 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:17:09,220 INFO [train.py:715] (7/8) Epoch 15, batch 11700, loss[loss=0.1424, simple_loss=0.2134, pruned_loss=0.0357, over 4973.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03035, over 973867.26 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 09:17:48,442 INFO [train.py:715] (7/8) Epoch 15, batch 11750, loss[loss=0.135, simple_loss=0.2077, pruned_loss=0.03112, over 4793.00 frames.], tot_loss[loss=0.1335, simple_loss=0.207, pruned_loss=0.02998, over 974232.46 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:18:27,446 INFO [train.py:715] (7/8) Epoch 15, batch 11800, loss[loss=0.1267, simple_loss=0.2045, pruned_loss=0.02448, over 4986.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02972, over 973868.34 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 09:19:05,512 INFO [train.py:715] (7/8) Epoch 15, batch 11850, loss[loss=0.1449, simple_loss=0.2157, pruned_loss=0.03704, over 4743.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03052, over 973579.73 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:19:45,039 INFO [train.py:715] (7/8) Epoch 15, batch 11900, loss[loss=0.1168, simple_loss=0.1831, pruned_loss=0.02521, over 4789.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03038, over 972980.63 frames.], batch size: 12, lr: 1.47e-04 +2022-05-08 09:20:25,121 INFO [train.py:715] (7/8) Epoch 15, batch 11950, loss[loss=0.1639, simple_loss=0.2353, pruned_loss=0.04626, over 4913.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02976, over 972914.09 frames.], batch size: 39, lr: 1.47e-04 +2022-05-08 09:21:03,708 INFO [train.py:715] (7/8) Epoch 15, batch 12000, loss[loss=0.1225, simple_loss=0.1957, pruned_loss=0.02459, over 4940.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.0303, over 972600.66 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:21:03,708 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 09:21:20,396 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.105, simple_loss=0.1887, pruned_loss=0.01066, over 914524.00 frames. 
+2022-05-08 09:21:59,113 INFO [train.py:715] (7/8) Epoch 15, batch 12050, loss[loss=0.1045, simple_loss=0.1797, pruned_loss=0.01465, over 4890.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0302, over 972253.54 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:22:38,254 INFO [train.py:715] (7/8) Epoch 15, batch 12100, loss[loss=0.1516, simple_loss=0.2287, pruned_loss=0.03724, over 4888.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02982, over 972902.60 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:23:17,964 INFO [train.py:715] (7/8) Epoch 15, batch 12150, loss[loss=0.1661, simple_loss=0.2395, pruned_loss=0.04637, over 4945.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03045, over 973050.33 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 09:23:56,409 INFO [train.py:715] (7/8) Epoch 15, batch 12200, loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03054, over 4778.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02978, over 972011.02 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:24:35,179 INFO [train.py:715] (7/8) Epoch 15, batch 12250, loss[loss=0.1165, simple_loss=0.1936, pruned_loss=0.0197, over 4764.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02952, over 971445.91 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:25:14,195 INFO [train.py:715] (7/8) Epoch 15, batch 12300, loss[loss=0.1211, simple_loss=0.2033, pruned_loss=0.01943, over 4790.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02972, over 970786.58 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 09:25:54,057 INFO [train.py:715] (7/8) Epoch 15, batch 12350, loss[loss=0.1202, simple_loss=0.1995, pruned_loss=0.02045, over 4929.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02983, over 970812.78 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:26:32,323 INFO [train.py:715] (7/8) Epoch 15, batch 12400, loss[loss=0.1409, simple_loss=0.2197, pruned_loss=0.03109, over 4688.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02987, over 970616.52 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:27:11,075 INFO [train.py:715] (7/8) Epoch 15, batch 12450, loss[loss=0.1289, simple_loss=0.2057, pruned_loss=0.02609, over 4892.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02975, over 970936.54 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:27:51,092 INFO [train.py:715] (7/8) Epoch 15, batch 12500, loss[loss=0.1732, simple_loss=0.2468, pruned_loss=0.04975, over 4886.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03048, over 971245.83 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:28:29,287 INFO [train.py:715] (7/8) Epoch 15, batch 12550, loss[loss=0.1322, simple_loss=0.2102, pruned_loss=0.02716, over 4855.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03006, over 971016.74 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 09:29:08,351 INFO [train.py:715] (7/8) Epoch 15, batch 12600, loss[loss=0.1526, simple_loss=0.2218, pruned_loss=0.04166, over 4744.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03018, over 971369.82 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:29:46,867 INFO [train.py:715] (7/8) Epoch 15, batch 12650, loss[loss=0.1556, simple_loss=0.2288, pruned_loss=0.04118, over 4705.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03041, over 971227.41 frames.], batch size: 15, lr: 1.47e-04 
+2022-05-08 09:30:26,459 INFO [train.py:715] (7/8) Epoch 15, batch 12700, loss[loss=0.1291, simple_loss=0.2016, pruned_loss=0.0283, over 4991.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03033, over 971388.67 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 09:31:04,801 INFO [train.py:715] (7/8) Epoch 15, batch 12750, loss[loss=0.1261, simple_loss=0.2034, pruned_loss=0.02439, over 4985.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03019, over 972658.52 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:31:43,635 INFO [train.py:715] (7/8) Epoch 15, batch 12800, loss[loss=0.1216, simple_loss=0.1981, pruned_loss=0.02256, over 4696.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02995, over 973405.87 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:32:23,166 INFO [train.py:715] (7/8) Epoch 15, batch 12850, loss[loss=0.1463, simple_loss=0.2237, pruned_loss=0.03444, over 4887.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03023, over 972714.72 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:33:01,779 INFO [train.py:715] (7/8) Epoch 15, batch 12900, loss[loss=0.1481, simple_loss=0.2242, pruned_loss=0.03598, over 4854.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03018, over 972160.78 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 09:33:40,809 INFO [train.py:715] (7/8) Epoch 15, batch 12950, loss[loss=0.1272, simple_loss=0.206, pruned_loss=0.02423, over 4830.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02976, over 971817.47 frames.], batch size: 27, lr: 1.47e-04 +2022-05-08 09:34:20,128 INFO [train.py:715] (7/8) Epoch 15, batch 13000, loss[loss=0.1193, simple_loss=0.1843, pruned_loss=0.02712, over 4938.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03035, over 971477.59 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 09:34:59,663 INFO [train.py:715] (7/8) Epoch 15, batch 13050, loss[loss=0.1155, simple_loss=0.1904, pruned_loss=0.02032, over 4817.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2079, pruned_loss=0.03065, over 971913.01 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 09:35:38,147 INFO [train.py:715] (7/8) Epoch 15, batch 13100, loss[loss=0.1302, simple_loss=0.2064, pruned_loss=0.02703, over 4916.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2079, pruned_loss=0.03053, over 971144.91 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:36:17,622 INFO [train.py:715] (7/8) Epoch 15, batch 13150, loss[loss=0.119, simple_loss=0.1957, pruned_loss=0.02112, over 4865.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03007, over 971479.09 frames.], batch size: 20, lr: 1.47e-04 +2022-05-08 09:36:57,406 INFO [train.py:715] (7/8) Epoch 15, batch 13200, loss[loss=0.1379, simple_loss=0.2087, pruned_loss=0.0335, over 4881.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02994, over 971652.19 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:37:35,199 INFO [train.py:715] (7/8) Epoch 15, batch 13250, loss[loss=0.1101, simple_loss=0.1864, pruned_loss=0.01689, over 4821.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.03, over 971477.77 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:38:14,328 INFO [train.py:715] (7/8) Epoch 15, batch 13300, loss[loss=0.1412, simple_loss=0.2164, pruned_loss=0.03303, over 4905.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02972, over 971671.63 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 
09:38:53,946 INFO [train.py:715] (7/8) Epoch 15, batch 13350, loss[loss=0.133, simple_loss=0.1955, pruned_loss=0.03529, over 4869.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2077, pruned_loss=0.03023, over 971797.07 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:39:34,554 INFO [train.py:715] (7/8) Epoch 15, batch 13400, loss[loss=0.1357, simple_loss=0.2099, pruned_loss=0.03071, over 4884.00 frames.], tot_loss[loss=0.1346, simple_loss=0.208, pruned_loss=0.03056, over 972703.94 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 09:40:13,183 INFO [train.py:715] (7/8) Epoch 15, batch 13450, loss[loss=0.1505, simple_loss=0.2364, pruned_loss=0.03224, over 4753.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03016, over 973304.20 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 09:40:51,764 INFO [train.py:715] (7/8) Epoch 15, batch 13500, loss[loss=0.1562, simple_loss=0.2304, pruned_loss=0.04099, over 4767.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03014, over 972853.95 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:41:31,304 INFO [train.py:715] (7/8) Epoch 15, batch 13550, loss[loss=0.1179, simple_loss=0.2039, pruned_loss=0.01595, over 4811.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03013, over 971803.38 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:42:09,582 INFO [train.py:715] (7/8) Epoch 15, batch 13600, loss[loss=0.09144, simple_loss=0.1612, pruned_loss=0.01086, over 4790.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03019, over 972347.50 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 09:42:48,566 INFO [train.py:715] (7/8) Epoch 15, batch 13650, loss[loss=0.1444, simple_loss=0.2051, pruned_loss=0.04187, over 4960.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.0302, over 972825.33 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:43:27,803 INFO [train.py:715] (7/8) Epoch 15, batch 13700, loss[loss=0.1156, simple_loss=0.1918, pruned_loss=0.01972, over 4771.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2089, pruned_loss=0.03006, over 973181.03 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:44:06,259 INFO [train.py:715] (7/8) Epoch 15, batch 13750, loss[loss=0.1457, simple_loss=0.214, pruned_loss=0.03874, over 4783.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02998, over 972698.99 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:44:44,979 INFO [train.py:715] (7/8) Epoch 15, batch 13800, loss[loss=0.1157, simple_loss=0.1862, pruned_loss=0.02263, over 4900.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03, over 973196.79 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:45:23,199 INFO [train.py:715] (7/8) Epoch 15, batch 13850, loss[loss=0.1123, simple_loss=0.1871, pruned_loss=0.01877, over 4743.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03037, over 972971.43 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:46:05,205 INFO [train.py:715] (7/8) Epoch 15, batch 13900, loss[loss=0.131, simple_loss=0.2096, pruned_loss=0.02619, over 4937.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 972914.78 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 09:46:43,312 INFO [train.py:715] (7/8) Epoch 15, batch 13950, loss[loss=0.1274, simple_loss=0.204, pruned_loss=0.02536, over 4798.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03038, over 972751.97 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 09:47:21,601 
INFO [train.py:715] (7/8) Epoch 15, batch 14000, loss[loss=0.1256, simple_loss=0.2018, pruned_loss=0.02474, over 4822.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03035, over 972823.40 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:48:00,881 INFO [train.py:715] (7/8) Epoch 15, batch 14050, loss[loss=0.1484, simple_loss=0.2209, pruned_loss=0.0379, over 4963.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02984, over 972457.55 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:48:38,845 INFO [train.py:715] (7/8) Epoch 15, batch 14100, loss[loss=0.1355, simple_loss=0.2085, pruned_loss=0.03127, over 4942.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02978, over 972654.03 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:49:17,894 INFO [train.py:715] (7/8) Epoch 15, batch 14150, loss[loss=0.1231, simple_loss=0.1973, pruned_loss=0.02441, over 4959.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03056, over 973265.65 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 09:49:56,535 INFO [train.py:715] (7/8) Epoch 15, batch 14200, loss[loss=0.1365, simple_loss=0.2136, pruned_loss=0.02969, over 4846.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03006, over 972046.91 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 09:50:35,493 INFO [train.py:715] (7/8) Epoch 15, batch 14250, loss[loss=0.1245, simple_loss=0.1983, pruned_loss=0.02539, over 4703.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.03028, over 972254.48 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:51:13,320 INFO [train.py:715] (7/8) Epoch 15, batch 14300, loss[loss=0.1168, simple_loss=0.1991, pruned_loss=0.01724, over 4812.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03038, over 971693.01 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 09:51:51,766 INFO [train.py:715] (7/8) Epoch 15, batch 14350, loss[loss=0.1293, simple_loss=0.2151, pruned_loss=0.02181, over 4968.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03021, over 971699.24 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 09:52:30,862 INFO [train.py:715] (7/8) Epoch 15, batch 14400, loss[loss=0.1323, simple_loss=0.2077, pruned_loss=0.02848, over 4935.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.0301, over 972357.44 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:53:08,607 INFO [train.py:715] (7/8) Epoch 15, batch 14450, loss[loss=0.1151, simple_loss=0.1915, pruned_loss=0.01931, over 4832.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02968, over 971547.75 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:53:47,585 INFO [train.py:715] (7/8) Epoch 15, batch 14500, loss[loss=0.1305, simple_loss=0.2008, pruned_loss=0.0301, over 4942.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2088, pruned_loss=0.02991, over 972490.11 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 09:54:25,857 INFO [train.py:715] (7/8) Epoch 15, batch 14550, loss[loss=0.1311, simple_loss=0.2093, pruned_loss=0.02648, over 4827.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03011, over 971932.84 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 09:55:04,848 INFO [train.py:715] (7/8) Epoch 15, batch 14600, loss[loss=0.1639, simple_loss=0.2405, pruned_loss=0.0436, over 4916.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03004, over 972255.68 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 09:55:42,672 INFO 
[train.py:715] (7/8) Epoch 15, batch 14650, loss[loss=0.1664, simple_loss=0.2372, pruned_loss=0.04775, over 4951.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03017, over 972541.86 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 09:56:20,651 INFO [train.py:715] (7/8) Epoch 15, batch 14700, loss[loss=0.1281, simple_loss=0.2064, pruned_loss=0.02489, over 4773.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03062, over 972251.37 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 09:56:59,721 INFO [train.py:715] (7/8) Epoch 15, batch 14750, loss[loss=0.1417, simple_loss=0.2161, pruned_loss=0.03365, over 4822.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03029, over 971908.69 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 09:57:37,352 INFO [train.py:715] (7/8) Epoch 15, batch 14800, loss[loss=0.09912, simple_loss=0.1765, pruned_loss=0.01089, over 4759.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.02999, over 972468.32 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 09:58:16,193 INFO [train.py:715] (7/8) Epoch 15, batch 14850, loss[loss=0.1408, simple_loss=0.226, pruned_loss=0.02785, over 4990.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02966, over 972589.36 frames.], batch size: 28, lr: 1.47e-04 +2022-05-08 09:58:55,097 INFO [train.py:715] (7/8) Epoch 15, batch 14900, loss[loss=0.1358, simple_loss=0.2085, pruned_loss=0.03157, over 4922.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02941, over 972672.35 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 09:59:33,276 INFO [train.py:715] (7/8) Epoch 15, batch 14950, loss[loss=0.1376, simple_loss=0.2197, pruned_loss=0.02781, over 4824.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.0299, over 971862.04 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:00:11,564 INFO [train.py:715] (7/8) Epoch 15, batch 15000, loss[loss=0.1215, simple_loss=0.2016, pruned_loss=0.02073, over 4825.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02922, over 971806.38 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:00:11,564 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 10:00:26,345 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01077, over 914524.00 frames. 
+2022-05-08 10:01:05,812 INFO [train.py:715] (7/8) Epoch 15, batch 15050, loss[loss=0.1242, simple_loss=0.1894, pruned_loss=0.02947, over 4776.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02956, over 971505.35 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:01:43,979 INFO [train.py:715] (7/8) Epoch 15, batch 15100, loss[loss=0.1454, simple_loss=0.2144, pruned_loss=0.03824, over 4865.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02982, over 971476.43 frames.], batch size: 20, lr: 1.47e-04 +2022-05-08 10:02:23,331 INFO [train.py:715] (7/8) Epoch 15, batch 15150, loss[loss=0.1333, simple_loss=0.2059, pruned_loss=0.03036, over 4768.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2095, pruned_loss=0.03035, over 971657.30 frames.], batch size: 19, lr: 1.47e-04 +2022-05-08 10:03:01,053 INFO [train.py:715] (7/8) Epoch 15, batch 15200, loss[loss=0.1319, simple_loss=0.2051, pruned_loss=0.02939, over 4693.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2089, pruned_loss=0.03007, over 971761.55 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:03:39,351 INFO [train.py:715] (7/8) Epoch 15, batch 15250, loss[loss=0.1416, simple_loss=0.2154, pruned_loss=0.03386, over 4911.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03039, over 971693.30 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:04:18,903 INFO [train.py:715] (7/8) Epoch 15, batch 15300, loss[loss=0.1251, simple_loss=0.2001, pruned_loss=0.02505, over 4986.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03024, over 971905.79 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:04:56,984 INFO [train.py:715] (7/8) Epoch 15, batch 15350, loss[loss=0.1145, simple_loss=0.1813, pruned_loss=0.02381, over 4790.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03019, over 971676.69 frames.], batch size: 14, lr: 1.47e-04 +2022-05-08 10:05:35,891 INFO [train.py:715] (7/8) Epoch 15, batch 15400, loss[loss=0.1257, simple_loss=0.2133, pruned_loss=0.01903, over 4797.00 frames.], tot_loss[loss=0.135, simple_loss=0.2095, pruned_loss=0.03025, over 972563.71 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:06:13,987 INFO [train.py:715] (7/8) Epoch 15, batch 15450, loss[loss=0.1176, simple_loss=0.2018, pruned_loss=0.01671, over 4929.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.02985, over 972364.09 frames.], batch size: 29, lr: 1.47e-04 +2022-05-08 10:06:52,880 INFO [train.py:715] (7/8) Epoch 15, batch 15500, loss[loss=0.119, simple_loss=0.2091, pruned_loss=0.01444, over 4825.00 frames.], tot_loss[loss=0.1344, simple_loss=0.209, pruned_loss=0.02988, over 972658.16 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:07:31,445 INFO [train.py:715] (7/8) Epoch 15, batch 15550, loss[loss=0.1385, simple_loss=0.2059, pruned_loss=0.03554, over 4818.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2087, pruned_loss=0.02989, over 972072.33 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:08:10,327 INFO [train.py:715] (7/8) Epoch 15, batch 15600, loss[loss=0.1562, simple_loss=0.2249, pruned_loss=0.04375, over 4687.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.03012, over 972161.66 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:08:49,134 INFO [train.py:715] (7/8) Epoch 15, batch 15650, loss[loss=0.1269, simple_loss=0.1951, pruned_loss=0.02929, over 4840.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02995, over 972275.47 frames.], batch size: 26, lr: 1.47e-04 
+2022-05-08 10:09:27,216 INFO [train.py:715] (7/8) Epoch 15, batch 15700, loss[loss=0.1019, simple_loss=0.1718, pruned_loss=0.016, over 4791.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02955, over 972194.08 frames.], batch size: 12, lr: 1.47e-04 +2022-05-08 10:10:05,788 INFO [train.py:715] (7/8) Epoch 15, batch 15750, loss[loss=0.141, simple_loss=0.2121, pruned_loss=0.03497, over 4840.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02961, over 972106.49 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 10:10:44,349 INFO [train.py:715] (7/8) Epoch 15, batch 15800, loss[loss=0.1436, simple_loss=0.2101, pruned_loss=0.03854, over 4986.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02981, over 971495.16 frames.], batch size: 31, lr: 1.47e-04 +2022-05-08 10:11:23,020 INFO [train.py:715] (7/8) Epoch 15, batch 15850, loss[loss=0.1576, simple_loss=0.2169, pruned_loss=0.04922, over 4912.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02974, over 970858.78 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 10:12:01,146 INFO [train.py:715] (7/8) Epoch 15, batch 15900, loss[loss=0.1173, simple_loss=0.1906, pruned_loss=0.02207, over 4915.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03036, over 971272.73 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 10:12:39,306 INFO [train.py:715] (7/8) Epoch 15, batch 15950, loss[loss=0.1335, simple_loss=0.2175, pruned_loss=0.02477, over 4852.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03053, over 970836.03 frames.], batch size: 34, lr: 1.47e-04 +2022-05-08 10:13:18,369 INFO [train.py:715] (7/8) Epoch 15, batch 16000, loss[loss=0.1197, simple_loss=0.1967, pruned_loss=0.02135, over 4984.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2088, pruned_loss=0.03068, over 971244.90 frames.], batch size: 27, lr: 1.47e-04 +2022-05-08 10:13:56,002 INFO [train.py:715] (7/8) Epoch 15, batch 16050, loss[loss=0.1215, simple_loss=0.2006, pruned_loss=0.02119, over 4908.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03011, over 972029.00 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:14:34,591 INFO [train.py:715] (7/8) Epoch 15, batch 16100, loss[loss=0.09999, simple_loss=0.1706, pruned_loss=0.01468, over 4758.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2091, pruned_loss=0.03056, over 971935.54 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 10:15:13,039 INFO [train.py:715] (7/8) Epoch 15, batch 16150, loss[loss=0.1369, simple_loss=0.2052, pruned_loss=0.0343, over 4940.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.0303, over 972254.25 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 10:15:51,556 INFO [train.py:715] (7/8) Epoch 15, batch 16200, loss[loss=0.1594, simple_loss=0.2247, pruned_loss=0.04703, over 4929.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2095, pruned_loss=0.03083, over 972930.39 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:16:29,836 INFO [train.py:715] (7/8) Epoch 15, batch 16250, loss[loss=0.1214, simple_loss=0.2005, pruned_loss=0.02117, over 4773.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03011, over 972309.07 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:17:08,215 INFO [train.py:715] (7/8) Epoch 15, batch 16300, loss[loss=0.1557, simple_loss=0.2339, pruned_loss=0.03875, over 4689.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02974, over 972459.80 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 
10:17:46,752 INFO [train.py:715] (7/8) Epoch 15, batch 16350, loss[loss=0.1334, simple_loss=0.2092, pruned_loss=0.02881, over 4850.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02981, over 972075.01 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:18:24,621 INFO [train.py:715] (7/8) Epoch 15, batch 16400, loss[loss=0.1184, simple_loss=0.1948, pruned_loss=0.02102, over 4807.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02915, over 971330.68 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:19:03,504 INFO [train.py:715] (7/8) Epoch 15, batch 16450, loss[loss=0.1272, simple_loss=0.198, pruned_loss=0.02822, over 4887.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02889, over 971663.26 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 10:19:41,759 INFO [train.py:715] (7/8) Epoch 15, batch 16500, loss[loss=0.124, simple_loss=0.207, pruned_loss=0.02053, over 4740.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02901, over 971137.79 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 10:20:20,131 INFO [train.py:715] (7/8) Epoch 15, batch 16550, loss[loss=0.12, simple_loss=0.2085, pruned_loss=0.01577, over 4985.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02944, over 972338.35 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 10:20:58,278 INFO [train.py:715] (7/8) Epoch 15, batch 16600, loss[loss=0.1064, simple_loss=0.1845, pruned_loss=0.01414, over 4807.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02955, over 972068.46 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:21:37,035 INFO [train.py:715] (7/8) Epoch 15, batch 16650, loss[loss=0.1276, simple_loss=0.198, pruned_loss=0.02859, over 4949.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02955, over 972084.69 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 10:22:16,799 INFO [train.py:715] (7/8) Epoch 15, batch 16700, loss[loss=0.1462, simple_loss=0.2181, pruned_loss=0.03719, over 4860.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02991, over 971063.95 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 10:22:55,521 INFO [train.py:715] (7/8) Epoch 15, batch 16750, loss[loss=0.1342, simple_loss=0.2034, pruned_loss=0.03255, over 4804.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02956, over 971297.36 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:23:34,512 INFO [train.py:715] (7/8) Epoch 15, batch 16800, loss[loss=0.1476, simple_loss=0.2287, pruned_loss=0.0332, over 4861.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.0292, over 971922.13 frames.], batch size: 20, lr: 1.47e-04 +2022-05-08 10:24:13,668 INFO [train.py:715] (7/8) Epoch 15, batch 16850, loss[loss=0.1422, simple_loss=0.2155, pruned_loss=0.03442, over 4914.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02988, over 972769.24 frames.], batch size: 23, lr: 1.47e-04 +2022-05-08 10:24:52,752 INFO [train.py:715] (7/8) Epoch 15, batch 16900, loss[loss=0.1134, simple_loss=0.1892, pruned_loss=0.01882, over 4941.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02947, over 972468.63 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 10:25:31,721 INFO [train.py:715] (7/8) Epoch 15, batch 16950, loss[loss=0.1285, simple_loss=0.204, pruned_loss=0.02652, over 4825.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02987, over 972533.08 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 10:26:10,072 INFO 
[train.py:715] (7/8) Epoch 15, batch 17000, loss[loss=0.1418, simple_loss=0.2226, pruned_loss=0.03048, over 4757.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03006, over 972380.05 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 10:26:49,334 INFO [train.py:715] (7/8) Epoch 15, batch 17050, loss[loss=0.1562, simple_loss=0.2215, pruned_loss=0.04548, over 4889.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03028, over 972220.75 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 10:27:27,262 INFO [train.py:715] (7/8) Epoch 15, batch 17100, loss[loss=0.1397, simple_loss=0.2212, pruned_loss=0.02912, over 4755.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02991, over 971602.88 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 10:28:06,076 INFO [train.py:715] (7/8) Epoch 15, batch 17150, loss[loss=0.1413, simple_loss=0.2049, pruned_loss=0.03883, over 4886.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03005, over 971501.91 frames.], batch size: 32, lr: 1.47e-04 +2022-05-08 10:28:44,478 INFO [train.py:715] (7/8) Epoch 15, batch 17200, loss[loss=0.1472, simple_loss=0.2115, pruned_loss=0.04147, over 4841.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03055, over 971028.92 frames.], batch size: 30, lr: 1.47e-04 +2022-05-08 10:29:23,147 INFO [train.py:715] (7/8) Epoch 15, batch 17250, loss[loss=0.1264, simple_loss=0.1945, pruned_loss=0.02918, over 4931.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03029, over 971312.49 frames.], batch size: 39, lr: 1.47e-04 +2022-05-08 10:30:01,722 INFO [train.py:715] (7/8) Epoch 15, batch 17300, loss[loss=0.1224, simple_loss=0.1955, pruned_loss=0.02468, over 4829.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.0299, over 971372.31 frames.], batch size: 26, lr: 1.47e-04 +2022-05-08 10:30:40,362 INFO [train.py:715] (7/8) Epoch 15, batch 17350, loss[loss=0.1411, simple_loss=0.2108, pruned_loss=0.03573, over 4960.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02989, over 972016.32 frames.], batch size: 35, lr: 1.47e-04 +2022-05-08 10:31:19,951 INFO [train.py:715] (7/8) Epoch 15, batch 17400, loss[loss=0.1185, simple_loss=0.1943, pruned_loss=0.0213, over 4932.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02987, over 972192.91 frames.], batch size: 21, lr: 1.47e-04 +2022-05-08 10:31:57,891 INFO [train.py:715] (7/8) Epoch 15, batch 17450, loss[loss=0.1428, simple_loss=0.2168, pruned_loss=0.03434, over 4946.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02986, over 972925.52 frames.], batch size: 24, lr: 1.47e-04 +2022-05-08 10:32:36,870 INFO [train.py:715] (7/8) Epoch 15, batch 17500, loss[loss=0.1221, simple_loss=0.1955, pruned_loss=0.02432, over 4828.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02997, over 971235.67 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 10:33:15,844 INFO [train.py:715] (7/8) Epoch 15, batch 17550, loss[loss=0.1307, simple_loss=0.2193, pruned_loss=0.02102, over 4883.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02987, over 970656.37 frames.], batch size: 22, lr: 1.47e-04 +2022-05-08 10:33:54,433 INFO [train.py:715] (7/8) Epoch 15, batch 17600, loss[loss=0.1221, simple_loss=0.1886, pruned_loss=0.02782, over 4979.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03006, over 971214.59 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:34:32,812 INFO 
[train.py:715] (7/8) Epoch 15, batch 17650, loss[loss=0.1474, simple_loss=0.2199, pruned_loss=0.03747, over 4762.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02988, over 971046.87 frames.], batch size: 16, lr: 1.47e-04 +2022-05-08 10:35:11,435 INFO [train.py:715] (7/8) Epoch 15, batch 17700, loss[loss=0.15, simple_loss=0.2405, pruned_loss=0.02971, over 4985.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02987, over 970912.46 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:35:50,323 INFO [train.py:715] (7/8) Epoch 15, batch 17750, loss[loss=0.1322, simple_loss=0.2149, pruned_loss=0.02481, over 4775.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03005, over 970929.99 frames.], batch size: 18, lr: 1.47e-04 +2022-05-08 10:36:28,684 INFO [train.py:715] (7/8) Epoch 15, batch 17800, loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02971, over 4980.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02989, over 971589.46 frames.], batch size: 25, lr: 1.47e-04 +2022-05-08 10:37:07,666 INFO [train.py:715] (7/8) Epoch 15, batch 17850, loss[loss=0.1175, simple_loss=0.1953, pruned_loss=0.01982, over 4823.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02992, over 971323.94 frames.], batch size: 13, lr: 1.47e-04 +2022-05-08 10:37:46,657 INFO [train.py:715] (7/8) Epoch 15, batch 17900, loss[loss=0.1512, simple_loss=0.2203, pruned_loss=0.04101, over 4908.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.02997, over 971646.63 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:38:25,488 INFO [train.py:715] (7/8) Epoch 15, batch 17950, loss[loss=0.1293, simple_loss=0.2037, pruned_loss=0.02741, over 4914.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2088, pruned_loss=0.02994, over 971738.97 frames.], batch size: 17, lr: 1.47e-04 +2022-05-08 10:39:03,819 INFO [train.py:715] (7/8) Epoch 15, batch 18000, loss[loss=0.159, simple_loss=0.2296, pruned_loss=0.04418, over 4983.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2091, pruned_loss=0.0299, over 971797.13 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:39:03,820 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 10:39:13,329 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1048, simple_loss=0.1885, pruned_loss=0.01059, over 914524.00 frames. 
+2022-05-08 10:39:51,810 INFO [train.py:715] (7/8) Epoch 15, batch 18050, loss[loss=0.1368, simple_loss=0.2116, pruned_loss=0.03103, over 4967.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2087, pruned_loss=0.02974, over 971786.10 frames.], batch size: 15, lr: 1.47e-04 +2022-05-08 10:40:30,479 INFO [train.py:715] (7/8) Epoch 15, batch 18100, loss[loss=0.1254, simple_loss=0.2015, pruned_loss=0.02466, over 4781.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02992, over 972108.71 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:41:09,235 INFO [train.py:715] (7/8) Epoch 15, batch 18150, loss[loss=0.1145, simple_loss=0.2004, pruned_loss=0.01428, over 4833.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03004, over 972211.57 frames.], batch size: 26, lr: 1.46e-04 +2022-05-08 10:41:47,122 INFO [train.py:715] (7/8) Epoch 15, batch 18200, loss[loss=0.1294, simple_loss=0.195, pruned_loss=0.03188, over 4944.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03013, over 971660.10 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 10:42:25,783 INFO [train.py:715] (7/8) Epoch 15, batch 18250, loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03586, over 4936.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02997, over 971483.79 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 10:43:04,445 INFO [train.py:715] (7/8) Epoch 15, batch 18300, loss[loss=0.146, simple_loss=0.2217, pruned_loss=0.03511, over 4824.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2091, pruned_loss=0.02996, over 971612.16 frames.], batch size: 26, lr: 1.46e-04 +2022-05-08 10:43:42,534 INFO [train.py:715] (7/8) Epoch 15, batch 18350, loss[loss=0.1203, simple_loss=0.2015, pruned_loss=0.01952, over 4843.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2089, pruned_loss=0.02969, over 971728.24 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 10:44:21,121 INFO [train.py:715] (7/8) Epoch 15, batch 18400, loss[loss=0.124, simple_loss=0.196, pruned_loss=0.02603, over 4926.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2089, pruned_loss=0.02991, over 971866.28 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 10:44:59,618 INFO [train.py:715] (7/8) Epoch 15, batch 18450, loss[loss=0.1445, simple_loss=0.2093, pruned_loss=0.03986, over 4963.00 frames.], tot_loss[loss=0.1342, simple_loss=0.209, pruned_loss=0.02966, over 972539.09 frames.], batch size: 31, lr: 1.46e-04 +2022-05-08 10:45:38,880 INFO [train.py:715] (7/8) Epoch 15, batch 18500, loss[loss=0.1129, simple_loss=0.1816, pruned_loss=0.02209, over 4748.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2087, pruned_loss=0.0298, over 972454.34 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 10:46:17,374 INFO [train.py:715] (7/8) Epoch 15, batch 18550, loss[loss=0.1455, simple_loss=0.2244, pruned_loss=0.03331, over 4800.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.02999, over 972961.80 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 10:46:55,963 INFO [train.py:715] (7/8) Epoch 15, batch 18600, loss[loss=0.1271, simple_loss=0.1977, pruned_loss=0.0282, over 4771.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02975, over 972606.03 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 10:47:34,880 INFO [train.py:715] (7/8) Epoch 15, batch 18650, loss[loss=0.1399, simple_loss=0.2156, pruned_loss=0.03212, over 4785.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02972, over 972495.57 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 
10:48:13,536 INFO [train.py:715] (7/8) Epoch 15, batch 18700, loss[loss=0.1519, simple_loss=0.2337, pruned_loss=0.03503, over 4964.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02971, over 973742.80 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 10:48:52,385 INFO [train.py:715] (7/8) Epoch 15, batch 18750, loss[loss=0.1181, simple_loss=0.194, pruned_loss=0.02105, over 4826.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03002, over 973396.04 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 10:49:31,674 INFO [train.py:715] (7/8) Epoch 15, batch 18800, loss[loss=0.1525, simple_loss=0.2238, pruned_loss=0.04057, over 4954.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03014, over 973597.68 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 10:50:10,917 INFO [train.py:715] (7/8) Epoch 15, batch 18850, loss[loss=0.1302, simple_loss=0.2129, pruned_loss=0.0238, over 4742.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03004, over 973343.90 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:50:49,338 INFO [train.py:715] (7/8) Epoch 15, batch 18900, loss[loss=0.128, simple_loss=0.2046, pruned_loss=0.02567, over 4820.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2086, pruned_loss=0.03046, over 973237.83 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 10:51:28,550 INFO [train.py:715] (7/8) Epoch 15, batch 18950, loss[loss=0.1541, simple_loss=0.2225, pruned_loss=0.0428, over 4969.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03045, over 974191.97 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 10:52:07,868 INFO [train.py:715] (7/8) Epoch 15, batch 19000, loss[loss=0.1337, simple_loss=0.2151, pruned_loss=0.02617, over 4890.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02998, over 973927.25 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 10:52:46,222 INFO [train.py:715] (7/8) Epoch 15, batch 19050, loss[loss=0.1575, simple_loss=0.2146, pruned_loss=0.05021, over 4784.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02988, over 973288.66 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 10:53:25,395 INFO [train.py:715] (7/8) Epoch 15, batch 19100, loss[loss=0.1108, simple_loss=0.1841, pruned_loss=0.01872, over 4962.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02935, over 973586.76 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 10:54:03,697 INFO [train.py:715] (7/8) Epoch 15, batch 19150, loss[loss=0.1441, simple_loss=0.2158, pruned_loss=0.03622, over 4733.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02938, over 973054.04 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 10:54:41,930 INFO [train.py:715] (7/8) Epoch 15, batch 19200, loss[loss=0.1498, simple_loss=0.2287, pruned_loss=0.03547, over 4884.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02969, over 972831.53 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:55:19,946 INFO [train.py:715] (7/8) Epoch 15, batch 19250, loss[loss=0.165, simple_loss=0.2248, pruned_loss=0.05258, over 4872.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02952, over 973067.53 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 10:55:58,064 INFO [train.py:715] (7/8) Epoch 15, batch 19300, loss[loss=0.1411, simple_loss=0.2207, pruned_loss=0.03077, over 4876.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03031, over 971874.31 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 
10:56:36,950 INFO [train.py:715] (7/8) Epoch 15, batch 19350, loss[loss=0.1141, simple_loss=0.1971, pruned_loss=0.01559, over 4817.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.0296, over 972507.45 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 10:57:14,727 INFO [train.py:715] (7/8) Epoch 15, batch 19400, loss[loss=0.1293, simple_loss=0.2089, pruned_loss=0.02483, over 4758.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02981, over 971908.77 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:57:53,613 INFO [train.py:715] (7/8) Epoch 15, batch 19450, loss[loss=0.1342, simple_loss=0.2161, pruned_loss=0.02617, over 4892.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.03001, over 971983.42 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 10:58:31,636 INFO [train.py:715] (7/8) Epoch 15, batch 19500, loss[loss=0.1225, simple_loss=0.1973, pruned_loss=0.02384, over 4934.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02982, over 972260.25 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 10:59:09,770 INFO [train.py:715] (7/8) Epoch 15, batch 19550, loss[loss=0.1377, simple_loss=0.2117, pruned_loss=0.03186, over 4961.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02963, over 971495.96 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 10:59:48,218 INFO [train.py:715] (7/8) Epoch 15, batch 19600, loss[loss=0.1607, simple_loss=0.238, pruned_loss=0.04172, over 4986.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02964, over 971270.22 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 11:00:26,255 INFO [train.py:715] (7/8) Epoch 15, batch 19650, loss[loss=0.1184, simple_loss=0.1938, pruned_loss=0.02155, over 4915.00 frames.], tot_loss[loss=0.1344, simple_loss=0.209, pruned_loss=0.02992, over 971780.37 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:01:05,292 INFO [train.py:715] (7/8) Epoch 15, batch 19700, loss[loss=0.1325, simple_loss=0.2121, pruned_loss=0.02646, over 4983.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02991, over 972214.14 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 11:01:42,988 INFO [train.py:715] (7/8) Epoch 15, batch 19750, loss[loss=0.1634, simple_loss=0.2241, pruned_loss=0.05129, over 4856.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02993, over 972832.57 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 11:02:21,391 INFO [train.py:715] (7/8) Epoch 15, batch 19800, loss[loss=0.1164, simple_loss=0.1946, pruned_loss=0.01908, over 4985.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02993, over 972439.29 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:02:59,698 INFO [train.py:715] (7/8) Epoch 15, batch 19850, loss[loss=0.1238, simple_loss=0.1987, pruned_loss=0.02439, over 4918.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03037, over 971522.86 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:03:37,785 INFO [train.py:715] (7/8) Epoch 15, batch 19900, loss[loss=0.1309, simple_loss=0.2065, pruned_loss=0.02761, over 4917.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02999, over 971685.23 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:04:16,957 INFO [train.py:715] (7/8) Epoch 15, batch 19950, loss[loss=0.1451, simple_loss=0.217, pruned_loss=0.0366, over 4778.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03061, over 971330.15 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 
11:04:55,169 INFO [train.py:715] (7/8) Epoch 15, batch 20000, loss[loss=0.1243, simple_loss=0.1911, pruned_loss=0.02875, over 4745.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03053, over 972233.84 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:05:33,567 INFO [train.py:715] (7/8) Epoch 15, batch 20050, loss[loss=0.1461, simple_loss=0.2245, pruned_loss=0.03387, over 4772.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03081, over 971897.94 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:06:11,838 INFO [train.py:715] (7/8) Epoch 15, batch 20100, loss[loss=0.109, simple_loss=0.1741, pruned_loss=0.02193, over 4863.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2089, pruned_loss=0.03044, over 973014.71 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:06:50,118 INFO [train.py:715] (7/8) Epoch 15, batch 20150, loss[loss=0.1376, simple_loss=0.2202, pruned_loss=0.02755, over 4806.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03017, over 972782.39 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:07:28,124 INFO [train.py:715] (7/8) Epoch 15, batch 20200, loss[loss=0.1371, simple_loss=0.2149, pruned_loss=0.02964, over 4791.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03002, over 973008.57 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:08:05,819 INFO [train.py:715] (7/8) Epoch 15, batch 20250, loss[loss=0.1537, simple_loss=0.2201, pruned_loss=0.04368, over 4923.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2092, pruned_loss=0.03072, over 972753.65 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 11:08:44,517 INFO [train.py:715] (7/8) Epoch 15, batch 20300, loss[loss=0.1703, simple_loss=0.2433, pruned_loss=0.04862, over 4868.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.0301, over 972388.41 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:09:22,705 INFO [train.py:715] (7/8) Epoch 15, batch 20350, loss[loss=0.1394, simple_loss=0.2131, pruned_loss=0.03281, over 4986.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03012, over 972317.94 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:10:01,091 INFO [train.py:715] (7/8) Epoch 15, batch 20400, loss[loss=0.129, simple_loss=0.2007, pruned_loss=0.02868, over 4988.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02996, over 972374.49 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:10:38,947 INFO [train.py:715] (7/8) Epoch 15, batch 20450, loss[loss=0.1367, simple_loss=0.2195, pruned_loss=0.02696, over 4956.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03005, over 971911.50 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:11:17,697 INFO [train.py:715] (7/8) Epoch 15, batch 20500, loss[loss=0.1295, simple_loss=0.2033, pruned_loss=0.02789, over 4781.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02998, over 972210.42 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:11:55,871 INFO [train.py:715] (7/8) Epoch 15, batch 20550, loss[loss=0.1481, simple_loss=0.21, pruned_loss=0.04316, over 4853.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02997, over 971324.71 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:12:33,920 INFO [train.py:715] (7/8) Epoch 15, batch 20600, loss[loss=0.1329, simple_loss=0.2018, pruned_loss=0.032, over 4945.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2086, pruned_loss=0.02997, over 971914.17 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:13:12,979 
INFO [train.py:715] (7/8) Epoch 15, batch 20650, loss[loss=0.1353, simple_loss=0.2168, pruned_loss=0.02689, over 4815.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.02937, over 971914.18 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 11:13:51,737 INFO [train.py:715] (7/8) Epoch 15, batch 20700, loss[loss=0.108, simple_loss=0.1869, pruned_loss=0.01455, over 4810.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02913, over 972657.46 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:14:31,082 INFO [train.py:715] (7/8) Epoch 15, batch 20750, loss[loss=0.1043, simple_loss=0.1721, pruned_loss=0.01823, over 4829.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02934, over 972834.49 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:15:09,386 INFO [train.py:715] (7/8) Epoch 15, batch 20800, loss[loss=0.1234, simple_loss=0.2012, pruned_loss=0.02282, over 4934.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02923, over 972707.44 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:15:48,762 INFO [train.py:715] (7/8) Epoch 15, batch 20850, loss[loss=0.1392, simple_loss=0.2093, pruned_loss=0.03448, over 4895.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02935, over 971594.08 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:16:27,990 INFO [train.py:715] (7/8) Epoch 15, batch 20900, loss[loss=0.1237, simple_loss=0.2037, pruned_loss=0.02188, over 4820.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02939, over 971745.92 frames.], batch size: 26, lr: 1.46e-04 +2022-05-08 11:17:06,242 INFO [train.py:715] (7/8) Epoch 15, batch 20950, loss[loss=0.1459, simple_loss=0.2236, pruned_loss=0.03406, over 4870.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03025, over 972296.25 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:17:45,526 INFO [train.py:715] (7/8) Epoch 15, batch 21000, loss[loss=0.1296, simple_loss=0.2038, pruned_loss=0.02772, over 4738.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03056, over 973182.96 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:17:45,526 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 11:17:56,038 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1051, simple_loss=0.1887, pruned_loss=0.01075, over 914524.00 frames. 
+2022-05-08 11:18:35,286 INFO [train.py:715] (7/8) Epoch 15, batch 21050, loss[loss=0.09675, simple_loss=0.1707, pruned_loss=0.01142, over 4834.00 frames.], tot_loss[loss=0.1351, simple_loss=0.209, pruned_loss=0.03057, over 973023.91 frames.], batch size: 26, lr: 1.46e-04 +2022-05-08 11:19:14,770 INFO [train.py:715] (7/8) Epoch 15, batch 21100, loss[loss=0.1578, simple_loss=0.2181, pruned_loss=0.04872, over 4791.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.0302, over 973393.61 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:19:53,788 INFO [train.py:715] (7/8) Epoch 15, batch 21150, loss[loss=0.1592, simple_loss=0.2276, pruned_loss=0.04535, over 4855.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.0302, over 973164.48 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:20:32,267 INFO [train.py:715] (7/8) Epoch 15, batch 21200, loss[loss=0.1405, simple_loss=0.2139, pruned_loss=0.03354, over 4815.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.03011, over 973084.15 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:21:11,106 INFO [train.py:715] (7/8) Epoch 15, batch 21250, loss[loss=0.1328, simple_loss=0.2046, pruned_loss=0.03047, over 4799.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03027, over 972430.80 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:21:49,145 INFO [train.py:715] (7/8) Epoch 15, batch 21300, loss[loss=0.1339, simple_loss=0.2005, pruned_loss=0.03369, over 4817.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03011, over 971944.99 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:22:26,788 INFO [train.py:715] (7/8) Epoch 15, batch 21350, loss[loss=0.1215, simple_loss=0.2027, pruned_loss=0.02012, over 4781.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.0295, over 972867.93 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:23:05,096 INFO [train.py:715] (7/8) Epoch 15, batch 21400, loss[loss=0.1369, simple_loss=0.2123, pruned_loss=0.03073, over 4963.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02972, over 973096.20 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:23:43,352 INFO [train.py:715] (7/8) Epoch 15, batch 21450, loss[loss=0.1173, simple_loss=0.1964, pruned_loss=0.01907, over 4892.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02967, over 973090.49 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:24:21,357 INFO [train.py:715] (7/8) Epoch 15, batch 21500, loss[loss=0.1566, simple_loss=0.2281, pruned_loss=0.04248, over 4749.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02971, over 972552.97 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:24:59,647 INFO [train.py:715] (7/8) Epoch 15, batch 21550, loss[loss=0.1538, simple_loss=0.2305, pruned_loss=0.0385, over 4766.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02999, over 972460.14 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:25:38,151 INFO [train.py:715] (7/8) Epoch 15, batch 21600, loss[loss=0.1233, simple_loss=0.2042, pruned_loss=0.02125, over 4981.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03009, over 972718.40 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:26:16,020 INFO [train.py:715] (7/8) Epoch 15, batch 21650, loss[loss=0.1477, simple_loss=0.209, pruned_loss=0.0432, over 4901.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02969, over 972740.80 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 
11:26:54,239 INFO [train.py:715] (7/8) Epoch 15, batch 21700, loss[loss=0.1253, simple_loss=0.1897, pruned_loss=0.03041, over 4899.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02979, over 973369.51 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:27:32,377 INFO [train.py:715] (7/8) Epoch 15, batch 21750, loss[loss=0.1329, simple_loss=0.21, pruned_loss=0.02788, over 4904.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03031, over 972798.61 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:28:10,476 INFO [train.py:715] (7/8) Epoch 15, batch 21800, loss[loss=0.1384, simple_loss=0.2181, pruned_loss=0.02936, over 4742.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.0301, over 972916.20 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:28:48,403 INFO [train.py:715] (7/8) Epoch 15, batch 21850, loss[loss=0.1244, simple_loss=0.2019, pruned_loss=0.02341, over 4774.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03029, over 972734.34 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:29:29,585 INFO [train.py:715] (7/8) Epoch 15, batch 21900, loss[loss=0.1548, simple_loss=0.2305, pruned_loss=0.03953, over 4896.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03003, over 972620.92 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:30:08,736 INFO [train.py:715] (7/8) Epoch 15, batch 21950, loss[loss=0.1517, simple_loss=0.239, pruned_loss=0.03217, over 4976.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02986, over 972226.54 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:30:47,269 INFO [train.py:715] (7/8) Epoch 15, batch 22000, loss[loss=0.1302, simple_loss=0.1974, pruned_loss=0.03149, over 4829.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.02986, over 972391.44 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:31:25,790 INFO [train.py:715] (7/8) Epoch 15, batch 22050, loss[loss=0.1361, simple_loss=0.1989, pruned_loss=0.03667, over 4965.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2067, pruned_loss=0.02999, over 972628.99 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:32:05,113 INFO [train.py:715] (7/8) Epoch 15, batch 22100, loss[loss=0.1138, simple_loss=0.1828, pruned_loss=0.02243, over 4982.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02993, over 972301.44 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:32:43,915 INFO [train.py:715] (7/8) Epoch 15, batch 22150, loss[loss=0.1645, simple_loss=0.2285, pruned_loss=0.0503, over 4854.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2077, pruned_loss=0.03053, over 972253.08 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:33:22,285 INFO [train.py:715] (7/8) Epoch 15, batch 22200, loss[loss=0.1375, simple_loss=0.2228, pruned_loss=0.02608, over 4854.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03019, over 971925.08 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:34:01,327 INFO [train.py:715] (7/8) Epoch 15, batch 22250, loss[loss=0.1567, simple_loss=0.2287, pruned_loss=0.04232, over 4860.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03026, over 972067.37 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:34:40,258 INFO [train.py:715] (7/8) Epoch 15, batch 22300, loss[loss=0.1423, simple_loss=0.2133, pruned_loss=0.03571, over 4864.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02996, over 972928.18 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 
11:35:18,795 INFO [train.py:715] (7/8) Epoch 15, batch 22350, loss[loss=0.1278, simple_loss=0.2058, pruned_loss=0.0249, over 4692.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02965, over 972257.72 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:35:57,366 INFO [train.py:715] (7/8) Epoch 15, batch 22400, loss[loss=0.1622, simple_loss=0.2328, pruned_loss=0.04581, over 4911.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02957, over 972014.39 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:36:36,629 INFO [train.py:715] (7/8) Epoch 15, batch 22450, loss[loss=0.1169, simple_loss=0.1911, pruned_loss=0.02132, over 4910.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02925, over 972844.73 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:37:15,518 INFO [train.py:715] (7/8) Epoch 15, batch 22500, loss[loss=0.1371, simple_loss=0.2002, pruned_loss=0.03698, over 4771.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02942, over 972393.64 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:37:54,242 INFO [train.py:715] (7/8) Epoch 15, batch 22550, loss[loss=0.1225, simple_loss=0.1863, pruned_loss=0.02939, over 4817.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 973032.46 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:38:32,798 INFO [train.py:715] (7/8) Epoch 15, batch 22600, loss[loss=0.1432, simple_loss=0.2098, pruned_loss=0.03831, over 4695.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02944, over 972094.49 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:39:11,718 INFO [train.py:715] (7/8) Epoch 15, batch 22650, loss[loss=0.1368, simple_loss=0.2026, pruned_loss=0.03547, over 4872.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02985, over 972495.41 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:39:50,375 INFO [train.py:715] (7/8) Epoch 15, batch 22700, loss[loss=0.1174, simple_loss=0.188, pruned_loss=0.02341, over 4971.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02958, over 972557.79 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:40:29,112 INFO [train.py:715] (7/8) Epoch 15, batch 22750, loss[loss=0.1268, simple_loss=0.2159, pruned_loss=0.01883, over 4798.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.0298, over 971954.71 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:41:08,456 INFO [train.py:715] (7/8) Epoch 15, batch 22800, loss[loss=0.1244, simple_loss=0.2077, pruned_loss=0.0206, over 4875.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 972629.16 frames.], batch size: 16, lr: 1.46e-04 +2022-05-08 11:41:47,281 INFO [train.py:715] (7/8) Epoch 15, batch 22850, loss[loss=0.1177, simple_loss=0.188, pruned_loss=0.02365, over 4905.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02937, over 973126.32 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 11:42:26,023 INFO [train.py:715] (7/8) Epoch 15, batch 22900, loss[loss=0.1508, simple_loss=0.2205, pruned_loss=0.0405, over 4774.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02975, over 971310.50 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:43:05,209 INFO [train.py:715] (7/8) Epoch 15, batch 22950, loss[loss=0.1067, simple_loss=0.1854, pruned_loss=0.01394, over 4847.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02925, over 971548.50 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:43:43,828 
INFO [train.py:715] (7/8) Epoch 15, batch 23000, loss[loss=0.1324, simple_loss=0.2015, pruned_loss=0.03168, over 4763.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02931, over 971744.47 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:44:22,238 INFO [train.py:715] (7/8) Epoch 15, batch 23050, loss[loss=0.132, simple_loss=0.2147, pruned_loss=0.02466, over 4897.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02952, over 972270.84 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:45:00,627 INFO [train.py:715] (7/8) Epoch 15, batch 23100, loss[loss=0.1321, simple_loss=0.2079, pruned_loss=0.02814, over 4820.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02939, over 970994.97 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 11:45:39,480 INFO [train.py:715] (7/8) Epoch 15, batch 23150, loss[loss=0.124, simple_loss=0.2019, pruned_loss=0.02303, over 4806.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.0297, over 971668.32 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 11:46:17,452 INFO [train.py:715] (7/8) Epoch 15, batch 23200, loss[loss=0.1115, simple_loss=0.1849, pruned_loss=0.01904, over 4774.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.0303, over 972012.95 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 11:46:55,708 INFO [train.py:715] (7/8) Epoch 15, batch 23250, loss[loss=0.1221, simple_loss=0.1989, pruned_loss=0.02259, over 4769.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03019, over 971558.59 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:47:34,387 INFO [train.py:715] (7/8) Epoch 15, batch 23300, loss[loss=0.1252, simple_loss=0.2068, pruned_loss=0.02174, over 4827.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02978, over 971799.38 frames.], batch size: 27, lr: 1.46e-04 +2022-05-08 11:48:12,428 INFO [train.py:715] (7/8) Epoch 15, batch 23350, loss[loss=0.1442, simple_loss=0.2338, pruned_loss=0.02734, over 4772.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02995, over 971648.32 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 11:48:50,740 INFO [train.py:715] (7/8) Epoch 15, batch 23400, loss[loss=0.126, simple_loss=0.1957, pruned_loss=0.0281, over 4977.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02991, over 972021.44 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:49:28,562 INFO [train.py:715] (7/8) Epoch 15, batch 23450, loss[loss=0.1352, simple_loss=0.2143, pruned_loss=0.02808, over 4843.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.02995, over 972523.33 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:50:07,085 INFO [train.py:715] (7/8) Epoch 15, batch 23500, loss[loss=0.132, simple_loss=0.2087, pruned_loss=0.02766, over 4837.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.0297, over 971657.23 frames.], batch size: 32, lr: 1.46e-04 +2022-05-08 11:50:44,859 INFO [train.py:715] (7/8) Epoch 15, batch 23550, loss[loss=0.1228, simple_loss=0.1938, pruned_loss=0.02592, over 4845.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02968, over 971820.09 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 11:51:22,854 INFO [train.py:715] (7/8) Epoch 15, batch 23600, loss[loss=0.1319, simple_loss=0.2025, pruned_loss=0.03066, over 4934.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2074, pruned_loss=0.03017, over 972089.00 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:52:01,278 INFO 
[train.py:715] (7/8) Epoch 15, batch 23650, loss[loss=0.1077, simple_loss=0.185, pruned_loss=0.01513, over 4803.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.02994, over 972311.49 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 11:52:39,166 INFO [train.py:715] (7/8) Epoch 15, batch 23700, loss[loss=0.1247, simple_loss=0.1916, pruned_loss=0.02886, over 4822.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2071, pruned_loss=0.02999, over 972047.36 frames.], batch size: 25, lr: 1.46e-04 +2022-05-08 11:53:17,232 INFO [train.py:715] (7/8) Epoch 15, batch 23750, loss[loss=0.1243, simple_loss=0.1983, pruned_loss=0.02511, over 4954.00 frames.], tot_loss[loss=0.1335, simple_loss=0.207, pruned_loss=0.02998, over 972240.99 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 11:53:55,058 INFO [train.py:715] (7/8) Epoch 15, batch 23800, loss[loss=0.1314, simple_loss=0.1993, pruned_loss=0.03175, over 4983.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.02993, over 972187.32 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:54:33,046 INFO [train.py:715] (7/8) Epoch 15, batch 23850, loss[loss=0.1608, simple_loss=0.2284, pruned_loss=0.0466, over 4853.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02976, over 972496.28 frames.], batch size: 20, lr: 1.46e-04 +2022-05-08 11:55:11,363 INFO [train.py:715] (7/8) Epoch 15, batch 23900, loss[loss=0.112, simple_loss=0.1916, pruned_loss=0.01619, over 4880.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02965, over 972933.74 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 11:55:48,900 INFO [train.py:715] (7/8) Epoch 15, batch 23950, loss[loss=0.1304, simple_loss=0.2092, pruned_loss=0.02585, over 4925.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03008, over 973123.12 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 11:56:27,447 INFO [train.py:715] (7/8) Epoch 15, batch 24000, loss[loss=0.128, simple_loss=0.1816, pruned_loss=0.03719, over 4806.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.0304, over 973486.64 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 11:56:27,448 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 11:56:37,034 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.105, simple_loss=0.1886, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-08 11:57:15,621 INFO [train.py:715] (7/8) Epoch 15, batch 24050, loss[loss=0.1233, simple_loss=0.2088, pruned_loss=0.01887, over 4926.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03043, over 973769.53 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 11:57:54,186 INFO [train.py:715] (7/8) Epoch 15, batch 24100, loss[loss=0.1322, simple_loss=0.2058, pruned_loss=0.02932, over 4934.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02989, over 973549.92 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 11:58:32,191 INFO [train.py:715] (7/8) Epoch 15, batch 24150, loss[loss=0.1396, simple_loss=0.22, pruned_loss=0.0296, over 4845.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02968, over 972776.68 frames.], batch size: 30, lr: 1.46e-04 +2022-05-08 11:59:10,406 INFO [train.py:715] (7/8) Epoch 15, batch 24200, loss[loss=0.1289, simple_loss=0.1956, pruned_loss=0.03108, over 4834.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02997, over 972970.90 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 11:59:48,420 INFO [train.py:715] (7/8) Epoch 15, batch 24250, loss[loss=0.1192, simple_loss=0.1973, pruned_loss=0.02054, over 4761.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.0301, over 972594.68 frames.], batch size: 19, lr: 1.46e-04 +2022-05-08 12:00:26,747 INFO [train.py:715] (7/8) Epoch 15, batch 24300, loss[loss=0.1348, simple_loss=0.2026, pruned_loss=0.03348, over 4759.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02966, over 973343.62 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 12:01:03,897 INFO [train.py:715] (7/8) Epoch 15, batch 24350, loss[loss=0.1092, simple_loss=0.1875, pruned_loss=0.01544, over 4929.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02989, over 973696.27 frames.], batch size: 23, lr: 1.46e-04 +2022-05-08 12:01:42,321 INFO [train.py:715] (7/8) Epoch 15, batch 24400, loss[loss=0.1541, simple_loss=0.2351, pruned_loss=0.03661, over 4931.00 frames.], tot_loss[loss=0.1345, simple_loss=0.209, pruned_loss=0.03002, over 972935.45 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 12:02:20,845 INFO [train.py:715] (7/8) Epoch 15, batch 24450, loss[loss=0.1183, simple_loss=0.2018, pruned_loss=0.01737, over 4931.00 frames.], tot_loss[loss=0.1344, simple_loss=0.209, pruned_loss=0.02987, over 973566.01 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 12:02:58,825 INFO [train.py:715] (7/8) Epoch 15, batch 24500, loss[loss=0.1511, simple_loss=0.2256, pruned_loss=0.03829, over 4956.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03003, over 973441.76 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 12:03:36,485 INFO [train.py:715] (7/8) Epoch 15, batch 24550, loss[loss=0.1464, simple_loss=0.2299, pruned_loss=0.03144, over 4966.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02988, over 973537.61 frames.], batch size: 35, lr: 1.46e-04 +2022-05-08 12:04:14,727 INFO [train.py:715] (7/8) Epoch 15, batch 24600, loss[loss=0.1539, simple_loss=0.2362, pruned_loss=0.03582, over 4948.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.0302, over 974019.67 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:04:53,493 INFO [train.py:715] (7/8) Epoch 15, batch 24650, loss[loss=0.1271, simple_loss=0.1924, pruned_loss=0.03085, over 4939.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03012, over 973654.64 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 
12:05:31,175 INFO [train.py:715] (7/8) Epoch 15, batch 24700, loss[loss=0.1238, simple_loss=0.1981, pruned_loss=0.02473, over 4839.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02984, over 973761.18 frames.], batch size: 13, lr: 1.46e-04 +2022-05-08 12:06:09,580 INFO [train.py:715] (7/8) Epoch 15, batch 24750, loss[loss=0.1363, simple_loss=0.2149, pruned_loss=0.02884, over 4917.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03004, over 973140.92 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 12:06:47,909 INFO [train.py:715] (7/8) Epoch 15, batch 24800, loss[loss=0.1454, simple_loss=0.2273, pruned_loss=0.03171, over 4934.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02992, over 973490.95 frames.], batch size: 29, lr: 1.46e-04 +2022-05-08 12:07:25,659 INFO [train.py:715] (7/8) Epoch 15, batch 24850, loss[loss=0.1408, simple_loss=0.2014, pruned_loss=0.04014, over 4822.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03058, over 973763.32 frames.], batch size: 12, lr: 1.46e-04 +2022-05-08 12:08:03,592 INFO [train.py:715] (7/8) Epoch 15, batch 24900, loss[loss=0.1533, simple_loss=0.2231, pruned_loss=0.04177, over 4790.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03007, over 972909.00 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:08:41,838 INFO [train.py:715] (7/8) Epoch 15, batch 24950, loss[loss=0.1576, simple_loss=0.2284, pruned_loss=0.04343, over 4909.00 frames.], tot_loss[loss=0.135, simple_loss=0.209, pruned_loss=0.03043, over 973282.32 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 12:09:20,949 INFO [train.py:715] (7/8) Epoch 15, batch 25000, loss[loss=0.1283, simple_loss=0.2024, pruned_loss=0.02711, over 4946.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02989, over 973804.63 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:09:58,499 INFO [train.py:715] (7/8) Epoch 15, batch 25050, loss[loss=0.1261, simple_loss=0.2037, pruned_loss=0.02424, over 4968.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2093, pruned_loss=0.03026, over 973926.33 frames.], batch size: 15, lr: 1.46e-04 +2022-05-08 12:10:36,536 INFO [train.py:715] (7/8) Epoch 15, batch 25100, loss[loss=0.1053, simple_loss=0.1902, pruned_loss=0.01021, over 4976.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03014, over 972960.56 frames.], batch size: 14, lr: 1.46e-04 +2022-05-08 12:11:14,987 INFO [train.py:715] (7/8) Epoch 15, batch 25150, loss[loss=0.145, simple_loss=0.221, pruned_loss=0.03452, over 4897.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03036, over 972259.61 frames.], batch size: 17, lr: 1.46e-04 +2022-05-08 12:11:53,003 INFO [train.py:715] (7/8) Epoch 15, batch 25200, loss[loss=0.1383, simple_loss=0.2101, pruned_loss=0.03326, over 4867.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03016, over 972054.70 frames.], batch size: 22, lr: 1.46e-04 +2022-05-08 12:12:30,795 INFO [train.py:715] (7/8) Epoch 15, batch 25250, loss[loss=0.1436, simple_loss=0.2199, pruned_loss=0.03364, over 4793.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03021, over 971869.43 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:13:09,120 INFO [train.py:715] (7/8) Epoch 15, batch 25300, loss[loss=0.1203, simple_loss=0.194, pruned_loss=0.02331, over 4966.00 frames.], tot_loss[loss=0.135, simple_loss=0.2084, pruned_loss=0.03083, over 971562.24 frames.], batch size: 24, lr: 1.46e-04 +2022-05-08 12:13:47,201 
INFO [train.py:715] (7/8) Epoch 15, batch 25350, loss[loss=0.1445, simple_loss=0.2142, pruned_loss=0.03738, over 4934.00 frames.], tot_loss[loss=0.135, simple_loss=0.2085, pruned_loss=0.03075, over 971583.24 frames.], batch size: 39, lr: 1.46e-04 +2022-05-08 12:14:24,744 INFO [train.py:715] (7/8) Epoch 15, batch 25400, loss[loss=0.1235, simple_loss=0.2004, pruned_loss=0.02324, over 4804.00 frames.], tot_loss[loss=0.135, simple_loss=0.2087, pruned_loss=0.03066, over 971787.63 frames.], batch size: 21, lr: 1.46e-04 +2022-05-08 12:15:02,818 INFO [train.py:715] (7/8) Epoch 15, batch 25450, loss[loss=0.136, simple_loss=0.2165, pruned_loss=0.0278, over 4983.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03025, over 972172.32 frames.], batch size: 31, lr: 1.46e-04 +2022-05-08 12:15:41,206 INFO [train.py:715] (7/8) Epoch 15, batch 25500, loss[loss=0.1425, simple_loss=0.2125, pruned_loss=0.03628, over 4766.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03051, over 972242.06 frames.], batch size: 18, lr: 1.46e-04 +2022-05-08 12:16:18,763 INFO [train.py:715] (7/8) Epoch 15, batch 25550, loss[loss=0.1345, simple_loss=0.2106, pruned_loss=0.02924, over 4932.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03041, over 972544.37 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:16:56,913 INFO [train.py:715] (7/8) Epoch 15, batch 25600, loss[loss=0.1193, simple_loss=0.1921, pruned_loss=0.02318, over 4988.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2086, pruned_loss=0.03037, over 971809.29 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:17:35,537 INFO [train.py:715] (7/8) Epoch 15, batch 25650, loss[loss=0.1234, simple_loss=0.2084, pruned_loss=0.01923, over 4820.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03025, over 972525.30 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:18:13,815 INFO [train.py:715] (7/8) Epoch 15, batch 25700, loss[loss=0.145, simple_loss=0.2317, pruned_loss=0.02914, over 4867.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2087, pruned_loss=0.03075, over 972061.82 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 12:18:51,201 INFO [train.py:715] (7/8) Epoch 15, batch 25750, loss[loss=0.1257, simple_loss=0.1981, pruned_loss=0.02661, over 4924.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03036, over 972014.87 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 12:19:29,349 INFO [train.py:715] (7/8) Epoch 15, batch 25800, loss[loss=0.136, simple_loss=0.2184, pruned_loss=0.02677, over 4926.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02996, over 972943.45 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 12:20:07,974 INFO [train.py:715] (7/8) Epoch 15, batch 25850, loss[loss=0.1708, simple_loss=0.2448, pruned_loss=0.04836, over 4964.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02973, over 972962.29 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:20:45,417 INFO [train.py:715] (7/8) Epoch 15, batch 25900, loss[loss=0.1598, simple_loss=0.2404, pruned_loss=0.03962, over 4984.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.03002, over 972264.53 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 12:21:24,013 INFO [train.py:715] (7/8) Epoch 15, batch 25950, loss[loss=0.1258, simple_loss=0.1984, pruned_loss=0.02658, over 4708.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02992, over 972198.13 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:22:02,166 INFO 
[train.py:715] (7/8) Epoch 15, batch 26000, loss[loss=0.1329, simple_loss=0.2053, pruned_loss=0.03028, over 4773.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03012, over 972295.12 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 12:22:39,853 INFO [train.py:715] (7/8) Epoch 15, batch 26050, loss[loss=0.1437, simple_loss=0.2092, pruned_loss=0.03907, over 4947.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03028, over 972783.00 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 12:23:17,644 INFO [train.py:715] (7/8) Epoch 15, batch 26100, loss[loss=0.1424, simple_loss=0.2116, pruned_loss=0.03666, over 4875.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02992, over 973182.31 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:23:56,076 INFO [train.py:715] (7/8) Epoch 15, batch 26150, loss[loss=0.1719, simple_loss=0.2517, pruned_loss=0.04609, over 4961.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2086, pruned_loss=0.03058, over 971846.03 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 12:24:33,867 INFO [train.py:715] (7/8) Epoch 15, batch 26200, loss[loss=0.1701, simple_loss=0.2369, pruned_loss=0.05168, over 4898.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2081, pruned_loss=0.03043, over 971170.35 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 12:25:11,685 INFO [train.py:715] (7/8) Epoch 15, batch 26250, loss[loss=0.1377, simple_loss=0.2174, pruned_loss=0.029, over 4806.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03007, over 971386.87 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 12:25:50,003 INFO [train.py:715] (7/8) Epoch 15, batch 26300, loss[loss=0.1356, simple_loss=0.2106, pruned_loss=0.03031, over 4846.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03004, over 971297.58 frames.], batch size: 34, lr: 1.45e-04 +2022-05-08 12:26:28,459 INFO [train.py:715] (7/8) Epoch 15, batch 26350, loss[loss=0.1325, simple_loss=0.2109, pruned_loss=0.02708, over 4777.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03002, over 970970.85 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 12:27:06,218 INFO [train.py:715] (7/8) Epoch 15, batch 26400, loss[loss=0.1335, simple_loss=0.22, pruned_loss=0.02355, over 4815.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02976, over 971312.80 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:27:44,350 INFO [train.py:715] (7/8) Epoch 15, batch 26450, loss[loss=0.1376, simple_loss=0.2135, pruned_loss=0.03085, over 4911.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03039, over 972134.76 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 12:28:22,636 INFO [train.py:715] (7/8) Epoch 15, batch 26500, loss[loss=0.1371, simple_loss=0.2078, pruned_loss=0.03323, over 4834.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03028, over 972705.66 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:29:00,418 INFO [train.py:715] (7/8) Epoch 15, batch 26550, loss[loss=0.1265, simple_loss=0.2068, pruned_loss=0.0231, over 4806.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2089, pruned_loss=0.03005, over 971617.20 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 12:29:38,155 INFO [train.py:715] (7/8) Epoch 15, batch 26600, loss[loss=0.1462, simple_loss=0.2201, pruned_loss=0.03611, over 4883.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03024, over 970812.29 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 12:30:16,187 INFO [train.py:715] 
(7/8) Epoch 15, batch 26650, loss[loss=0.1456, simple_loss=0.2016, pruned_loss=0.04479, over 4706.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03018, over 971280.94 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:30:54,318 INFO [train.py:715] (7/8) Epoch 15, batch 26700, loss[loss=0.1279, simple_loss=0.1989, pruned_loss=0.02846, over 4783.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2089, pruned_loss=0.03035, over 971275.52 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:31:31,944 INFO [train.py:715] (7/8) Epoch 15, batch 26750, loss[loss=0.1624, simple_loss=0.2437, pruned_loss=0.04054, over 4953.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02996, over 971955.92 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 12:32:10,363 INFO [train.py:715] (7/8) Epoch 15, batch 26800, loss[loss=0.138, simple_loss=0.2063, pruned_loss=0.03479, over 4783.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03005, over 971511.11 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:32:48,680 INFO [train.py:715] (7/8) Epoch 15, batch 26850, loss[loss=0.1352, simple_loss=0.2127, pruned_loss=0.02886, over 4751.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03056, over 971631.31 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:33:26,761 INFO [train.py:715] (7/8) Epoch 15, batch 26900, loss[loss=0.1224, simple_loss=0.2064, pruned_loss=0.01922, over 4889.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.0303, over 972294.28 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 12:34:04,500 INFO [train.py:715] (7/8) Epoch 15, batch 26950, loss[loss=0.1081, simple_loss=0.1707, pruned_loss=0.02273, over 4976.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03035, over 972944.89 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:34:42,584 INFO [train.py:715] (7/8) Epoch 15, batch 27000, loss[loss=0.1378, simple_loss=0.2197, pruned_loss=0.02799, over 4897.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03032, over 973437.70 frames.], batch size: 22, lr: 1.45e-04 +2022-05-08 12:34:42,584 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 12:34:52,204 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1049, simple_loss=0.1884, pruned_loss=0.01064, over 914524.00 frames. 
+2022-05-08 12:35:31,296 INFO [train.py:715] (7/8) Epoch 15, batch 27050, loss[loss=0.1464, simple_loss=0.2206, pruned_loss=0.0361, over 4847.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03024, over 973041.34 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 12:36:10,022 INFO [train.py:715] (7/8) Epoch 15, batch 27100, loss[loss=0.1188, simple_loss=0.1915, pruned_loss=0.02307, over 4810.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2086, pruned_loss=0.03023, over 972998.86 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 12:36:48,673 INFO [train.py:715] (7/8) Epoch 15, batch 27150, loss[loss=0.1232, simple_loss=0.1985, pruned_loss=0.02392, over 4697.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03061, over 973058.80 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:37:26,866 INFO [train.py:715] (7/8) Epoch 15, batch 27200, loss[loss=0.1419, simple_loss=0.2171, pruned_loss=0.0333, over 4895.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.0302, over 973813.97 frames.], batch size: 22, lr: 1.45e-04 +2022-05-08 12:38:05,902 INFO [train.py:715] (7/8) Epoch 15, batch 27250, loss[loss=0.1412, simple_loss=0.2117, pruned_loss=0.0353, over 4982.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02995, over 973758.11 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:38:43,693 INFO [train.py:715] (7/8) Epoch 15, batch 27300, loss[loss=0.1329, simple_loss=0.1997, pruned_loss=0.03307, over 4773.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02977, over 973047.06 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:39:21,922 INFO [train.py:715] (7/8) Epoch 15, batch 27350, loss[loss=0.1245, simple_loss=0.196, pruned_loss=0.02656, over 4643.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02973, over 973236.86 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 12:40:00,096 INFO [train.py:715] (7/8) Epoch 15, batch 27400, loss[loss=0.1336, simple_loss=0.2058, pruned_loss=0.03064, over 4804.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02939, over 972962.20 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 12:40:38,394 INFO [train.py:715] (7/8) Epoch 15, batch 27450, loss[loss=0.1284, simple_loss=0.2013, pruned_loss=0.0278, over 4815.00 frames.], tot_loss[loss=0.1334, simple_loss=0.207, pruned_loss=0.02986, over 971554.05 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 12:41:16,659 INFO [train.py:715] (7/8) Epoch 15, batch 27500, loss[loss=0.1859, simple_loss=0.2548, pruned_loss=0.0585, over 4915.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02953, over 972566.90 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 12:41:54,848 INFO [train.py:715] (7/8) Epoch 15, batch 27550, loss[loss=0.1544, simple_loss=0.2227, pruned_loss=0.04306, over 4839.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02983, over 972617.62 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 12:42:33,401 INFO [train.py:715] (7/8) Epoch 15, batch 27600, loss[loss=0.1055, simple_loss=0.1805, pruned_loss=0.01523, over 4790.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.0296, over 972941.21 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 12:43:10,758 INFO [train.py:715] (7/8) Epoch 15, batch 27650, loss[loss=0.1378, simple_loss=0.2178, pruned_loss=0.02886, over 4929.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02946, over 972840.26 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 
12:43:49,456 INFO [train.py:715] (7/8) Epoch 15, batch 27700, loss[loss=0.1237, simple_loss=0.1921, pruned_loss=0.0277, over 4813.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02947, over 972824.77 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:44:27,757 INFO [train.py:715] (7/8) Epoch 15, batch 27750, loss[loss=0.1329, simple_loss=0.2093, pruned_loss=0.0282, over 4851.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.0295, over 973269.90 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:45:06,222 INFO [train.py:715] (7/8) Epoch 15, batch 27800, loss[loss=0.1072, simple_loss=0.1731, pruned_loss=0.02067, over 4799.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2061, pruned_loss=0.02943, over 972455.67 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:45:44,233 INFO [train.py:715] (7/8) Epoch 15, batch 27850, loss[loss=0.1253, simple_loss=0.1947, pruned_loss=0.02794, over 4805.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02987, over 972605.37 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 12:46:21,974 INFO [train.py:715] (7/8) Epoch 15, batch 27900, loss[loss=0.1265, simple_loss=0.2105, pruned_loss=0.02125, over 4783.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02979, over 971974.16 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:47:00,798 INFO [train.py:715] (7/8) Epoch 15, batch 27950, loss[loss=0.1098, simple_loss=0.1839, pruned_loss=0.01783, over 4795.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03002, over 971617.75 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:47:38,667 INFO [train.py:715] (7/8) Epoch 15, batch 28000, loss[loss=0.1466, simple_loss=0.2038, pruned_loss=0.04473, over 4809.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02977, over 972322.69 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 12:48:16,880 INFO [train.py:715] (7/8) Epoch 15, batch 28050, loss[loss=0.1108, simple_loss=0.189, pruned_loss=0.01624, over 4924.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02989, over 972603.60 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 12:48:55,112 INFO [train.py:715] (7/8) Epoch 15, batch 28100, loss[loss=0.1301, simple_loss=0.219, pruned_loss=0.02061, over 4993.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02964, over 972881.87 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:49:33,364 INFO [train.py:715] (7/8) Epoch 15, batch 28150, loss[loss=0.1749, simple_loss=0.2422, pruned_loss=0.05384, over 4943.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02968, over 973022.79 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 12:50:11,124 INFO [train.py:715] (7/8) Epoch 15, batch 28200, loss[loss=0.1603, simple_loss=0.2328, pruned_loss=0.0439, over 4859.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03015, over 973799.02 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 12:50:49,027 INFO [train.py:715] (7/8) Epoch 15, batch 28250, loss[loss=0.1452, simple_loss=0.216, pruned_loss=0.03721, over 4973.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.0301, over 972997.99 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:51:28,180 INFO [train.py:715] (7/8) Epoch 15, batch 28300, loss[loss=0.155, simple_loss=0.2254, pruned_loss=0.04235, over 4905.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03035, over 972635.70 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 12:52:05,677 
INFO [train.py:715] (7/8) Epoch 15, batch 28350, loss[loss=0.1747, simple_loss=0.2452, pruned_loss=0.05206, over 4895.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03034, over 972348.03 frames.], batch size: 39, lr: 1.45e-04 +2022-05-08 12:52:43,905 INFO [train.py:715] (7/8) Epoch 15, batch 28400, loss[loss=0.163, simple_loss=0.2276, pruned_loss=0.04926, over 4861.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02999, over 972455.87 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 12:53:22,225 INFO [train.py:715] (7/8) Epoch 15, batch 28450, loss[loss=0.1386, simple_loss=0.2142, pruned_loss=0.03146, over 4827.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02991, over 972198.97 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:54:00,374 INFO [train.py:715] (7/8) Epoch 15, batch 28500, loss[loss=0.16, simple_loss=0.2371, pruned_loss=0.04145, over 4966.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02965, over 972203.14 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 12:54:38,502 INFO [train.py:715] (7/8) Epoch 15, batch 28550, loss[loss=0.1481, simple_loss=0.228, pruned_loss=0.03406, over 4987.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.0297, over 972715.71 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 12:55:16,669 INFO [train.py:715] (7/8) Epoch 15, batch 28600, loss[loss=0.1546, simple_loss=0.2147, pruned_loss=0.04728, over 4975.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02958, over 973408.28 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:55:55,086 INFO [train.py:715] (7/8) Epoch 15, batch 28650, loss[loss=0.1055, simple_loss=0.1851, pruned_loss=0.01298, over 4805.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.0292, over 973822.02 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 12:56:32,942 INFO [train.py:715] (7/8) Epoch 15, batch 28700, loss[loss=0.1331, simple_loss=0.1994, pruned_loss=0.0334, over 4963.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2062, pruned_loss=0.0294, over 974005.71 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 12:57:11,383 INFO [train.py:715] (7/8) Epoch 15, batch 28750, loss[loss=0.1012, simple_loss=0.1707, pruned_loss=0.01581, over 4974.00 frames.], tot_loss[loss=0.132, simple_loss=0.2055, pruned_loss=0.0292, over 973499.12 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:57:50,113 INFO [train.py:715] (7/8) Epoch 15, batch 28800, loss[loss=0.1425, simple_loss=0.216, pruned_loss=0.03446, over 4888.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.02986, over 973246.17 frames.], batch size: 22, lr: 1.45e-04 +2022-05-08 12:58:28,472 INFO [train.py:715] (7/8) Epoch 15, batch 28850, loss[loss=0.1317, simple_loss=0.2038, pruned_loss=0.0298, over 4781.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03002, over 973132.06 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 12:59:06,965 INFO [train.py:715] (7/8) Epoch 15, batch 28900, loss[loss=0.1715, simple_loss=0.2387, pruned_loss=0.05211, over 4922.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.03038, over 972924.12 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 12:59:45,681 INFO [train.py:715] (7/8) Epoch 15, batch 28950, loss[loss=0.1076, simple_loss=0.1843, pruned_loss=0.01548, over 4978.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2073, pruned_loss=0.03022, over 973074.08 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:00:24,854 INFO [train.py:715] 
(7/8) Epoch 15, batch 29000, loss[loss=0.1482, simple_loss=0.215, pruned_loss=0.04068, over 4875.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03046, over 971940.95 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:01:03,427 INFO [train.py:715] (7/8) Epoch 15, batch 29050, loss[loss=0.1036, simple_loss=0.1701, pruned_loss=0.01856, over 4816.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.03027, over 971635.05 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 13:01:42,350 INFO [train.py:715] (7/8) Epoch 15, batch 29100, loss[loss=0.11, simple_loss=0.1801, pruned_loss=0.01994, over 4966.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.0301, over 972719.15 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:02:21,517 INFO [train.py:715] (7/8) Epoch 15, batch 29150, loss[loss=0.1502, simple_loss=0.2204, pruned_loss=0.03994, over 4746.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03026, over 972773.92 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:03:00,530 INFO [train.py:715] (7/8) Epoch 15, batch 29200, loss[loss=0.1187, simple_loss=0.195, pruned_loss=0.0212, over 4929.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02987, over 971828.65 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:03:38,942 INFO [train.py:715] (7/8) Epoch 15, batch 29250, loss[loss=0.1234, simple_loss=0.2, pruned_loss=0.02342, over 4845.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.0297, over 971888.42 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:04:18,000 INFO [train.py:715] (7/8) Epoch 15, batch 29300, loss[loss=0.1406, simple_loss=0.213, pruned_loss=0.03409, over 4831.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03041, over 972965.43 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:04:56,888 INFO [train.py:715] (7/8) Epoch 15, batch 29350, loss[loss=0.1145, simple_loss=0.1904, pruned_loss=0.01932, over 4981.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.0296, over 972139.27 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:05:35,476 INFO [train.py:715] (7/8) Epoch 15, batch 29400, loss[loss=0.1439, simple_loss=0.2083, pruned_loss=0.03979, over 4757.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02904, over 971360.69 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:06:14,533 INFO [train.py:715] (7/8) Epoch 15, batch 29450, loss[loss=0.1351, simple_loss=0.2071, pruned_loss=0.03148, over 4891.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02922, over 971194.46 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:06:53,808 INFO [train.py:715] (7/8) Epoch 15, batch 29500, loss[loss=0.1409, simple_loss=0.2131, pruned_loss=0.03439, over 4833.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02918, over 972253.88 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:07:31,956 INFO [train.py:715] (7/8) Epoch 15, batch 29550, loss[loss=0.145, simple_loss=0.2229, pruned_loss=0.03351, over 4862.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.0294, over 971995.76 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:08:09,733 INFO [train.py:715] (7/8) Epoch 15, batch 29600, loss[loss=0.1338, simple_loss=0.2068, pruned_loss=0.03036, over 4792.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02988, over 971833.61 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:08:48,779 INFO [train.py:715] (7/8) Epoch 15, 
batch 29650, loss[loss=0.1258, simple_loss=0.1979, pruned_loss=0.02684, over 4837.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03001, over 972211.29 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:09:27,543 INFO [train.py:715] (7/8) Epoch 15, batch 29700, loss[loss=0.1172, simple_loss=0.1899, pruned_loss=0.02227, over 4876.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.0303, over 971978.20 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:10:05,827 INFO [train.py:715] (7/8) Epoch 15, batch 29750, loss[loss=0.1544, simple_loss=0.2178, pruned_loss=0.04551, over 4830.00 frames.], tot_loss[loss=0.1353, simple_loss=0.209, pruned_loss=0.03086, over 972168.80 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 13:10:43,494 INFO [train.py:715] (7/8) Epoch 15, batch 29800, loss[loss=0.1595, simple_loss=0.2279, pruned_loss=0.04553, over 4942.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2093, pruned_loss=0.03082, over 971395.19 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 13:11:22,787 INFO [train.py:715] (7/8) Epoch 15, batch 29850, loss[loss=0.1246, simple_loss=0.2011, pruned_loss=0.02406, over 4793.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2082, pruned_loss=0.03047, over 971945.04 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:12:04,496 INFO [train.py:715] (7/8) Epoch 15, batch 29900, loss[loss=0.1283, simple_loss=0.1902, pruned_loss=0.03324, over 4797.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03079, over 971899.29 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 13:12:43,057 INFO [train.py:715] (7/8) Epoch 15, batch 29950, loss[loss=0.1239, simple_loss=0.2096, pruned_loss=0.01911, over 4872.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03036, over 971628.06 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 13:13:21,388 INFO [train.py:715] (7/8) Epoch 15, batch 30000, loss[loss=0.1167, simple_loss=0.2012, pruned_loss=0.01609, over 4939.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2082, pruned_loss=0.03038, over 972564.28 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 13:13:21,389 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 13:13:30,916 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1049, simple_loss=0.1885, pruned_loss=0.01066, over 914524.00 frames. 
+2022-05-08 13:14:09,969 INFO [train.py:715] (7/8) Epoch 15, batch 30050, loss[loss=0.1465, simple_loss=0.2172, pruned_loss=0.03787, over 4963.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03037, over 972719.57 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:14:49,057 INFO [train.py:715] (7/8) Epoch 15, batch 30100, loss[loss=0.1394, simple_loss=0.2109, pruned_loss=0.03394, over 4855.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03021, over 973332.64 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 13:15:28,216 INFO [train.py:715] (7/8) Epoch 15, batch 30150, loss[loss=0.1213, simple_loss=0.2023, pruned_loss=0.02017, over 4914.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02984, over 972021.76 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:16:07,079 INFO [train.py:715] (7/8) Epoch 15, batch 30200, loss[loss=0.1499, simple_loss=0.2262, pruned_loss=0.03678, over 4937.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02987, over 971669.94 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 13:16:46,375 INFO [train.py:715] (7/8) Epoch 15, batch 30250, loss[loss=0.1211, simple_loss=0.2004, pruned_loss=0.02092, over 4888.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02996, over 972733.53 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:17:25,197 INFO [train.py:715] (7/8) Epoch 15, batch 30300, loss[loss=0.1494, simple_loss=0.2216, pruned_loss=0.03861, over 4880.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.0299, over 972750.98 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:18:03,168 INFO [train.py:715] (7/8) Epoch 15, batch 30350, loss[loss=0.1178, simple_loss=0.1935, pruned_loss=0.02107, over 4648.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03053, over 972737.17 frames.], batch size: 13, lr: 1.45e-04 +2022-05-08 13:18:42,390 INFO [train.py:715] (7/8) Epoch 15, batch 30400, loss[loss=0.128, simple_loss=0.2077, pruned_loss=0.02419, over 4920.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03015, over 973260.59 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:19:21,253 INFO [train.py:715] (7/8) Epoch 15, batch 30450, loss[loss=0.1187, simple_loss=0.1946, pruned_loss=0.02134, over 4909.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03023, over 973284.05 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:20:00,132 INFO [train.py:715] (7/8) Epoch 15, batch 30500, loss[loss=0.1376, simple_loss=0.2133, pruned_loss=0.03096, over 4850.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03014, over 973692.04 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 13:20:38,343 INFO [train.py:715] (7/8) Epoch 15, batch 30550, loss[loss=0.1341, simple_loss=0.2055, pruned_loss=0.03136, over 4974.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2097, pruned_loss=0.03038, over 973180.20 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:21:17,387 INFO [train.py:715] (7/8) Epoch 15, batch 30600, loss[loss=0.1374, simple_loss=0.1939, pruned_loss=0.04044, over 4849.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03011, over 972791.11 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 13:21:56,195 INFO [train.py:715] (7/8) Epoch 15, batch 30650, loss[loss=0.1394, simple_loss=0.2107, pruned_loss=0.03403, over 4785.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2085, pruned_loss=0.03043, over 972025.33 frames.], batch size: 24, lr: 1.45e-04 
+2022-05-08 13:22:34,338 INFO [train.py:715] (7/8) Epoch 15, batch 30700, loss[loss=0.1246, simple_loss=0.2057, pruned_loss=0.02169, over 4797.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.0302, over 971875.82 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:23:13,403 INFO [train.py:715] (7/8) Epoch 15, batch 30750, loss[loss=0.1433, simple_loss=0.2079, pruned_loss=0.03939, over 4753.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03004, over 973192.29 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:23:52,075 INFO [train.py:715] (7/8) Epoch 15, batch 30800, loss[loss=0.1481, simple_loss=0.2179, pruned_loss=0.03917, over 4982.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2074, pruned_loss=0.03008, over 972757.00 frames.], batch size: 33, lr: 1.45e-04 +2022-05-08 13:24:30,180 INFO [train.py:715] (7/8) Epoch 15, batch 30850, loss[loss=0.1239, simple_loss=0.1888, pruned_loss=0.02953, over 4978.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03027, over 972655.64 frames.], batch size: 33, lr: 1.45e-04 +2022-05-08 13:25:08,416 INFO [train.py:715] (7/8) Epoch 15, batch 30900, loss[loss=0.115, simple_loss=0.1863, pruned_loss=0.02182, over 4726.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03012, over 972168.05 frames.], batch size: 12, lr: 1.45e-04 +2022-05-08 13:25:46,856 INFO [train.py:715] (7/8) Epoch 15, batch 30950, loss[loss=0.1206, simple_loss=0.2016, pruned_loss=0.01987, over 4880.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02981, over 971345.72 frames.], batch size: 22, lr: 1.45e-04 +2022-05-08 13:26:25,010 INFO [train.py:715] (7/8) Epoch 15, batch 31000, loss[loss=0.1244, simple_loss=0.1947, pruned_loss=0.02711, over 4978.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03024, over 971999.79 frames.], batch size: 28, lr: 1.45e-04 +2022-05-08 13:27:02,423 INFO [train.py:715] (7/8) Epoch 15, batch 31050, loss[loss=0.1301, simple_loss=0.1955, pruned_loss=0.03234, over 4896.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03004, over 972014.47 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:27:40,734 INFO [train.py:715] (7/8) Epoch 15, batch 31100, loss[loss=0.1158, simple_loss=0.1898, pruned_loss=0.02083, over 4918.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02955, over 972284.77 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:28:18,885 INFO [train.py:715] (7/8) Epoch 15, batch 31150, loss[loss=0.1261, simple_loss=0.1937, pruned_loss=0.02924, over 4939.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.03001, over 971721.91 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:28:57,277 INFO [train.py:715] (7/8) Epoch 15, batch 31200, loss[loss=0.1177, simple_loss=0.1846, pruned_loss=0.02539, over 4928.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02989, over 971900.27 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:29:34,875 INFO [train.py:715] (7/8) Epoch 15, batch 31250, loss[loss=0.1474, simple_loss=0.2215, pruned_loss=0.03664, over 4810.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2088, pruned_loss=0.03031, over 972341.73 frames.], batch size: 25, lr: 1.45e-04 +2022-05-08 13:30:13,197 INFO [train.py:715] (7/8) Epoch 15, batch 31300, loss[loss=0.127, simple_loss=0.1945, pruned_loss=0.02971, over 4977.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03019, over 972533.70 frames.], batch size: 14, lr: 1.45e-04 
+2022-05-08 13:30:51,247 INFO [train.py:715] (7/8) Epoch 15, batch 31350, loss[loss=0.1349, simple_loss=0.2117, pruned_loss=0.02902, over 4954.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 972673.16 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:31:28,511 INFO [train.py:715] (7/8) Epoch 15, batch 31400, loss[loss=0.14, simple_loss=0.2242, pruned_loss=0.02785, over 4817.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.0293, over 972995.70 frames.], batch size: 27, lr: 1.45e-04 +2022-05-08 13:32:06,855 INFO [train.py:715] (7/8) Epoch 15, batch 31450, loss[loss=0.1565, simple_loss=0.2209, pruned_loss=0.04611, over 4779.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03012, over 972395.93 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:32:45,116 INFO [train.py:715] (7/8) Epoch 15, batch 31500, loss[loss=0.1339, simple_loss=0.2126, pruned_loss=0.02761, over 4983.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02991, over 972147.06 frames.], batch size: 31, lr: 1.45e-04 +2022-05-08 13:33:23,445 INFO [train.py:715] (7/8) Epoch 15, batch 31550, loss[loss=0.1294, simple_loss=0.2027, pruned_loss=0.02802, over 4978.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02991, over 972694.85 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:34:01,212 INFO [train.py:715] (7/8) Epoch 15, batch 31600, loss[loss=0.1129, simple_loss=0.1909, pruned_loss=0.01739, over 4834.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03004, over 972724.27 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 13:34:39,668 INFO [train.py:715] (7/8) Epoch 15, batch 31650, loss[loss=0.138, simple_loss=0.2171, pruned_loss=0.02949, over 4985.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.0301, over 973086.85 frames.], batch size: 28, lr: 1.45e-04 +2022-05-08 13:35:18,000 INFO [train.py:715] (7/8) Epoch 15, batch 31700, loss[loss=0.1538, simple_loss=0.2171, pruned_loss=0.04529, over 4965.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03023, over 973098.81 frames.], batch size: 35, lr: 1.45e-04 +2022-05-08 13:35:55,494 INFO [train.py:715] (7/8) Epoch 15, batch 31750, loss[loss=0.1304, simple_loss=0.2077, pruned_loss=0.02652, over 4743.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2078, pruned_loss=0.0303, over 972842.44 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:36:34,377 INFO [train.py:715] (7/8) Epoch 15, batch 31800, loss[loss=0.1549, simple_loss=0.2338, pruned_loss=0.03802, over 4814.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2083, pruned_loss=0.03052, over 972862.60 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:37:12,849 INFO [train.py:715] (7/8) Epoch 15, batch 31850, loss[loss=0.1269, simple_loss=0.1959, pruned_loss=0.02894, over 4693.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03029, over 973290.56 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:37:52,388 INFO [train.py:715] (7/8) Epoch 15, batch 31900, loss[loss=0.1154, simple_loss=0.1934, pruned_loss=0.01873, over 4990.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.0302, over 973718.46 frames.], batch size: 28, lr: 1.45e-04 +2022-05-08 13:38:29,677 INFO [train.py:715] (7/8) Epoch 15, batch 31950, loss[loss=0.1145, simple_loss=0.1829, pruned_loss=0.023, over 4876.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03044, over 972951.43 frames.], batch size: 32, lr: 1.45e-04 +2022-05-08 
13:39:08,340 INFO [train.py:715] (7/8) Epoch 15, batch 32000, loss[loss=0.1523, simple_loss=0.227, pruned_loss=0.03882, over 4704.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03035, over 971079.95 frames.], batch size: 15, lr: 1.45e-04 +2022-05-08 13:39:46,519 INFO [train.py:715] (7/8) Epoch 15, batch 32050, loss[loss=0.1313, simple_loss=0.2085, pruned_loss=0.02702, over 4949.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03034, over 972088.44 frames.], batch size: 29, lr: 1.45e-04 +2022-05-08 13:40:23,946 INFO [train.py:715] (7/8) Epoch 15, batch 32100, loss[loss=0.1625, simple_loss=0.2334, pruned_loss=0.04578, over 4757.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03025, over 972087.94 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:41:02,341 INFO [train.py:715] (7/8) Epoch 15, batch 32150, loss[loss=0.1543, simple_loss=0.2275, pruned_loss=0.04056, over 4927.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2081, pruned_loss=0.03032, over 973121.75 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:41:40,503 INFO [train.py:715] (7/8) Epoch 15, batch 32200, loss[loss=0.1362, simple_loss=0.2103, pruned_loss=0.03106, over 4794.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03002, over 973104.39 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:42:19,022 INFO [train.py:715] (7/8) Epoch 15, batch 32250, loss[loss=0.1637, simple_loss=0.2419, pruned_loss=0.04271, over 4977.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03028, over 973889.66 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:42:56,889 INFO [train.py:715] (7/8) Epoch 15, batch 32300, loss[loss=0.168, simple_loss=0.2364, pruned_loss=0.0498, over 4899.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2076, pruned_loss=0.03024, over 973214.27 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:43:35,758 INFO [train.py:715] (7/8) Epoch 15, batch 32350, loss[loss=0.1098, simple_loss=0.1904, pruned_loss=0.01464, over 4811.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03013, over 973002.83 frames.], batch size: 26, lr: 1.45e-04 +2022-05-08 13:44:14,153 INFO [train.py:715] (7/8) Epoch 15, batch 32400, loss[loss=0.1228, simple_loss=0.1995, pruned_loss=0.02307, over 4749.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02999, over 973006.68 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:44:51,908 INFO [train.py:715] (7/8) Epoch 15, batch 32450, loss[loss=0.1418, simple_loss=0.2055, pruned_loss=0.03907, over 4856.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02974, over 971902.32 frames.], batch size: 30, lr: 1.45e-04 +2022-05-08 13:45:30,472 INFO [train.py:715] (7/8) Epoch 15, batch 32500, loss[loss=0.1505, simple_loss=0.2174, pruned_loss=0.04183, over 4781.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02943, over 972085.70 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:46:08,937 INFO [train.py:715] (7/8) Epoch 15, batch 32550, loss[loss=0.1151, simple_loss=0.1776, pruned_loss=0.02627, over 4774.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.0298, over 971814.50 frames.], batch size: 14, lr: 1.45e-04 +2022-05-08 13:46:47,813 INFO [train.py:715] (7/8) Epoch 15, batch 32600, loss[loss=0.1456, simple_loss=0.2105, pruned_loss=0.04033, over 4930.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02982, over 972469.29 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 
13:47:26,416 INFO [train.py:715] (7/8) Epoch 15, batch 32650, loss[loss=0.1273, simple_loss=0.1976, pruned_loss=0.02853, over 4903.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02962, over 971899.00 frames.], batch size: 19, lr: 1.45e-04 +2022-05-08 13:48:05,086 INFO [train.py:715] (7/8) Epoch 15, batch 32700, loss[loss=0.1301, simple_loss=0.2071, pruned_loss=0.02655, over 4943.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.0299, over 972217.46 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:48:43,312 INFO [train.py:715] (7/8) Epoch 15, batch 32750, loss[loss=0.1314, simple_loss=0.2046, pruned_loss=0.02907, over 4909.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02979, over 972607.36 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:49:21,521 INFO [train.py:715] (7/8) Epoch 15, batch 32800, loss[loss=0.1257, simple_loss=0.1944, pruned_loss=0.02844, over 4727.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02991, over 972180.64 frames.], batch size: 16, lr: 1.45e-04 +2022-05-08 13:49:59,267 INFO [train.py:715] (7/8) Epoch 15, batch 32850, loss[loss=0.1152, simple_loss=0.1897, pruned_loss=0.02033, over 4952.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02954, over 972755.74 frames.], batch size: 21, lr: 1.45e-04 +2022-05-08 13:50:37,485 INFO [train.py:715] (7/8) Epoch 15, batch 32900, loss[loss=0.1188, simple_loss=0.1977, pruned_loss=0.02, over 4970.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02954, over 973279.59 frames.], batch size: 24, lr: 1.45e-04 +2022-05-08 13:51:16,078 INFO [train.py:715] (7/8) Epoch 15, batch 32950, loss[loss=0.1315, simple_loss=0.2072, pruned_loss=0.02794, over 4864.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.0294, over 972505.93 frames.], batch size: 20, lr: 1.45e-04 +2022-05-08 13:51:54,465 INFO [train.py:715] (7/8) Epoch 15, batch 33000, loss[loss=0.1177, simple_loss=0.1837, pruned_loss=0.02583, over 4907.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02919, over 973546.75 frames.], batch size: 17, lr: 1.45e-04 +2022-05-08 13:51:54,466 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 13:52:03,986 INFO [train.py:742] (7/8) Epoch 15, validation: loss=0.1052, simple_loss=0.1886, pruned_loss=0.01088, over 914524.00 frames. 
+2022-05-08 13:52:42,024 INFO [train.py:715] (7/8) Epoch 15, batch 33050, loss[loss=0.1223, simple_loss=0.2073, pruned_loss=0.01864, over 4922.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02971, over 972591.44 frames.], batch size: 23, lr: 1.45e-04 +2022-05-08 13:53:20,375 INFO [train.py:715] (7/8) Epoch 15, batch 33100, loss[loss=0.1292, simple_loss=0.2014, pruned_loss=0.02849, over 4780.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02954, over 972553.51 frames.], batch size: 18, lr: 1.45e-04 +2022-05-08 13:53:58,082 INFO [train.py:715] (7/8) Epoch 15, batch 33150, loss[loss=0.1191, simple_loss=0.2047, pruned_loss=0.01673, over 4793.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02959, over 972472.56 frames.], batch size: 21, lr: 1.44e-04 +2022-05-08 13:54:37,162 INFO [train.py:715] (7/8) Epoch 15, batch 33200, loss[loss=0.1404, simple_loss=0.2107, pruned_loss=0.03507, over 4791.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2073, pruned_loss=0.03015, over 973136.53 frames.], batch size: 18, lr: 1.44e-04 +2022-05-08 13:55:15,595 INFO [train.py:715] (7/8) Epoch 15, batch 33250, loss[loss=0.1385, simple_loss=0.2089, pruned_loss=0.03402, over 4873.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02992, over 973327.99 frames.], batch size: 22, lr: 1.44e-04 +2022-05-08 13:55:53,708 INFO [train.py:715] (7/8) Epoch 15, batch 33300, loss[loss=0.1366, simple_loss=0.2163, pruned_loss=0.02848, over 4700.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02997, over 973262.80 frames.], batch size: 15, lr: 1.44e-04 +2022-05-08 13:56:31,675 INFO [train.py:715] (7/8) Epoch 15, batch 33350, loss[loss=0.1883, simple_loss=0.2525, pruned_loss=0.06205, over 4766.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02966, over 972931.89 frames.], batch size: 19, lr: 1.44e-04 +2022-05-08 13:57:09,333 INFO [train.py:715] (7/8) Epoch 15, batch 33400, loss[loss=0.1281, simple_loss=0.207, pruned_loss=0.02466, over 4878.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.0298, over 972267.31 frames.], batch size: 22, lr: 1.44e-04 +2022-05-08 13:57:47,385 INFO [train.py:715] (7/8) Epoch 15, batch 33450, loss[loss=0.1275, simple_loss=0.1974, pruned_loss=0.02875, over 4975.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.0298, over 971712.81 frames.], batch size: 15, lr: 1.44e-04 +2022-05-08 13:58:25,100 INFO [train.py:715] (7/8) Epoch 15, batch 33500, loss[loss=0.1283, simple_loss=0.1982, pruned_loss=0.02922, over 4631.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2088, pruned_loss=0.02989, over 971753.85 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 13:59:02,924 INFO [train.py:715] (7/8) Epoch 15, batch 33550, loss[loss=0.1552, simple_loss=0.2293, pruned_loss=0.04057, over 4817.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2087, pruned_loss=0.02984, over 971423.50 frames.], batch size: 26, lr: 1.44e-04 +2022-05-08 13:59:40,601 INFO [train.py:715] (7/8) Epoch 15, batch 33600, loss[loss=0.09272, simple_loss=0.1658, pruned_loss=0.0098, over 4723.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2087, pruned_loss=0.0298, over 970852.56 frames.], batch size: 12, lr: 1.44e-04 +2022-05-08 14:00:18,652 INFO [train.py:715] (7/8) Epoch 15, batch 33650, loss[loss=0.1371, simple_loss=0.2076, pruned_loss=0.03334, over 4788.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02935, over 971109.28 frames.], batch size: 14, lr: 1.44e-04 
+2022-05-08 14:00:56,119 INFO [train.py:715] (7/8) Epoch 15, batch 33700, loss[loss=0.1279, simple_loss=0.2, pruned_loss=0.02787, over 4894.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02948, over 971085.63 frames.], batch size: 29, lr: 1.44e-04 +2022-05-08 14:01:33,645 INFO [train.py:715] (7/8) Epoch 15, batch 33750, loss[loss=0.1235, simple_loss=0.2017, pruned_loss=0.02261, over 4938.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02937, over 971883.47 frames.], batch size: 23, lr: 1.44e-04 +2022-05-08 14:02:11,483 INFO [train.py:715] (7/8) Epoch 15, batch 33800, loss[loss=0.1476, simple_loss=0.2147, pruned_loss=0.0403, over 4797.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02946, over 972421.76 frames.], batch size: 24, lr: 1.44e-04 +2022-05-08 14:02:48,674 INFO [train.py:715] (7/8) Epoch 15, batch 33850, loss[loss=0.1196, simple_loss=0.1976, pruned_loss=0.02082, over 4908.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02907, over 972121.32 frames.], batch size: 17, lr: 1.44e-04 +2022-05-08 14:03:26,483 INFO [train.py:715] (7/8) Epoch 15, batch 33900, loss[loss=0.1184, simple_loss=0.1919, pruned_loss=0.02248, over 4645.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02921, over 971137.32 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 14:04:04,822 INFO [train.py:715] (7/8) Epoch 15, batch 33950, loss[loss=0.1212, simple_loss=0.1954, pruned_loss=0.02354, over 4884.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02944, over 971139.16 frames.], batch size: 22, lr: 1.44e-04 +2022-05-08 14:04:42,873 INFO [train.py:715] (7/8) Epoch 15, batch 34000, loss[loss=0.1641, simple_loss=0.2282, pruned_loss=0.04996, over 4904.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.0299, over 971340.28 frames.], batch size: 17, lr: 1.44e-04 +2022-05-08 14:05:20,768 INFO [train.py:715] (7/8) Epoch 15, batch 34050, loss[loss=0.111, simple_loss=0.1752, pruned_loss=0.02342, over 4850.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02988, over 971668.03 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 14:05:58,930 INFO [train.py:715] (7/8) Epoch 15, batch 34100, loss[loss=0.134, simple_loss=0.2127, pruned_loss=0.02766, over 4973.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02978, over 971243.88 frames.], batch size: 25, lr: 1.44e-04 +2022-05-08 14:06:37,188 INFO [train.py:715] (7/8) Epoch 15, batch 34150, loss[loss=0.121, simple_loss=0.1952, pruned_loss=0.02343, over 4845.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02983, over 971252.41 frames.], batch size: 30, lr: 1.44e-04 +2022-05-08 14:07:14,887 INFO [train.py:715] (7/8) Epoch 15, batch 34200, loss[loss=0.1419, simple_loss=0.2039, pruned_loss=0.03993, over 4971.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02973, over 971345.96 frames.], batch size: 31, lr: 1.44e-04 +2022-05-08 14:07:52,718 INFO [train.py:715] (7/8) Epoch 15, batch 34250, loss[loss=0.1322, simple_loss=0.2043, pruned_loss=0.03002, over 4813.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03011, over 971236.71 frames.], batch size: 21, lr: 1.44e-04 +2022-05-08 14:08:30,683 INFO [train.py:715] (7/8) Epoch 15, batch 34300, loss[loss=0.1591, simple_loss=0.2351, pruned_loss=0.04153, over 4874.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03037, over 971781.88 frames.], batch size: 32, lr: 1.44e-04 +2022-05-08 
14:09:08,610 INFO [train.py:715] (7/8) Epoch 15, batch 34350, loss[loss=0.126, simple_loss=0.2003, pruned_loss=0.0259, over 4894.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2082, pruned_loss=0.03023, over 971184.84 frames.], batch size: 19, lr: 1.44e-04 +2022-05-08 14:09:45,971 INFO [train.py:715] (7/8) Epoch 15, batch 34400, loss[loss=0.1293, simple_loss=0.2055, pruned_loss=0.02661, over 4969.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.0303, over 971124.93 frames.], batch size: 24, lr: 1.44e-04 +2022-05-08 14:10:24,172 INFO [train.py:715] (7/8) Epoch 15, batch 34450, loss[loss=0.1135, simple_loss=0.1895, pruned_loss=0.01876, over 4793.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2091, pruned_loss=0.03026, over 971859.02 frames.], batch size: 13, lr: 1.44e-04 +2022-05-08 14:11:02,053 INFO [train.py:715] (7/8) Epoch 15, batch 34500, loss[loss=0.1235, simple_loss=0.1935, pruned_loss=0.0267, over 4980.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03002, over 972246.14 frames.], batch size: 28, lr: 1.44e-04 +2022-05-08 14:11:39,390 INFO [train.py:715] (7/8) Epoch 15, batch 34550, loss[loss=0.1522, simple_loss=0.2157, pruned_loss=0.04437, over 4849.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.0301, over 972048.47 frames.], batch size: 15, lr: 1.44e-04 +2022-05-08 14:12:17,005 INFO [train.py:715] (7/8) Epoch 15, batch 34600, loss[loss=0.1599, simple_loss=0.2343, pruned_loss=0.0427, over 4846.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03042, over 971662.83 frames.], batch size: 32, lr: 1.44e-04 +2022-05-08 14:12:54,929 INFO [train.py:715] (7/8) Epoch 15, batch 34650, loss[loss=0.1334, simple_loss=0.2006, pruned_loss=0.03308, over 4990.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2086, pruned_loss=0.03084, over 971821.22 frames.], batch size: 24, lr: 1.44e-04 +2022-05-08 14:13:32,474 INFO [train.py:715] (7/8) Epoch 15, batch 34700, loss[loss=0.1177, simple_loss=0.1928, pruned_loss=0.02135, over 4687.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03046, over 971567.67 frames.], batch size: 15, lr: 1.44e-04 +2022-05-08 14:14:09,602 INFO [train.py:715] (7/8) Epoch 15, batch 34750, loss[loss=0.1417, simple_loss=0.2193, pruned_loss=0.03201, over 4755.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2088, pruned_loss=0.03084, over 971985.49 frames.], batch size: 16, lr: 1.44e-04 +2022-05-08 14:14:44,841 INFO [train.py:715] (7/8) Epoch 15, batch 34800, loss[loss=0.1496, simple_loss=0.2182, pruned_loss=0.04051, over 4918.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2079, pruned_loss=0.03048, over 972729.80 frames.], batch size: 18, lr: 1.44e-04 +2022-05-08 14:15:33,454 INFO [train.py:715] (7/8) Epoch 16, batch 0, loss[loss=0.1317, simple_loss=0.2104, pruned_loss=0.02648, over 4967.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2104, pruned_loss=0.02648, over 4967.00 frames.], batch size: 39, lr: 1.40e-04 +2022-05-08 14:16:11,643 INFO [train.py:715] (7/8) Epoch 16, batch 50, loss[loss=0.1381, simple_loss=0.2122, pruned_loss=0.03205, over 4936.00 frames.], tot_loss[loss=0.133, simple_loss=0.208, pruned_loss=0.02906, over 219146.87 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:16:50,215 INFO [train.py:715] (7/8) Epoch 16, batch 100, loss[loss=0.1356, simple_loss=0.2165, pruned_loss=0.02736, over 4690.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02841, over 386934.22 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:17:27,946 INFO 
[train.py:715] (7/8) Epoch 16, batch 150, loss[loss=0.1023, simple_loss=0.1821, pruned_loss=0.01123, over 4905.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02843, over 516940.23 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 14:18:06,151 INFO [train.py:715] (7/8) Epoch 16, batch 200, loss[loss=0.1211, simple_loss=0.2028, pruned_loss=0.01969, over 4695.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02906, over 617816.71 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:18:44,279 INFO [train.py:715] (7/8) Epoch 16, batch 250, loss[loss=0.1642, simple_loss=0.2333, pruned_loss=0.04758, over 4956.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02979, over 696968.00 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:19:22,601 INFO [train.py:715] (7/8) Epoch 16, batch 300, loss[loss=0.1336, simple_loss=0.1902, pruned_loss=0.03854, over 4740.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02995, over 757950.04 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:20:01,027 INFO [train.py:715] (7/8) Epoch 16, batch 350, loss[loss=0.1517, simple_loss=0.2243, pruned_loss=0.03952, over 4891.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03, over 805401.32 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:20:38,708 INFO [train.py:715] (7/8) Epoch 16, batch 400, loss[loss=0.145, simple_loss=0.2175, pruned_loss=0.03623, over 4971.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02978, over 842894.72 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:21:17,413 INFO [train.py:715] (7/8) Epoch 16, batch 450, loss[loss=0.1372, simple_loss=0.2105, pruned_loss=0.03195, over 4989.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02945, over 871954.74 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:21:55,827 INFO [train.py:715] (7/8) Epoch 16, batch 500, loss[loss=0.1304, simple_loss=0.2105, pruned_loss=0.02512, over 4932.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02962, over 893906.43 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:22:33,536 INFO [train.py:715] (7/8) Epoch 16, batch 550, loss[loss=0.129, simple_loss=0.1927, pruned_loss=0.03271, over 4866.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03012, over 911014.92 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:23:12,212 INFO [train.py:715] (7/8) Epoch 16, batch 600, loss[loss=0.1451, simple_loss=0.224, pruned_loss=0.0331, over 4950.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02971, over 925390.75 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:23:50,876 INFO [train.py:715] (7/8) Epoch 16, batch 650, loss[loss=0.1529, simple_loss=0.2175, pruned_loss=0.04409, over 4862.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02974, over 935185.08 frames.], batch size: 30, lr: 1.40e-04 +2022-05-08 14:24:28,546 INFO [train.py:715] (7/8) Epoch 16, batch 700, loss[loss=0.1181, simple_loss=0.1942, pruned_loss=0.021, over 4750.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02999, over 943768.15 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:25:06,446 INFO [train.py:715] (7/8) Epoch 16, batch 750, loss[loss=0.111, simple_loss=0.1938, pruned_loss=0.01406, over 4823.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2088, pruned_loss=0.03043, over 950065.12 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:25:45,233 INFO [train.py:715] (7/8) Epoch 16, batch 800, 
loss[loss=0.1682, simple_loss=0.2357, pruned_loss=0.05035, over 4937.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03023, over 954909.41 frames.], batch size: 39, lr: 1.40e-04 +2022-05-08 14:26:23,532 INFO [train.py:715] (7/8) Epoch 16, batch 850, loss[loss=0.1264, simple_loss=0.2124, pruned_loss=0.02019, over 4874.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03018, over 959428.71 frames.], batch size: 38, lr: 1.40e-04 +2022-05-08 14:27:01,575 INFO [train.py:715] (7/8) Epoch 16, batch 900, loss[loss=0.111, simple_loss=0.1925, pruned_loss=0.01478, over 4857.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03003, over 962100.28 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:27:39,692 INFO [train.py:715] (7/8) Epoch 16, batch 950, loss[loss=0.152, simple_loss=0.2355, pruned_loss=0.03422, over 4983.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03015, over 964773.30 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:28:18,128 INFO [train.py:715] (7/8) Epoch 16, batch 1000, loss[loss=0.1236, simple_loss=0.1967, pruned_loss=0.02531, over 4870.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03019, over 966574.48 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:28:55,785 INFO [train.py:715] (7/8) Epoch 16, batch 1050, loss[loss=0.1164, simple_loss=0.1853, pruned_loss=0.02375, over 4796.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03034, over 967649.57 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:29:33,186 INFO [train.py:715] (7/8) Epoch 16, batch 1100, loss[loss=0.142, simple_loss=0.2137, pruned_loss=0.03511, over 4930.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2076, pruned_loss=0.03039, over 968890.47 frames.], batch size: 23, lr: 1.40e-04 +2022-05-08 14:30:11,812 INFO [train.py:715] (7/8) Epoch 16, batch 1150, loss[loss=0.136, simple_loss=0.2011, pruned_loss=0.03539, over 4875.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2084, pruned_loss=0.03049, over 969329.65 frames.], batch size: 32, lr: 1.40e-04 +2022-05-08 14:30:49,880 INFO [train.py:715] (7/8) Epoch 16, batch 1200, loss[loss=0.1122, simple_loss=0.1849, pruned_loss=0.01979, over 4761.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.0301, over 970281.32 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:31:27,246 INFO [train.py:715] (7/8) Epoch 16, batch 1250, loss[loss=0.1147, simple_loss=0.1941, pruned_loss=0.01768, over 4963.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02954, over 972252.61 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:32:05,203 INFO [train.py:715] (7/8) Epoch 16, batch 1300, loss[loss=0.1284, simple_loss=0.2021, pruned_loss=0.02735, over 4851.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02942, over 971511.87 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:32:43,364 INFO [train.py:715] (7/8) Epoch 16, batch 1350, loss[loss=0.1246, simple_loss=0.1949, pruned_loss=0.02713, over 4929.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.0293, over 972301.14 frames.], batch size: 23, lr: 1.40e-04 +2022-05-08 14:33:21,096 INFO [train.py:715] (7/8) Epoch 16, batch 1400, loss[loss=0.1416, simple_loss=0.2118, pruned_loss=0.0357, over 4862.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02939, over 972860.69 frames.], batch size: 34, lr: 1.40e-04 +2022-05-08 14:33:59,219 INFO [train.py:715] (7/8) Epoch 16, batch 1450, loss[loss=0.1219, 
simple_loss=0.194, pruned_loss=0.02492, over 4796.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02911, over 973309.96 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:34:37,200 INFO [train.py:715] (7/8) Epoch 16, batch 1500, loss[loss=0.116, simple_loss=0.1931, pruned_loss=0.01946, over 4842.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02872, over 972924.76 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 14:35:14,923 INFO [train.py:715] (7/8) Epoch 16, batch 1550, loss[loss=0.1147, simple_loss=0.1861, pruned_loss=0.02166, over 4964.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02853, over 972336.53 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:35:52,770 INFO [train.py:715] (7/8) Epoch 16, batch 1600, loss[loss=0.1115, simple_loss=0.1841, pruned_loss=0.01944, over 4945.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02858, over 972539.51 frames.], batch size: 35, lr: 1.40e-04 +2022-05-08 14:36:30,169 INFO [train.py:715] (7/8) Epoch 16, batch 1650, loss[loss=0.1427, simple_loss=0.2242, pruned_loss=0.03057, over 4827.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02834, over 972714.80 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 14:37:07,994 INFO [train.py:715] (7/8) Epoch 16, batch 1700, loss[loss=0.1417, simple_loss=0.2084, pruned_loss=0.03756, over 4849.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02855, over 973345.54 frames.], batch size: 30, lr: 1.40e-04 +2022-05-08 14:37:46,152 INFO [train.py:715] (7/8) Epoch 16, batch 1750, loss[loss=0.1337, simple_loss=0.2159, pruned_loss=0.02579, over 4864.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02908, over 973442.59 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:38:24,068 INFO [train.py:715] (7/8) Epoch 16, batch 1800, loss[loss=0.1258, simple_loss=0.1979, pruned_loss=0.02681, over 4856.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02877, over 973875.45 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 14:39:02,377 INFO [train.py:715] (7/8) Epoch 16, batch 1850, loss[loss=0.1199, simple_loss=0.1988, pruned_loss=0.02043, over 4968.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02929, over 974087.98 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:39:41,005 INFO [train.py:715] (7/8) Epoch 16, batch 1900, loss[loss=0.1199, simple_loss=0.1937, pruned_loss=0.02302, over 4774.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02905, over 972678.12 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:40:18,871 INFO [train.py:715] (7/8) Epoch 16, batch 1950, loss[loss=0.1523, simple_loss=0.2243, pruned_loss=0.04018, over 4777.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02925, over 972568.56 frames.], batch size: 17, lr: 1.40e-04 +2022-05-08 14:40:57,048 INFO [train.py:715] (7/8) Epoch 16, batch 2000, loss[loss=0.1428, simple_loss=0.2143, pruned_loss=0.03565, over 4812.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02917, over 973526.09 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:41:35,846 INFO [train.py:715] (7/8) Epoch 16, batch 2050, loss[loss=0.1355, simple_loss=0.2169, pruned_loss=0.02709, over 4932.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02936, over 973056.53 frames.], batch size: 29, lr: 1.40e-04 +2022-05-08 14:42:14,574 INFO [train.py:715] (7/8) Epoch 16, batch 2100, loss[loss=0.136, simple_loss=0.2041, 
pruned_loss=0.03392, over 4746.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02924, over 972579.99 frames.], batch size: 16, lr: 1.40e-04 +2022-05-08 14:42:52,430 INFO [train.py:715] (7/8) Epoch 16, batch 2150, loss[loss=0.1265, simple_loss=0.2064, pruned_loss=0.02328, over 4989.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02938, over 972715.89 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:43:31,556 INFO [train.py:715] (7/8) Epoch 16, batch 2200, loss[loss=0.127, simple_loss=0.2047, pruned_loss=0.02468, over 4755.00 frames.], tot_loss[loss=0.134, simple_loss=0.2086, pruned_loss=0.02972, over 973769.12 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 14:44:09,851 INFO [train.py:715] (7/8) Epoch 16, batch 2250, loss[loss=0.1428, simple_loss=0.2155, pruned_loss=0.03504, over 4811.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2087, pruned_loss=0.02976, over 973728.77 frames.], batch size: 12, lr: 1.40e-04 +2022-05-08 14:44:47,484 INFO [train.py:715] (7/8) Epoch 16, batch 2300, loss[loss=0.123, simple_loss=0.1928, pruned_loss=0.02659, over 4975.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02961, over 973521.55 frames.], batch size: 28, lr: 1.40e-04 +2022-05-08 14:45:25,052 INFO [train.py:715] (7/8) Epoch 16, batch 2350, loss[loss=0.1213, simple_loss=0.1974, pruned_loss=0.02266, over 4830.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02948, over 973462.46 frames.], batch size: 13, lr: 1.40e-04 +2022-05-08 14:46:03,344 INFO [train.py:715] (7/8) Epoch 16, batch 2400, loss[loss=0.1329, simple_loss=0.212, pruned_loss=0.02695, over 4827.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02923, over 973352.49 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 14:46:41,419 INFO [train.py:715] (7/8) Epoch 16, batch 2450, loss[loss=0.1301, simple_loss=0.1981, pruned_loss=0.03101, over 4692.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02948, over 972406.23 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:47:18,881 INFO [train.py:715] (7/8) Epoch 16, batch 2500, loss[loss=0.1399, simple_loss=0.2129, pruned_loss=0.03342, over 4925.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2082, pruned_loss=0.03009, over 972941.69 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:47:57,273 INFO [train.py:715] (7/8) Epoch 16, batch 2550, loss[loss=0.175, simple_loss=0.2375, pruned_loss=0.05625, over 4764.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2088, pruned_loss=0.03009, over 972457.47 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:48:35,428 INFO [train.py:715] (7/8) Epoch 16, batch 2600, loss[loss=0.1252, simple_loss=0.1973, pruned_loss=0.02654, over 4762.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2093, pruned_loss=0.03008, over 972748.01 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:49:13,159 INFO [train.py:715] (7/8) Epoch 16, batch 2650, loss[loss=0.1383, simple_loss=0.2174, pruned_loss=0.02961, over 4811.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2089, pruned_loss=0.03015, over 972417.32 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:49:51,047 INFO [train.py:715] (7/8) Epoch 16, batch 2700, loss[loss=0.1377, simple_loss=0.2082, pruned_loss=0.03361, over 4942.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03031, over 972801.52 frames.], batch size: 35, lr: 1.40e-04 +2022-05-08 14:50:29,659 INFO [train.py:715] (7/8) Epoch 16, batch 2750, loss[loss=0.1086, simple_loss=0.1858, pruned_loss=0.01569, over 
4788.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03001, over 972687.05 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:51:08,571 INFO [train.py:715] (7/8) Epoch 16, batch 2800, loss[loss=0.1219, simple_loss=0.1991, pruned_loss=0.02232, over 4886.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03019, over 972391.51 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 14:51:46,949 INFO [train.py:715] (7/8) Epoch 16, batch 2850, loss[loss=0.116, simple_loss=0.1895, pruned_loss=0.02126, over 4925.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03022, over 972168.20 frames.], batch size: 29, lr: 1.40e-04 +2022-05-08 14:52:24,998 INFO [train.py:715] (7/8) Epoch 16, batch 2900, loss[loss=0.1186, simple_loss=0.1949, pruned_loss=0.02109, over 4872.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03005, over 972411.25 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 14:53:03,772 INFO [train.py:715] (7/8) Epoch 16, batch 2950, loss[loss=0.1226, simple_loss=0.1989, pruned_loss=0.02315, over 4985.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03017, over 972064.66 frames.], batch size: 14, lr: 1.40e-04 +2022-05-08 14:53:41,757 INFO [train.py:715] (7/8) Epoch 16, batch 3000, loss[loss=0.117, simple_loss=0.1941, pruned_loss=0.01992, over 4814.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.0301, over 972296.95 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 14:53:41,757 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 14:53:51,191 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.105, simple_loss=0.1885, pruned_loss=0.01074, over 914524.00 frames. +2022-05-08 14:54:29,007 INFO [train.py:715] (7/8) Epoch 16, batch 3050, loss[loss=0.152, simple_loss=0.2204, pruned_loss=0.04179, over 4829.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03065, over 971556.85 frames.], batch size: 30, lr: 1.40e-04 +2022-05-08 14:55:09,457 INFO [train.py:715] (7/8) Epoch 16, batch 3100, loss[loss=0.1599, simple_loss=0.2265, pruned_loss=0.04666, over 4873.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2082, pruned_loss=0.03072, over 972059.56 frames.], batch size: 38, lr: 1.40e-04 +2022-05-08 14:55:47,849 INFO [train.py:715] (7/8) Epoch 16, batch 3150, loss[loss=0.1282, simple_loss=0.2071, pruned_loss=0.02471, over 4953.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03006, over 972296.42 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:56:26,003 INFO [train.py:715] (7/8) Epoch 16, batch 3200, loss[loss=0.1188, simple_loss=0.1939, pruned_loss=0.02188, over 4964.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03003, over 972626.65 frames.], batch size: 24, lr: 1.40e-04 +2022-05-08 14:57:04,238 INFO [train.py:715] (7/8) Epoch 16, batch 3250, loss[loss=0.1088, simple_loss=0.1832, pruned_loss=0.01723, over 4799.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02959, over 971854.96 frames.], batch size: 21, lr: 1.40e-04 +2022-05-08 14:57:42,063 INFO [train.py:715] (7/8) Epoch 16, batch 3300, loss[loss=0.1365, simple_loss=0.2122, pruned_loss=0.03038, over 4830.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02939, over 972437.24 frames.], batch size: 15, lr: 1.40e-04 +2022-05-08 14:58:20,054 INFO [train.py:715] (7/8) Epoch 16, batch 3350, loss[loss=0.1247, simple_loss=0.1861, pruned_loss=0.03168, over 4787.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, 
pruned_loss=0.029, over 972818.07 frames.], batch size: 17, lr: 1.40e-04 +2022-05-08 14:58:57,930 INFO [train.py:715] (7/8) Epoch 16, batch 3400, loss[loss=0.1354, simple_loss=0.2063, pruned_loss=0.03227, over 4913.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02882, over 973183.09 frames.], batch size: 18, lr: 1.40e-04 +2022-05-08 14:59:35,864 INFO [train.py:715] (7/8) Epoch 16, batch 3450, loss[loss=0.1455, simple_loss=0.2187, pruned_loss=0.03618, over 4987.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02928, over 973207.05 frames.], batch size: 28, lr: 1.40e-04 +2022-05-08 15:00:13,953 INFO [train.py:715] (7/8) Epoch 16, batch 3500, loss[loss=0.1217, simple_loss=0.2031, pruned_loss=0.02012, over 4879.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02969, over 973811.54 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 15:00:51,755 INFO [train.py:715] (7/8) Epoch 16, batch 3550, loss[loss=0.1479, simple_loss=0.2142, pruned_loss=0.04081, over 4898.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.0294, over 974522.82 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 15:01:30,178 INFO [train.py:715] (7/8) Epoch 16, batch 3600, loss[loss=0.1476, simple_loss=0.2233, pruned_loss=0.03596, over 4981.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.0294, over 974855.35 frames.], batch size: 25, lr: 1.40e-04 +2022-05-08 15:02:07,897 INFO [train.py:715] (7/8) Epoch 16, batch 3650, loss[loss=0.1253, simple_loss=0.2035, pruned_loss=0.02359, over 4989.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02972, over 973978.62 frames.], batch size: 26, lr: 1.40e-04 +2022-05-08 15:02:46,545 INFO [train.py:715] (7/8) Epoch 16, batch 3700, loss[loss=0.1288, simple_loss=0.1992, pruned_loss=0.02914, over 4881.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02929, over 973964.41 frames.], batch size: 22, lr: 1.40e-04 +2022-05-08 15:03:25,026 INFO [train.py:715] (7/8) Epoch 16, batch 3750, loss[loss=0.1379, simple_loss=0.2212, pruned_loss=0.02728, over 4748.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02927, over 973717.48 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 15:04:03,390 INFO [train.py:715] (7/8) Epoch 16, batch 3800, loss[loss=0.1217, simple_loss=0.1957, pruned_loss=0.02383, over 4761.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02966, over 973913.46 frames.], batch size: 19, lr: 1.40e-04 +2022-05-08 15:04:42,255 INFO [train.py:715] (7/8) Epoch 16, batch 3850, loss[loss=0.1363, simple_loss=0.2094, pruned_loss=0.03165, over 4852.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02982, over 973353.21 frames.], batch size: 20, lr: 1.40e-04 +2022-05-08 15:05:21,012 INFO [train.py:715] (7/8) Epoch 16, batch 3900, loss[loss=0.1391, simple_loss=0.2086, pruned_loss=0.03478, over 4832.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.03029, over 972335.90 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:05:58,860 INFO [train.py:715] (7/8) Epoch 16, batch 3950, loss[loss=0.1402, simple_loss=0.2207, pruned_loss=0.02981, over 4940.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02986, over 972663.83 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:06:36,784 INFO [train.py:715] (7/8) Epoch 16, batch 4000, loss[loss=0.1459, simple_loss=0.2327, pruned_loss=0.02954, over 4931.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02969, over 
972186.40 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 15:07:14,744 INFO [train.py:715] (7/8) Epoch 16, batch 4050, loss[loss=0.1311, simple_loss=0.2045, pruned_loss=0.02884, over 4883.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02971, over 972458.42 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:07:52,147 INFO [train.py:715] (7/8) Epoch 16, batch 4100, loss[loss=0.1615, simple_loss=0.2371, pruned_loss=0.04295, over 4748.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2088, pruned_loss=0.02986, over 973212.94 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:08:29,797 INFO [train.py:715] (7/8) Epoch 16, batch 4150, loss[loss=0.1514, simple_loss=0.2192, pruned_loss=0.04178, over 4916.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02987, over 973355.68 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:09:07,466 INFO [train.py:715] (7/8) Epoch 16, batch 4200, loss[loss=0.123, simple_loss=0.2008, pruned_loss=0.0226, over 4961.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02954, over 973424.50 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:09:45,633 INFO [train.py:715] (7/8) Epoch 16, batch 4250, loss[loss=0.1354, simple_loss=0.1963, pruned_loss=0.03723, over 4687.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02981, over 973027.01 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 15:10:23,340 INFO [train.py:715] (7/8) Epoch 16, batch 4300, loss[loss=0.1515, simple_loss=0.2168, pruned_loss=0.04308, over 4752.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02987, over 972692.79 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:11:01,194 INFO [train.py:715] (7/8) Epoch 16, batch 4350, loss[loss=0.1554, simple_loss=0.2277, pruned_loss=0.04152, over 4931.00 frames.], tot_loss[loss=0.135, simple_loss=0.2089, pruned_loss=0.03061, over 973209.09 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 15:11:39,304 INFO [train.py:715] (7/8) Epoch 16, batch 4400, loss[loss=0.1176, simple_loss=0.2095, pruned_loss=0.01288, over 4889.00 frames.], tot_loss[loss=0.134, simple_loss=0.2082, pruned_loss=0.02984, over 973344.03 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:12:17,133 INFO [train.py:715] (7/8) Epoch 16, batch 4450, loss[loss=0.11, simple_loss=0.1872, pruned_loss=0.01638, over 4755.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02972, over 973470.62 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 15:12:54,750 INFO [train.py:715] (7/8) Epoch 16, batch 4500, loss[loss=0.1189, simple_loss=0.1946, pruned_loss=0.02159, over 4820.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02965, over 973495.30 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 15:13:32,858 INFO [train.py:715] (7/8) Epoch 16, batch 4550, loss[loss=0.1332, simple_loss=0.1979, pruned_loss=0.0342, over 4789.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02927, over 972938.02 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:14:11,254 INFO [train.py:715] (7/8) Epoch 16, batch 4600, loss[loss=0.1402, simple_loss=0.2058, pruned_loss=0.03731, over 4845.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02963, over 973569.82 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 15:14:49,229 INFO [train.py:715] (7/8) Epoch 16, batch 4650, loss[loss=0.1328, simple_loss=0.2109, pruned_loss=0.02734, over 4884.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02962, over 972513.43 frames.], batch 
size: 16, lr: 1.39e-04 +2022-05-08 15:15:27,629 INFO [train.py:715] (7/8) Epoch 16, batch 4700, loss[loss=0.1445, simple_loss=0.2159, pruned_loss=0.03654, over 4775.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2087, pruned_loss=0.02985, over 972815.16 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:16:06,226 INFO [train.py:715] (7/8) Epoch 16, batch 4750, loss[loss=0.1393, simple_loss=0.2133, pruned_loss=0.03266, over 4877.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02935, over 972532.10 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:16:44,827 INFO [train.py:715] (7/8) Epoch 16, batch 4800, loss[loss=0.1404, simple_loss=0.211, pruned_loss=0.03488, over 4983.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.0296, over 972509.35 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:17:23,108 INFO [train.py:715] (7/8) Epoch 16, batch 4850, loss[loss=0.1811, simple_loss=0.2516, pruned_loss=0.05527, over 4895.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02945, over 973524.39 frames.], batch size: 38, lr: 1.39e-04 +2022-05-08 15:18:01,809 INFO [train.py:715] (7/8) Epoch 16, batch 4900, loss[loss=0.1123, simple_loss=0.1918, pruned_loss=0.01636, over 4819.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02949, over 973123.22 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 15:18:40,682 INFO [train.py:715] (7/8) Epoch 16, batch 4950, loss[loss=0.141, simple_loss=0.2112, pruned_loss=0.03537, over 4902.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02976, over 973403.37 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:19:18,924 INFO [train.py:715] (7/8) Epoch 16, batch 5000, loss[loss=0.1279, simple_loss=0.1945, pruned_loss=0.03067, over 4951.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.02986, over 973300.57 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:19:57,139 INFO [train.py:715] (7/8) Epoch 16, batch 5050, loss[loss=0.1206, simple_loss=0.1965, pruned_loss=0.02239, over 4978.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02967, over 973390.70 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:20:35,446 INFO [train.py:715] (7/8) Epoch 16, batch 5100, loss[loss=0.1387, simple_loss=0.2133, pruned_loss=0.03199, over 4691.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02968, over 972779.31 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:21:13,349 INFO [train.py:715] (7/8) Epoch 16, batch 5150, loss[loss=0.1318, simple_loss=0.218, pruned_loss=0.02285, over 4928.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02945, over 973456.77 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 15:21:50,907 INFO [train.py:715] (7/8) Epoch 16, batch 5200, loss[loss=0.1377, simple_loss=0.2059, pruned_loss=0.03477, over 4977.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02916, over 972898.71 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 15:22:28,865 INFO [train.py:715] (7/8) Epoch 16, batch 5250, loss[loss=0.1201, simple_loss=0.1847, pruned_loss=0.02775, over 4826.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02945, over 972349.62 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 15:23:07,103 INFO [train.py:715] (7/8) Epoch 16, batch 5300, loss[loss=0.1362, simple_loss=0.208, pruned_loss=0.03224, over 4816.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03007, over 971731.91 frames.], batch size: 25, lr: 1.39e-04 
+2022-05-08 15:23:45,226 INFO [train.py:715] (7/8) Epoch 16, batch 5350, loss[loss=0.1191, simple_loss=0.2015, pruned_loss=0.01831, over 4935.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02973, over 972538.30 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 15:24:23,037 INFO [train.py:715] (7/8) Epoch 16, batch 5400, loss[loss=0.113, simple_loss=0.1934, pruned_loss=0.01623, over 4926.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02962, over 971980.93 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:25:00,890 INFO [train.py:715] (7/8) Epoch 16, batch 5450, loss[loss=0.1433, simple_loss=0.2116, pruned_loss=0.0375, over 4752.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02966, over 972723.13 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:25:38,708 INFO [train.py:715] (7/8) Epoch 16, batch 5500, loss[loss=0.1159, simple_loss=0.1906, pruned_loss=0.02058, over 4976.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02944, over 973347.77 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:26:16,326 INFO [train.py:715] (7/8) Epoch 16, batch 5550, loss[loss=0.1429, simple_loss=0.2191, pruned_loss=0.03339, over 4835.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02914, over 973069.15 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:26:54,075 INFO [train.py:715] (7/8) Epoch 16, batch 5600, loss[loss=0.1501, simple_loss=0.2278, pruned_loss=0.03619, over 4923.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02949, over 973153.25 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:27:32,729 INFO [train.py:715] (7/8) Epoch 16, batch 5650, loss[loss=0.1164, simple_loss=0.1921, pruned_loss=0.0204, over 4858.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02944, over 973089.78 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:28:10,547 INFO [train.py:715] (7/8) Epoch 16, batch 5700, loss[loss=0.1353, simple_loss=0.2101, pruned_loss=0.03029, over 4865.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.0289, over 973339.33 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 15:28:48,367 INFO [train.py:715] (7/8) Epoch 16, batch 5750, loss[loss=0.1269, simple_loss=0.2035, pruned_loss=0.02516, over 4817.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02944, over 972833.70 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 15:29:26,212 INFO [train.py:715] (7/8) Epoch 16, batch 5800, loss[loss=0.1554, simple_loss=0.2259, pruned_loss=0.04244, over 4914.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02946, over 972588.32 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:30:04,475 INFO [train.py:715] (7/8) Epoch 16, batch 5850, loss[loss=0.122, simple_loss=0.1946, pruned_loss=0.02474, over 4748.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02949, over 972473.82 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:30:42,016 INFO [train.py:715] (7/8) Epoch 16, batch 5900, loss[loss=0.1222, simple_loss=0.2037, pruned_loss=0.02036, over 4815.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02927, over 972409.92 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 15:31:19,660 INFO [train.py:715] (7/8) Epoch 16, batch 5950, loss[loss=0.1411, simple_loss=0.2213, pruned_loss=0.03043, over 4878.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02933, over 972051.37 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:31:58,427 
INFO [train.py:715] (7/8) Epoch 16, batch 6000, loss[loss=0.1143, simple_loss=0.1945, pruned_loss=0.01708, over 4913.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02953, over 971757.27 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 15:31:58,428 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 15:32:07,945 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.105, simple_loss=0.1885, pruned_loss=0.01082, over 914524.00 frames. +2022-05-08 15:32:46,978 INFO [train.py:715] (7/8) Epoch 16, batch 6050, loss[loss=0.135, simple_loss=0.2057, pruned_loss=0.03215, over 4861.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.0297, over 970648.01 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 15:33:25,022 INFO [train.py:715] (7/8) Epoch 16, batch 6100, loss[loss=0.1497, simple_loss=0.2286, pruned_loss=0.03542, over 4831.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02984, over 971373.63 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 15:34:02,793 INFO [train.py:715] (7/8) Epoch 16, batch 6150, loss[loss=0.1379, simple_loss=0.2098, pruned_loss=0.03304, over 4768.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03033, over 971705.98 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:34:40,934 INFO [train.py:715] (7/8) Epoch 16, batch 6200, loss[loss=0.1405, simple_loss=0.2173, pruned_loss=0.03181, over 4749.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03041, over 971377.88 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:35:19,471 INFO [train.py:715] (7/8) Epoch 16, batch 6250, loss[loss=0.1504, simple_loss=0.228, pruned_loss=0.03642, over 4985.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03017, over 971300.82 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 15:35:57,114 INFO [train.py:715] (7/8) Epoch 16, batch 6300, loss[loss=0.1254, simple_loss=0.1883, pruned_loss=0.03122, over 4842.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2063, pruned_loss=0.02946, over 971538.65 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 15:36:34,885 INFO [train.py:715] (7/8) Epoch 16, batch 6350, loss[loss=0.1336, simple_loss=0.2128, pruned_loss=0.02718, over 4743.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02954, over 970732.51 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:37:13,402 INFO [train.py:715] (7/8) Epoch 16, batch 6400, loss[loss=0.1222, simple_loss=0.1982, pruned_loss=0.02314, over 4750.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03003, over 971873.54 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:37:51,668 INFO [train.py:715] (7/8) Epoch 16, batch 6450, loss[loss=0.119, simple_loss=0.1931, pruned_loss=0.02247, over 4851.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02992, over 971472.02 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 15:38:29,440 INFO [train.py:715] (7/8) Epoch 16, batch 6500, loss[loss=0.1131, simple_loss=0.1858, pruned_loss=0.02018, over 4781.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02969, over 971777.84 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:39:07,582 INFO [train.py:715] (7/8) Epoch 16, batch 6550, loss[loss=0.1502, simple_loss=0.2214, pruned_loss=0.03948, over 4837.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.0301, over 971511.85 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 15:39:46,029 INFO [train.py:715] (7/8) Epoch 16, batch 6600, loss[loss=0.1809, 
simple_loss=0.265, pruned_loss=0.04839, over 4958.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03024, over 970737.24 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:40:23,830 INFO [train.py:715] (7/8) Epoch 16, batch 6650, loss[loss=0.1323, simple_loss=0.2042, pruned_loss=0.03023, over 4898.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03025, over 971178.03 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:41:01,686 INFO [train.py:715] (7/8) Epoch 16, batch 6700, loss[loss=0.1362, simple_loss=0.212, pruned_loss=0.03024, over 4784.00 frames.], tot_loss[loss=0.1358, simple_loss=0.2096, pruned_loss=0.03098, over 971293.17 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 15:41:39,712 INFO [train.py:715] (7/8) Epoch 16, batch 6750, loss[loss=0.1164, simple_loss=0.1997, pruned_loss=0.01656, over 4685.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.0303, over 971010.53 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 15:42:17,831 INFO [train.py:715] (7/8) Epoch 16, batch 6800, loss[loss=0.1366, simple_loss=0.2109, pruned_loss=0.03112, over 4916.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03039, over 970844.31 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 15:42:54,811 INFO [train.py:715] (7/8) Epoch 16, batch 6850, loss[loss=0.1223, simple_loss=0.2042, pruned_loss=0.02021, over 4973.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03031, over 971400.52 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 15:43:32,598 INFO [train.py:715] (7/8) Epoch 16, batch 6900, loss[loss=0.1368, simple_loss=0.2074, pruned_loss=0.03315, over 4740.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03065, over 971520.19 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:44:10,714 INFO [train.py:715] (7/8) Epoch 16, batch 6950, loss[loss=0.1421, simple_loss=0.2198, pruned_loss=0.03221, over 4953.00 frames.], tot_loss[loss=0.1355, simple_loss=0.21, pruned_loss=0.03052, over 970757.11 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 15:44:48,420 INFO [train.py:715] (7/8) Epoch 16, batch 7000, loss[loss=0.1289, simple_loss=0.2082, pruned_loss=0.0248, over 4941.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03034, over 971135.86 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 15:45:26,356 INFO [train.py:715] (7/8) Epoch 16, batch 7050, loss[loss=0.1131, simple_loss=0.1855, pruned_loss=0.02035, over 4772.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03007, over 971130.78 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 15:46:04,189 INFO [train.py:715] (7/8) Epoch 16, batch 7100, loss[loss=0.1299, simple_loss=0.2077, pruned_loss=0.02606, over 4948.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02977, over 971201.78 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:46:42,645 INFO [train.py:715] (7/8) Epoch 16, batch 7150, loss[loss=0.14, simple_loss=0.2137, pruned_loss=0.03316, over 4949.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02981, over 971231.27 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 15:47:19,959 INFO [train.py:715] (7/8) Epoch 16, batch 7200, loss[loss=0.1191, simple_loss=0.1914, pruned_loss=0.02338, over 4888.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02959, over 971737.34 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:47:57,928 INFO [train.py:715] (7/8) Epoch 16, batch 7250, loss[loss=0.1416, simple_loss=0.2155, 
pruned_loss=0.03383, over 4913.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02975, over 972702.65 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 15:48:36,996 INFO [train.py:715] (7/8) Epoch 16, batch 7300, loss[loss=0.1281, simple_loss=0.2055, pruned_loss=0.0253, over 4823.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.0299, over 971890.18 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 15:49:15,798 INFO [train.py:715] (7/8) Epoch 16, batch 7350, loss[loss=0.1387, simple_loss=0.2032, pruned_loss=0.03703, over 4780.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02984, over 971346.12 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 15:49:55,253 INFO [train.py:715] (7/8) Epoch 16, batch 7400, loss[loss=0.1194, simple_loss=0.1924, pruned_loss=0.02316, over 4936.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03019, over 971588.68 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 15:50:34,947 INFO [train.py:715] (7/8) Epoch 16, batch 7450, loss[loss=0.1364, simple_loss=0.2082, pruned_loss=0.03228, over 4826.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02996, over 972206.98 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 15:51:14,629 INFO [train.py:715] (7/8) Epoch 16, batch 7500, loss[loss=0.1525, simple_loss=0.2303, pruned_loss=0.03735, over 4868.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03013, over 972632.78 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 15:51:53,687 INFO [train.py:715] (7/8) Epoch 16, batch 7550, loss[loss=0.1403, simple_loss=0.2172, pruned_loss=0.03167, over 4857.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02948, over 971764.98 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:52:33,700 INFO [train.py:715] (7/8) Epoch 16, batch 7600, loss[loss=0.152, simple_loss=0.2213, pruned_loss=0.04137, over 4868.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02974, over 971608.05 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:53:14,089 INFO [train.py:715] (7/8) Epoch 16, batch 7650, loss[loss=0.1672, simple_loss=0.2458, pruned_loss=0.04431, over 4886.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02957, over 972403.13 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:53:54,222 INFO [train.py:715] (7/8) Epoch 16, batch 7700, loss[loss=0.1327, simple_loss=0.2156, pruned_loss=0.02495, over 4793.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02998, over 972699.64 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 15:54:33,734 INFO [train.py:715] (7/8) Epoch 16, batch 7750, loss[loss=0.1345, simple_loss=0.2005, pruned_loss=0.03429, over 4788.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02934, over 973278.56 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:55:13,931 INFO [train.py:715] (7/8) Epoch 16, batch 7800, loss[loss=0.1386, simple_loss=0.2106, pruned_loss=0.03327, over 4820.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02952, over 973453.00 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 15:55:54,769 INFO [train.py:715] (7/8) Epoch 16, batch 7850, loss[loss=0.1411, simple_loss=0.2136, pruned_loss=0.0343, over 4797.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02921, over 973187.58 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 15:56:34,177 INFO [train.py:715] (7/8) Epoch 16, batch 7900, loss[loss=0.1168, simple_loss=0.1956, pruned_loss=0.01893, over 
4881.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02964, over 973026.24 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:57:14,066 INFO [train.py:715] (7/8) Epoch 16, batch 7950, loss[loss=0.1547, simple_loss=0.2356, pruned_loss=0.03687, over 4761.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02997, over 972520.02 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:57:54,569 INFO [train.py:715] (7/8) Epoch 16, batch 8000, loss[loss=0.1156, simple_loss=0.1884, pruned_loss=0.0214, over 4867.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02986, over 972750.95 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 15:58:34,608 INFO [train.py:715] (7/8) Epoch 16, batch 8050, loss[loss=0.1358, simple_loss=0.2063, pruned_loss=0.03268, over 4981.00 frames.], tot_loss[loss=0.1336, simple_loss=0.207, pruned_loss=0.03009, over 972917.00 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 15:59:14,260 INFO [train.py:715] (7/8) Epoch 16, batch 8100, loss[loss=0.1427, simple_loss=0.2219, pruned_loss=0.03181, over 4872.00 frames.], tot_loss[loss=0.134, simple_loss=0.2074, pruned_loss=0.03032, over 973117.08 frames.], batch size: 22, lr: 1.39e-04 +2022-05-08 15:59:54,680 INFO [train.py:715] (7/8) Epoch 16, batch 8150, loss[loss=0.1466, simple_loss=0.2188, pruned_loss=0.03723, over 4940.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2084, pruned_loss=0.03115, over 973555.28 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:00:35,755 INFO [train.py:715] (7/8) Epoch 16, batch 8200, loss[loss=0.1335, simple_loss=0.213, pruned_loss=0.02698, over 4781.00 frames.], tot_loss[loss=0.1357, simple_loss=0.2088, pruned_loss=0.03129, over 973338.63 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:01:15,838 INFO [train.py:715] (7/8) Epoch 16, batch 8250, loss[loss=0.1454, simple_loss=0.2114, pruned_loss=0.03972, over 4782.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2081, pruned_loss=0.03078, over 972734.09 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:01:55,592 INFO [train.py:715] (7/8) Epoch 16, batch 8300, loss[loss=0.1356, simple_loss=0.1992, pruned_loss=0.03601, over 4758.00 frames.], tot_loss[loss=0.1356, simple_loss=0.2089, pruned_loss=0.03113, over 972566.38 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:02:36,305 INFO [train.py:715] (7/8) Epoch 16, batch 8350, loss[loss=0.128, simple_loss=0.2065, pruned_loss=0.02477, over 4964.00 frames.], tot_loss[loss=0.1355, simple_loss=0.2087, pruned_loss=0.03117, over 973413.21 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:03:16,603 INFO [train.py:715] (7/8) Epoch 16, batch 8400, loss[loss=0.1277, simple_loss=0.2096, pruned_loss=0.02292, over 4900.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2079, pruned_loss=0.03045, over 973301.57 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:03:55,137 INFO [train.py:715] (7/8) Epoch 16, batch 8450, loss[loss=0.1684, simple_loss=0.2424, pruned_loss=0.04717, over 4931.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03012, over 972713.34 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:04:34,540 INFO [train.py:715] (7/8) Epoch 16, batch 8500, loss[loss=0.1549, simple_loss=0.2371, pruned_loss=0.03633, over 4932.00 frames.], tot_loss[loss=0.1353, simple_loss=0.2085, pruned_loss=0.03106, over 973048.81 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:05:13,260 INFO [train.py:715] (7/8) Epoch 16, batch 8550, loss[loss=0.1525, simple_loss=0.2237, pruned_loss=0.04064, over 4697.00 frames.], 
tot_loss[loss=0.1353, simple_loss=0.2088, pruned_loss=0.03093, over 973046.69 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:05:51,579 INFO [train.py:715] (7/8) Epoch 16, batch 8600, loss[loss=0.1284, simple_loss=0.2061, pruned_loss=0.02535, over 4838.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2088, pruned_loss=0.03099, over 974100.61 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:06:29,595 INFO [train.py:715] (7/8) Epoch 16, batch 8650, loss[loss=0.1347, simple_loss=0.2104, pruned_loss=0.02952, over 4734.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03052, over 973632.89 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:07:08,669 INFO [train.py:715] (7/8) Epoch 16, batch 8700, loss[loss=0.1549, simple_loss=0.2234, pruned_loss=0.04321, over 4870.00 frames.], tot_loss[loss=0.1345, simple_loss=0.208, pruned_loss=0.03048, over 973203.40 frames.], batch size: 34, lr: 1.39e-04 +2022-05-08 16:07:47,716 INFO [train.py:715] (7/8) Epoch 16, batch 8750, loss[loss=0.1595, simple_loss=0.2384, pruned_loss=0.04028, over 4970.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2079, pruned_loss=0.03054, over 972856.72 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:08:26,278 INFO [train.py:715] (7/8) Epoch 16, batch 8800, loss[loss=0.1295, simple_loss=0.1996, pruned_loss=0.02969, over 4811.00 frames.], tot_loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.03026, over 973877.20 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:09:04,976 INFO [train.py:715] (7/8) Epoch 16, batch 8850, loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02905, over 4776.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2087, pruned_loss=0.03057, over 974095.13 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:09:44,466 INFO [train.py:715] (7/8) Epoch 16, batch 8900, loss[loss=0.1164, simple_loss=0.1854, pruned_loss=0.02367, over 4928.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03022, over 973704.78 frames.], batch size: 29, lr: 1.39e-04 +2022-05-08 16:10:22,908 INFO [train.py:715] (7/8) Epoch 16, batch 8950, loss[loss=0.1575, simple_loss=0.224, pruned_loss=0.04555, over 4906.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02998, over 973458.29 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:11:01,151 INFO [train.py:715] (7/8) Epoch 16, batch 9000, loss[loss=0.1122, simple_loss=0.1891, pruned_loss=0.01761, over 4855.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03, over 973407.99 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 16:11:01,151 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 16:11:23,894 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.105, simple_loss=0.1884, pruned_loss=0.01076, over 914524.00 frames. 
+2022-05-08 16:12:02,818 INFO [train.py:715] (7/8) Epoch 16, batch 9050, loss[loss=0.1496, simple_loss=0.2232, pruned_loss=0.03801, over 4892.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03017, over 972881.88 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 16:12:41,945 INFO [train.py:715] (7/8) Epoch 16, batch 9100, loss[loss=0.1188, simple_loss=0.1974, pruned_loss=0.0201, over 4820.00 frames.], tot_loss[loss=0.134, simple_loss=0.2077, pruned_loss=0.03016, over 973490.64 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 16:13:20,955 INFO [train.py:715] (7/8) Epoch 16, batch 9150, loss[loss=0.1417, simple_loss=0.211, pruned_loss=0.03618, over 4802.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02974, over 972609.01 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:13:58,478 INFO [train.py:715] (7/8) Epoch 16, batch 9200, loss[loss=0.1204, simple_loss=0.1956, pruned_loss=0.02262, over 4876.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.0296, over 972108.04 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:14:37,127 INFO [train.py:715] (7/8) Epoch 16, batch 9250, loss[loss=0.1199, simple_loss=0.1969, pruned_loss=0.02143, over 4903.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02958, over 971904.21 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:15:16,083 INFO [train.py:715] (7/8) Epoch 16, batch 9300, loss[loss=0.1345, simple_loss=0.2053, pruned_loss=0.03185, over 4745.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02968, over 972738.77 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:15:54,777 INFO [train.py:715] (7/8) Epoch 16, batch 9350, loss[loss=0.1527, simple_loss=0.2257, pruned_loss=0.03985, over 4810.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02968, over 971808.93 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 16:16:33,102 INFO [train.py:715] (7/8) Epoch 16, batch 9400, loss[loss=0.1003, simple_loss=0.1649, pruned_loss=0.01789, over 4758.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2062, pruned_loss=0.02927, over 972118.52 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:17:11,621 INFO [train.py:715] (7/8) Epoch 16, batch 9450, loss[loss=0.1126, simple_loss=0.1899, pruned_loss=0.01763, over 4929.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2059, pruned_loss=0.02931, over 971697.73 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:17:50,526 INFO [train.py:715] (7/8) Epoch 16, batch 9500, loss[loss=0.151, simple_loss=0.2213, pruned_loss=0.0404, over 4771.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2059, pruned_loss=0.02915, over 971035.31 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:18:28,784 INFO [train.py:715] (7/8) Epoch 16, batch 9550, loss[loss=0.1177, simple_loss=0.1844, pruned_loss=0.02549, over 4752.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02913, over 971318.83 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 16:19:08,106 INFO [train.py:715] (7/8) Epoch 16, batch 9600, loss[loss=0.1512, simple_loss=0.2157, pruned_loss=0.04334, over 4815.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02916, over 970510.84 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:19:47,952 INFO [train.py:715] (7/8) Epoch 16, batch 9650, loss[loss=0.1239, simple_loss=0.1941, pruned_loss=0.02685, over 4964.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02943, over 971578.39 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:20:27,615 
INFO [train.py:715] (7/8) Epoch 16, batch 9700, loss[loss=0.1345, simple_loss=0.201, pruned_loss=0.03407, over 4842.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02939, over 972047.04 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 16:21:08,024 INFO [train.py:715] (7/8) Epoch 16, batch 9750, loss[loss=0.1502, simple_loss=0.2278, pruned_loss=0.03629, over 4930.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02896, over 972188.59 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 16:21:49,084 INFO [train.py:715] (7/8) Epoch 16, batch 9800, loss[loss=0.147, simple_loss=0.2081, pruned_loss=0.04298, over 4889.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02969, over 972861.07 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:22:29,522 INFO [train.py:715] (7/8) Epoch 16, batch 9850, loss[loss=0.1338, simple_loss=0.2104, pruned_loss=0.02863, over 4946.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02926, over 972921.97 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:23:09,308 INFO [train.py:715] (7/8) Epoch 16, batch 9900, loss[loss=0.1228, simple_loss=0.1956, pruned_loss=0.02502, over 4751.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02967, over 973243.41 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:23:49,492 INFO [train.py:715] (7/8) Epoch 16, batch 9950, loss[loss=0.1214, simple_loss=0.1967, pruned_loss=0.02304, over 4754.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02968, over 973382.27 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:24:30,465 INFO [train.py:715] (7/8) Epoch 16, batch 10000, loss[loss=0.1183, simple_loss=0.1998, pruned_loss=0.01844, over 4756.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02945, over 973456.35 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:25:09,396 INFO [train.py:715] (7/8) Epoch 16, batch 10050, loss[loss=0.09709, simple_loss=0.1705, pruned_loss=0.01186, over 4766.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03007, over 973115.29 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:25:49,623 INFO [train.py:715] (7/8) Epoch 16, batch 10100, loss[loss=0.1187, simple_loss=0.1913, pruned_loss=0.02301, over 4885.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02983, over 972269.44 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:26:30,396 INFO [train.py:715] (7/8) Epoch 16, batch 10150, loss[loss=0.1641, simple_loss=0.2352, pruned_loss=0.04652, over 4697.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2085, pruned_loss=0.03019, over 972020.68 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:27:10,599 INFO [train.py:715] (7/8) Epoch 16, batch 10200, loss[loss=0.1488, simple_loss=0.2184, pruned_loss=0.03963, over 4777.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.02998, over 972283.23 frames.], batch size: 14, lr: 1.39e-04 +2022-05-08 16:27:49,590 INFO [train.py:715] (7/8) Epoch 16, batch 10250, loss[loss=0.1303, simple_loss=0.2056, pruned_loss=0.02752, over 4974.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02948, over 972820.95 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:28:29,498 INFO [train.py:715] (7/8) Epoch 16, batch 10300, loss[loss=0.135, simple_loss=0.2053, pruned_loss=0.03237, over 4908.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02951, over 973013.99 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 16:29:09,155 INFO 
[train.py:715] (7/8) Epoch 16, batch 10350, loss[loss=0.142, simple_loss=0.2089, pruned_loss=0.0376, over 4802.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02962, over 972981.31 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:29:47,475 INFO [train.py:715] (7/8) Epoch 16, batch 10400, loss[loss=0.1495, simple_loss=0.2205, pruned_loss=0.03921, over 4967.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02989, over 973142.98 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:30:26,270 INFO [train.py:715] (7/8) Epoch 16, batch 10450, loss[loss=0.1147, simple_loss=0.187, pruned_loss=0.02119, over 4810.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03027, over 973249.01 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 16:31:05,205 INFO [train.py:715] (7/8) Epoch 16, batch 10500, loss[loss=0.1221, simple_loss=0.2005, pruned_loss=0.02184, over 4950.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02992, over 972738.80 frames.], batch size: 21, lr: 1.39e-04 +2022-05-08 16:31:44,637 INFO [train.py:715] (7/8) Epoch 16, batch 10550, loss[loss=0.1428, simple_loss=0.2223, pruned_loss=0.03169, over 4986.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02972, over 972514.79 frames.], batch size: 31, lr: 1.39e-04 +2022-05-08 16:32:22,615 INFO [train.py:715] (7/8) Epoch 16, batch 10600, loss[loss=0.1528, simple_loss=0.2328, pruned_loss=0.03638, over 4797.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2086, pruned_loss=0.03015, over 972131.67 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:33:01,320 INFO [train.py:715] (7/8) Epoch 16, batch 10650, loss[loss=0.1405, simple_loss=0.206, pruned_loss=0.03752, over 4855.00 frames.], tot_loss[loss=0.135, simple_loss=0.2093, pruned_loss=0.03036, over 972182.53 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 16:33:40,767 INFO [train.py:715] (7/8) Epoch 16, batch 10700, loss[loss=0.1408, simple_loss=0.2099, pruned_loss=0.03581, over 4876.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2095, pruned_loss=0.03011, over 972718.81 frames.], batch size: 13, lr: 1.39e-04 +2022-05-08 16:34:19,604 INFO [train.py:715] (7/8) Epoch 16, batch 10750, loss[loss=0.1421, simple_loss=0.2074, pruned_loss=0.03844, over 4807.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2089, pruned_loss=0.02992, over 972222.59 frames.], batch size: 25, lr: 1.39e-04 +2022-05-08 16:34:58,503 INFO [train.py:715] (7/8) Epoch 16, batch 10800, loss[loss=0.1119, simple_loss=0.1862, pruned_loss=0.01883, over 4896.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02957, over 971883.69 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:35:37,666 INFO [train.py:715] (7/8) Epoch 16, batch 10850, loss[loss=0.121, simple_loss=0.1924, pruned_loss=0.02481, over 4922.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02923, over 972224.53 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:36:17,327 INFO [train.py:715] (7/8) Epoch 16, batch 10900, loss[loss=0.1163, simple_loss=0.1922, pruned_loss=0.02018, over 4970.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 971992.75 frames.], batch size: 28, lr: 1.39e-04 +2022-05-08 16:36:55,551 INFO [train.py:715] (7/8) Epoch 16, batch 10950, loss[loss=0.1287, simple_loss=0.2048, pruned_loss=0.02623, over 4967.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02952, over 972461.77 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:37:34,520 INFO 
[train.py:715] (7/8) Epoch 16, batch 11000, loss[loss=0.1568, simple_loss=0.2239, pruned_loss=0.04485, over 4867.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02973, over 972178.74 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:38:13,975 INFO [train.py:715] (7/8) Epoch 16, batch 11050, loss[loss=0.1386, simple_loss=0.2208, pruned_loss=0.02822, over 4865.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02992, over 971652.41 frames.], batch size: 20, lr: 1.39e-04 +2022-05-08 16:38:55,221 INFO [train.py:715] (7/8) Epoch 16, batch 11100, loss[loss=0.1146, simple_loss=0.1887, pruned_loss=0.02026, over 4962.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.0299, over 970744.11 frames.], batch size: 24, lr: 1.39e-04 +2022-05-08 16:39:33,650 INFO [train.py:715] (7/8) Epoch 16, batch 11150, loss[loss=0.1289, simple_loss=0.2072, pruned_loss=0.02533, over 4912.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02993, over 971246.67 frames.], batch size: 19, lr: 1.39e-04 +2022-05-08 16:40:12,900 INFO [train.py:715] (7/8) Epoch 16, batch 11200, loss[loss=0.1577, simple_loss=0.2182, pruned_loss=0.04865, over 4846.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02981, over 972323.31 frames.], batch size: 32, lr: 1.39e-04 +2022-05-08 16:40:51,684 INFO [train.py:715] (7/8) Epoch 16, batch 11250, loss[loss=0.1319, simple_loss=0.1977, pruned_loss=0.03309, over 4838.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02982, over 971707.65 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:41:29,875 INFO [train.py:715] (7/8) Epoch 16, batch 11300, loss[loss=0.1584, simple_loss=0.2348, pruned_loss=0.04096, over 4940.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02987, over 972618.18 frames.], batch size: 39, lr: 1.39e-04 +2022-05-08 16:42:08,149 INFO [train.py:715] (7/8) Epoch 16, batch 11350, loss[loss=0.1459, simple_loss=0.2166, pruned_loss=0.03762, over 4916.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2086, pruned_loss=0.03027, over 972512.44 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:42:47,129 INFO [train.py:715] (7/8) Epoch 16, batch 11400, loss[loss=0.1038, simple_loss=0.1755, pruned_loss=0.01608, over 4803.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03007, over 973096.72 frames.], batch size: 18, lr: 1.39e-04 +2022-05-08 16:43:25,154 INFO [train.py:715] (7/8) Epoch 16, batch 11450, loss[loss=0.1731, simple_loss=0.2468, pruned_loss=0.04972, over 4703.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02979, over 972878.56 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:44:03,096 INFO [train.py:715] (7/8) Epoch 16, batch 11500, loss[loss=0.1439, simple_loss=0.2094, pruned_loss=0.03917, over 4971.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02981, over 972693.99 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:44:41,801 INFO [train.py:715] (7/8) Epoch 16, batch 11550, loss[loss=0.1263, simple_loss=0.2105, pruned_loss=0.02105, over 4892.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02962, over 972240.48 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:45:20,369 INFO [train.py:715] (7/8) Epoch 16, batch 11600, loss[loss=0.1604, simple_loss=0.2372, pruned_loss=0.04182, over 4838.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02948, over 972325.11 frames.], batch size: 30, lr: 1.39e-04 +2022-05-08 16:45:57,962 INFO 
[train.py:715] (7/8) Epoch 16, batch 11650, loss[loss=0.1235, simple_loss=0.1955, pruned_loss=0.02573, over 4875.00 frames.], tot_loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.0298, over 972443.58 frames.], batch size: 16, lr: 1.39e-04 +2022-05-08 16:46:36,440 INFO [train.py:715] (7/8) Epoch 16, batch 11700, loss[loss=0.1378, simple_loss=0.2074, pruned_loss=0.03409, over 4968.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02956, over 972809.28 frames.], batch size: 35, lr: 1.39e-04 +2022-05-08 16:47:15,522 INFO [train.py:715] (7/8) Epoch 16, batch 11750, loss[loss=0.1504, simple_loss=0.2302, pruned_loss=0.03533, over 4940.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02966, over 972476.90 frames.], batch size: 23, lr: 1.39e-04 +2022-05-08 16:47:53,673 INFO [train.py:715] (7/8) Epoch 16, batch 11800, loss[loss=0.1403, simple_loss=0.2141, pruned_loss=0.03322, over 4844.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02898, over 972794.29 frames.], batch size: 15, lr: 1.39e-04 +2022-05-08 16:48:31,491 INFO [train.py:715] (7/8) Epoch 16, batch 11850, loss[loss=0.1281, simple_loss=0.2065, pruned_loss=0.0249, over 4779.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02952, over 972745.66 frames.], batch size: 17, lr: 1.39e-04 +2022-05-08 16:49:10,175 INFO [train.py:715] (7/8) Epoch 16, batch 11900, loss[loss=0.09366, simple_loss=0.1641, pruned_loss=0.01163, over 4765.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02914, over 971920.93 frames.], batch size: 12, lr: 1.39e-04 +2022-05-08 16:49:48,595 INFO [train.py:715] (7/8) Epoch 16, batch 11950, loss[loss=0.1325, simple_loss=0.2038, pruned_loss=0.03064, over 4816.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02917, over 972079.28 frames.], batch size: 26, lr: 1.39e-04 +2022-05-08 16:50:26,416 INFO [train.py:715] (7/8) Epoch 16, batch 12000, loss[loss=0.1329, simple_loss=0.2054, pruned_loss=0.03023, over 4851.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02958, over 972412.45 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 16:50:26,417 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 16:50:37,201 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.1049, simple_loss=0.1884, pruned_loss=0.01072, over 914524.00 frames. 
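The per-batch entries above all follow one fixed format: epoch, batch index, the loss on the current batch (with its simple_loss and pruned_loss components and frame count), the running tot_loss reported over roughly 970,000 frames, the batch size, and the learning rate. A minimal parsing sketch is given below, assuming the log has been saved as a plain-text file; the path "train.log", the pattern name and the helper parse_log are illustrative only and are not part of the training code.

import re

# Matches per-batch lines like:
#   Epoch 16, batch 12000, loss[loss=0.1329, ...], tot_loss[loss=0.1333, ...],
#   batch size: 32, lr: 1.38e-04
BATCH_RE = re.compile(
    r"Epoch (\d+), batch (\d+), loss\[loss=([\d.]+).*?"
    r"tot_loss\[loss=([\d.]+).*?batch size: (\d+), lr: ([\d.e-]+)"
)

def parse_log(path):
    """Yield (epoch, batch, loss, tot_loss, batch_size, lr) from a saved log."""
    with open(path) as f:
        for line in f:
            # finditer copes with lines that hold several log entries.
            for m in BATCH_RE.finditer(line):
                epoch, batch, loss, tot, bsz, lr = m.groups()
                yield (int(epoch), int(batch), float(loss),
                       float(tot), int(bsz), float(lr))

# Hypothetical usage:
# for record in parse_log("train.log"):
#     print(record)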
+2022-05-08 16:51:16,050 INFO [train.py:715] (7/8) Epoch 16, batch 12050, loss[loss=0.1326, simple_loss=0.2061, pruned_loss=0.02958, over 4776.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.02999, over 972294.83 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 16:51:55,272 INFO [train.py:715] (7/8) Epoch 16, batch 12100, loss[loss=0.1699, simple_loss=0.2515, pruned_loss=0.04413, over 4798.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03002, over 972031.95 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 16:52:34,707 INFO [train.py:715] (7/8) Epoch 16, batch 12150, loss[loss=0.1456, simple_loss=0.2184, pruned_loss=0.0364, over 4697.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2085, pruned_loss=0.03034, over 972646.95 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 16:53:12,373 INFO [train.py:715] (7/8) Epoch 16, batch 12200, loss[loss=0.1391, simple_loss=0.221, pruned_loss=0.02854, over 4933.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2087, pruned_loss=0.03011, over 973062.80 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 16:53:50,654 INFO [train.py:715] (7/8) Epoch 16, batch 12250, loss[loss=0.1655, simple_loss=0.2366, pruned_loss=0.04723, over 4889.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2087, pruned_loss=0.02973, over 972913.49 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 16:54:29,690 INFO [train.py:715] (7/8) Epoch 16, batch 12300, loss[loss=0.1384, simple_loss=0.2129, pruned_loss=0.032, over 4765.00 frames.], tot_loss[loss=0.1332, simple_loss=0.208, pruned_loss=0.02924, over 973003.65 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 16:55:08,771 INFO [train.py:715] (7/8) Epoch 16, batch 12350, loss[loss=0.11, simple_loss=0.185, pruned_loss=0.01747, over 4889.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02912, over 972984.40 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 16:55:47,031 INFO [train.py:715] (7/8) Epoch 16, batch 12400, loss[loss=0.1238, simple_loss=0.1943, pruned_loss=0.02662, over 4975.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2073, pruned_loss=0.02887, over 972569.92 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 16:56:26,129 INFO [train.py:715] (7/8) Epoch 16, batch 12450, loss[loss=0.1244, simple_loss=0.1895, pruned_loss=0.0296, over 4964.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02935, over 972537.54 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 16:57:05,989 INFO [train.py:715] (7/8) Epoch 16, batch 12500, loss[loss=0.1208, simple_loss=0.2075, pruned_loss=0.01702, over 4973.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2085, pruned_loss=0.0299, over 973168.06 frames.], batch size: 28, lr: 1.38e-04 +2022-05-08 16:57:44,598 INFO [train.py:715] (7/8) Epoch 16, batch 12550, loss[loss=0.1195, simple_loss=0.2016, pruned_loss=0.01875, over 4983.00 frames.], tot_loss[loss=0.1349, simple_loss=0.209, pruned_loss=0.03035, over 972757.19 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 16:58:23,182 INFO [train.py:715] (7/8) Epoch 16, batch 12600, loss[loss=0.1227, simple_loss=0.2069, pruned_loss=0.01922, over 4945.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02971, over 972506.11 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 16:59:01,905 INFO [train.py:715] (7/8) Epoch 16, batch 12650, loss[loss=0.1556, simple_loss=0.2338, pruned_loss=0.03876, over 4908.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02955, over 972530.35 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 
16:59:40,539 INFO [train.py:715] (7/8) Epoch 16, batch 12700, loss[loss=0.1333, simple_loss=0.202, pruned_loss=0.03232, over 4975.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02979, over 971067.72 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 17:00:18,084 INFO [train.py:715] (7/8) Epoch 16, batch 12750, loss[loss=0.1392, simple_loss=0.2087, pruned_loss=0.03486, over 4926.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02937, over 971533.47 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:00:57,706 INFO [train.py:715] (7/8) Epoch 16, batch 12800, loss[loss=0.1078, simple_loss=0.1893, pruned_loss=0.01311, over 4826.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02945, over 972785.71 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:01:36,684 INFO [train.py:715] (7/8) Epoch 16, batch 12850, loss[loss=0.1258, simple_loss=0.1984, pruned_loss=0.02667, over 4975.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02986, over 972440.60 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 17:02:15,046 INFO [train.py:715] (7/8) Epoch 16, batch 12900, loss[loss=0.1391, simple_loss=0.2204, pruned_loss=0.02888, over 4798.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02986, over 971783.67 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:02:53,758 INFO [train.py:715] (7/8) Epoch 16, batch 12950, loss[loss=0.1559, simple_loss=0.2245, pruned_loss=0.04366, over 4866.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2088, pruned_loss=0.03022, over 972393.20 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:03:32,778 INFO [train.py:715] (7/8) Epoch 16, batch 13000, loss[loss=0.1245, simple_loss=0.2014, pruned_loss=0.02382, over 4699.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02986, over 972270.29 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:04:11,282 INFO [train.py:715] (7/8) Epoch 16, batch 13050, loss[loss=0.1609, simple_loss=0.2241, pruned_loss=0.04887, over 4901.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03003, over 971956.38 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:04:49,802 INFO [train.py:715] (7/8) Epoch 16, batch 13100, loss[loss=0.1404, simple_loss=0.2259, pruned_loss=0.02747, over 4909.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02992, over 972078.75 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:05:28,954 INFO [train.py:715] (7/8) Epoch 16, batch 13150, loss[loss=0.1311, simple_loss=0.203, pruned_loss=0.02955, over 4816.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02968, over 971348.18 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 17:06:08,076 INFO [train.py:715] (7/8) Epoch 16, batch 13200, loss[loss=0.1342, simple_loss=0.2157, pruned_loss=0.02639, over 4945.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.02921, over 971627.42 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:06:46,154 INFO [train.py:715] (7/8) Epoch 16, batch 13250, loss[loss=0.1281, simple_loss=0.2035, pruned_loss=0.02634, over 4901.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02946, over 971938.97 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:07:25,006 INFO [train.py:715] (7/8) Epoch 16, batch 13300, loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.03156, over 4874.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02906, over 972708.87 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 
17:08:04,361 INFO [train.py:715] (7/8) Epoch 16, batch 13350, loss[loss=0.1365, simple_loss=0.2049, pruned_loss=0.03404, over 4990.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02907, over 972647.09 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:08:42,684 INFO [train.py:715] (7/8) Epoch 16, batch 13400, loss[loss=0.1177, simple_loss=0.2012, pruned_loss=0.0171, over 4816.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02983, over 972558.37 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:09:21,147 INFO [train.py:715] (7/8) Epoch 16, batch 13450, loss[loss=0.1239, simple_loss=0.2091, pruned_loss=0.01935, over 4784.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.02988, over 972615.58 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:10:00,907 INFO [train.py:715] (7/8) Epoch 16, batch 13500, loss[loss=0.1206, simple_loss=0.1937, pruned_loss=0.02374, over 4916.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02968, over 972490.67 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:10:39,238 INFO [train.py:715] (7/8) Epoch 16, batch 13550, loss[loss=0.1077, simple_loss=0.1855, pruned_loss=0.01496, over 4980.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02976, over 973209.42 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:11:17,351 INFO [train.py:715] (7/8) Epoch 16, batch 13600, loss[loss=0.1352, simple_loss=0.2105, pruned_loss=0.02995, over 4928.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02983, over 972343.74 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 17:11:56,186 INFO [train.py:715] (7/8) Epoch 16, batch 13650, loss[loss=0.1587, simple_loss=0.2346, pruned_loss=0.04138, over 4841.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02925, over 971916.81 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:12:35,109 INFO [train.py:715] (7/8) Epoch 16, batch 13700, loss[loss=0.1214, simple_loss=0.1934, pruned_loss=0.02469, over 4930.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02883, over 972322.19 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:13:13,499 INFO [train.py:715] (7/8) Epoch 16, batch 13750, loss[loss=0.1545, simple_loss=0.233, pruned_loss=0.03802, over 4956.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02914, over 972792.23 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:13:52,014 INFO [train.py:715] (7/8) Epoch 16, batch 13800, loss[loss=0.1127, simple_loss=0.1877, pruned_loss=0.0188, over 4976.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.0294, over 972299.50 frames.], batch size: 31, lr: 1.38e-04 +2022-05-08 17:14:30,650 INFO [train.py:715] (7/8) Epoch 16, batch 13850, loss[loss=0.1297, simple_loss=0.1945, pruned_loss=0.03239, over 4744.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02945, over 972311.40 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:15:08,623 INFO [train.py:715] (7/8) Epoch 16, batch 13900, loss[loss=0.1319, simple_loss=0.2137, pruned_loss=0.02505, over 4886.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02946, over 973087.12 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:15:46,309 INFO [train.py:715] (7/8) Epoch 16, batch 13950, loss[loss=0.1414, simple_loss=0.2086, pruned_loss=0.03706, over 4931.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02953, over 973133.86 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:16:24,662 
INFO [train.py:715] (7/8) Epoch 16, batch 14000, loss[loss=0.1409, simple_loss=0.2196, pruned_loss=0.03111, over 4822.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03022, over 972698.86 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 17:17:03,282 INFO [train.py:715] (7/8) Epoch 16, batch 14050, loss[loss=0.1315, simple_loss=0.2073, pruned_loss=0.02782, over 4905.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.0298, over 973375.64 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:17:41,057 INFO [train.py:715] (7/8) Epoch 16, batch 14100, loss[loss=0.153, simple_loss=0.2368, pruned_loss=0.03464, over 4791.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02987, over 973390.45 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:18:18,776 INFO [train.py:715] (7/8) Epoch 16, batch 14150, loss[loss=0.1426, simple_loss=0.2287, pruned_loss=0.02831, over 4980.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2089, pruned_loss=0.02972, over 973686.28 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:18:57,314 INFO [train.py:715] (7/8) Epoch 16, batch 14200, loss[loss=0.1353, simple_loss=0.2231, pruned_loss=0.02379, over 4897.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2095, pruned_loss=0.0301, over 974116.50 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:19:36,007 INFO [train.py:715] (7/8) Epoch 16, batch 14250, loss[loss=0.1186, simple_loss=0.2062, pruned_loss=0.01547, over 4805.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2085, pruned_loss=0.03001, over 973611.91 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:20:14,626 INFO [train.py:715] (7/8) Epoch 16, batch 14300, loss[loss=0.133, simple_loss=0.2094, pruned_loss=0.0283, over 4796.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.0294, over 973255.46 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:20:53,331 INFO [train.py:715] (7/8) Epoch 16, batch 14350, loss[loss=0.1334, simple_loss=0.2057, pruned_loss=0.03059, over 4966.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02988, over 973139.21 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 17:21:32,524 INFO [train.py:715] (7/8) Epoch 16, batch 14400, loss[loss=0.1233, simple_loss=0.1981, pruned_loss=0.02429, over 4837.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 972620.72 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:22:10,260 INFO [train.py:715] (7/8) Epoch 16, batch 14450, loss[loss=0.1216, simple_loss=0.199, pruned_loss=0.02207, over 4810.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02992, over 972744.47 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 17:22:49,091 INFO [train.py:715] (7/8) Epoch 16, batch 14500, loss[loss=0.1241, simple_loss=0.2069, pruned_loss=0.02069, over 4884.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02995, over 972762.14 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:23:28,026 INFO [train.py:715] (7/8) Epoch 16, batch 14550, loss[loss=0.125, simple_loss=0.1988, pruned_loss=0.0256, over 4977.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02973, over 973665.04 frames.], batch size: 28, lr: 1.38e-04 +2022-05-08 17:24:06,693 INFO [train.py:715] (7/8) Epoch 16, batch 14600, loss[loss=0.1512, simple_loss=0.2279, pruned_loss=0.03724, over 4769.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02983, over 973675.62 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:24:44,963 INFO 
[train.py:715] (7/8) Epoch 16, batch 14650, loss[loss=0.132, simple_loss=0.2119, pruned_loss=0.02608, over 4971.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02965, over 973503.43 frames.], batch size: 28, lr: 1.38e-04 +2022-05-08 17:25:23,541 INFO [train.py:715] (7/8) Epoch 16, batch 14700, loss[loss=0.1279, simple_loss=0.2144, pruned_loss=0.0207, over 4759.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02936, over 973206.48 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:26:02,838 INFO [train.py:715] (7/8) Epoch 16, batch 14750, loss[loss=0.1301, simple_loss=0.2012, pruned_loss=0.02955, over 4785.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.0292, over 973323.06 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:26:40,633 INFO [train.py:715] (7/8) Epoch 16, batch 14800, loss[loss=0.1798, simple_loss=0.2518, pruned_loss=0.05391, over 4785.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02925, over 972776.90 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:27:19,708 INFO [train.py:715] (7/8) Epoch 16, batch 14850, loss[loss=0.1292, simple_loss=0.1984, pruned_loss=0.03005, over 4963.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02938, over 972309.10 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 17:27:58,605 INFO [train.py:715] (7/8) Epoch 16, batch 14900, loss[loss=0.138, simple_loss=0.2154, pruned_loss=0.03031, over 4877.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02987, over 972484.41 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:28:37,059 INFO [train.py:715] (7/8) Epoch 16, batch 14950, loss[loss=0.1429, simple_loss=0.2115, pruned_loss=0.03713, over 4800.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03032, over 972799.58 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:29:16,117 INFO [train.py:715] (7/8) Epoch 16, batch 15000, loss[loss=0.1225, simple_loss=0.1927, pruned_loss=0.0262, over 4826.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03032, over 973035.93 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:29:16,117 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 17:29:25,726 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.1049, simple_loss=0.1884, pruned_loss=0.01069, over 914524.00 frames. 
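One relationship can be checked directly against the numbers printed in this section: throughout the entries above, the reported loss equals 0.5 * simple_loss + pruned_loss, for the per-batch values, the running tot_loss and the validation lines alike (for the validation entry just above, 0.5 * 0.1884 + 0.01069 = 0.1049). The 0.5 weight is inferred from the printed values, not quoted from the training code; the snippet below merely spot-checks a few triples copied from this part of the log.

# Spot-check loss == 0.5 * simple_loss + pruned_loss on triples copied from above
# (the log rounds to about 4 significant digits).
triples = [
    (0.1335, 0.2078, 0.02962),  # epoch 16, batch 10350, tot_loss
    (0.1349, 0.2091, 0.03032),  # epoch 16, batch 15000, tot_loss
    (0.1049, 0.1884, 0.01069),  # epoch 16, validation after batch 15000
]

for loss, simple, pruned in triples:
    recon = 0.5 * simple + pruned
    assert abs(recon - loss) < 5e-4, (loss, recon)  # tolerate printed rounding
    print(f"loss={loss:.4f}  0.5*simple_loss+pruned_loss={recon:.4f}")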
+2022-05-08 17:30:03,998 INFO [train.py:715] (7/8) Epoch 16, batch 15050, loss[loss=0.1341, simple_loss=0.2121, pruned_loss=0.02808, over 4759.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2087, pruned_loss=0.02995, over 973468.18 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:30:42,063 INFO [train.py:715] (7/8) Epoch 16, batch 15100, loss[loss=0.128, simple_loss=0.2001, pruned_loss=0.02794, over 4653.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02975, over 972825.93 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:31:20,869 INFO [train.py:715] (7/8) Epoch 16, batch 15150, loss[loss=0.1309, simple_loss=0.199, pruned_loss=0.03139, over 4910.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02954, over 972305.16 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:31:58,565 INFO [train.py:715] (7/8) Epoch 16, batch 15200, loss[loss=0.1293, simple_loss=0.2121, pruned_loss=0.02325, over 4984.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02955, over 973012.98 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:32:36,117 INFO [train.py:715] (7/8) Epoch 16, batch 15250, loss[loss=0.1142, simple_loss=0.181, pruned_loss=0.0237, over 4985.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02946, over 972682.44 frames.], batch size: 31, lr: 1.38e-04 +2022-05-08 17:33:14,308 INFO [train.py:715] (7/8) Epoch 16, batch 15300, loss[loss=0.1433, simple_loss=0.222, pruned_loss=0.03225, over 4789.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02967, over 972481.80 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:33:52,456 INFO [train.py:715] (7/8) Epoch 16, batch 15350, loss[loss=0.1221, simple_loss=0.1955, pruned_loss=0.02438, over 4886.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02946, over 972885.10 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:34:30,725 INFO [train.py:715] (7/8) Epoch 16, batch 15400, loss[loss=0.123, simple_loss=0.1993, pruned_loss=0.02337, over 4868.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2088, pruned_loss=0.0299, over 971227.79 frames.], batch size: 20, lr: 1.38e-04 +2022-05-08 17:35:08,739 INFO [train.py:715] (7/8) Epoch 16, batch 15450, loss[loss=0.1083, simple_loss=0.1782, pruned_loss=0.01922, over 4801.00 frames.], tot_loss[loss=0.134, simple_loss=0.2087, pruned_loss=0.02966, over 972441.50 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 17:35:47,169 INFO [train.py:715] (7/8) Epoch 16, batch 15500, loss[loss=0.1329, simple_loss=0.2046, pruned_loss=0.0306, over 4695.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02972, over 972100.58 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:36:24,799 INFO [train.py:715] (7/8) Epoch 16, batch 15550, loss[loss=0.1533, simple_loss=0.236, pruned_loss=0.03535, over 4925.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02915, over 972797.96 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 17:37:02,466 INFO [train.py:715] (7/8) Epoch 16, batch 15600, loss[loss=0.1298, simple_loss=0.2027, pruned_loss=0.02843, over 4884.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2081, pruned_loss=0.02919, over 972445.26 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:37:41,083 INFO [train.py:715] (7/8) Epoch 16, batch 15650, loss[loss=0.1292, simple_loss=0.1956, pruned_loss=0.03146, over 4904.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.0296, over 972120.62 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 
17:38:19,117 INFO [train.py:715] (7/8) Epoch 16, batch 15700, loss[loss=0.1433, simple_loss=0.2265, pruned_loss=0.03002, over 4745.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.0298, over 972375.60 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:38:56,853 INFO [train.py:715] (7/8) Epoch 16, batch 15750, loss[loss=0.1553, simple_loss=0.2319, pruned_loss=0.03935, over 4834.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02991, over 971951.65 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 17:39:34,742 INFO [train.py:715] (7/8) Epoch 16, batch 15800, loss[loss=0.1714, simple_loss=0.2442, pruned_loss=0.04933, over 4803.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02982, over 971905.28 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 17:40:13,086 INFO [train.py:715] (7/8) Epoch 16, batch 15850, loss[loss=0.1493, simple_loss=0.2098, pruned_loss=0.0444, over 4863.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.0298, over 972046.24 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:40:50,713 INFO [train.py:715] (7/8) Epoch 16, batch 15900, loss[loss=0.1555, simple_loss=0.222, pruned_loss=0.04444, over 4800.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02975, over 971848.11 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:41:28,318 INFO [train.py:715] (7/8) Epoch 16, batch 15950, loss[loss=0.1538, simple_loss=0.2188, pruned_loss=0.04441, over 4984.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.03013, over 970743.02 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:42:06,732 INFO [train.py:715] (7/8) Epoch 16, batch 16000, loss[loss=0.1147, simple_loss=0.1786, pruned_loss=0.02537, over 4850.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03003, over 970675.71 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:42:44,835 INFO [train.py:715] (7/8) Epoch 16, batch 16050, loss[loss=0.1294, simple_loss=0.1968, pruned_loss=0.031, over 4786.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2078, pruned_loss=0.03044, over 969650.35 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:43:22,461 INFO [train.py:715] (7/8) Epoch 16, batch 16100, loss[loss=0.1204, simple_loss=0.1825, pruned_loss=0.02912, over 4647.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2079, pruned_loss=0.03097, over 969283.39 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:43:59,973 INFO [train.py:715] (7/8) Epoch 16, batch 16150, loss[loss=0.1258, simple_loss=0.1873, pruned_loss=0.03218, over 4964.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2083, pruned_loss=0.03098, over 969208.98 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 17:44:38,352 INFO [train.py:715] (7/8) Epoch 16, batch 16200, loss[loss=0.1359, simple_loss=0.2053, pruned_loss=0.03329, over 4855.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2082, pruned_loss=0.03058, over 969817.27 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:45:15,917 INFO [train.py:715] (7/8) Epoch 16, batch 16250, loss[loss=0.1251, simple_loss=0.2059, pruned_loss=0.02214, over 4895.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03018, over 970326.25 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:45:53,547 INFO [train.py:715] (7/8) Epoch 16, batch 16300, loss[loss=0.1183, simple_loss=0.1942, pruned_loss=0.02126, over 4812.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02985, over 971501.28 frames.], batch size: 26, lr: 1.38e-04 +2022-05-08 
17:46:31,874 INFO [train.py:715] (7/8) Epoch 16, batch 16350, loss[loss=0.1459, simple_loss=0.2221, pruned_loss=0.03483, over 4898.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02974, over 970945.72 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:47:10,532 INFO [train.py:715] (7/8) Epoch 16, batch 16400, loss[loss=0.1208, simple_loss=0.1931, pruned_loss=0.0242, over 4947.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03006, over 971461.98 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 17:47:47,571 INFO [train.py:715] (7/8) Epoch 16, batch 16450, loss[loss=0.111, simple_loss=0.1798, pruned_loss=0.02106, over 4700.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2077, pruned_loss=0.03027, over 971711.40 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:48:25,520 INFO [train.py:715] (7/8) Epoch 16, batch 16500, loss[loss=0.1322, simple_loss=0.2046, pruned_loss=0.0299, over 4862.00 frames.], tot_loss[loss=0.134, simple_loss=0.2076, pruned_loss=0.0302, over 972081.22 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:49:04,092 INFO [train.py:715] (7/8) Epoch 16, batch 16550, loss[loss=0.139, simple_loss=0.2123, pruned_loss=0.03285, over 4915.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2078, pruned_loss=0.03022, over 971762.50 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 17:49:41,515 INFO [train.py:715] (7/8) Epoch 16, batch 16600, loss[loss=0.1213, simple_loss=0.192, pruned_loss=0.02525, over 4696.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02948, over 972018.38 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:50:19,532 INFO [train.py:715] (7/8) Epoch 16, batch 16650, loss[loss=0.1437, simple_loss=0.2216, pruned_loss=0.03289, over 4955.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02916, over 971977.47 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 17:50:57,795 INFO [train.py:715] (7/8) Epoch 16, batch 16700, loss[loss=0.1253, simple_loss=0.2031, pruned_loss=0.02373, over 4974.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02947, over 972080.91 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:51:35,937 INFO [train.py:715] (7/8) Epoch 16, batch 16750, loss[loss=0.1302, simple_loss=0.2011, pruned_loss=0.0297, over 4785.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02955, over 973565.74 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 17:52:13,453 INFO [train.py:715] (7/8) Epoch 16, batch 16800, loss[loss=0.1447, simple_loss=0.2227, pruned_loss=0.03335, over 4897.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02981, over 973575.71 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 17:52:51,537 INFO [train.py:715] (7/8) Epoch 16, batch 16850, loss[loss=0.1396, simple_loss=0.2086, pruned_loss=0.0353, over 4902.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2074, pruned_loss=0.02983, over 973254.10 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 17:53:30,000 INFO [train.py:715] (7/8) Epoch 16, batch 16900, loss[loss=0.1682, simple_loss=0.226, pruned_loss=0.05526, over 4815.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02995, over 973987.25 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 17:54:07,592 INFO [train.py:715] (7/8) Epoch 16, batch 16950, loss[loss=0.1641, simple_loss=0.2326, pruned_loss=0.04781, over 4770.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02973, over 973242.95 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 17:54:45,477 INFO 
[train.py:715] (7/8) Epoch 16, batch 17000, loss[loss=0.1567, simple_loss=0.2248, pruned_loss=0.04434, over 4843.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02971, over 972538.38 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:55:23,673 INFO [train.py:715] (7/8) Epoch 16, batch 17050, loss[loss=0.14, simple_loss=0.2096, pruned_loss=0.03516, over 4962.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02959, over 972931.47 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:56:02,257 INFO [train.py:715] (7/8) Epoch 16, batch 17100, loss[loss=0.119, simple_loss=0.1986, pruned_loss=0.01968, over 4981.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02928, over 972691.53 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 17:56:39,333 INFO [train.py:715] (7/8) Epoch 16, batch 17150, loss[loss=0.1409, simple_loss=0.2115, pruned_loss=0.03508, over 4827.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02989, over 972340.67 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 17:57:17,464 INFO [train.py:715] (7/8) Epoch 16, batch 17200, loss[loss=0.1397, simple_loss=0.2052, pruned_loss=0.03707, over 4856.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.02932, over 972360.68 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 17:57:56,361 INFO [train.py:715] (7/8) Epoch 16, batch 17250, loss[loss=0.1405, simple_loss=0.2179, pruned_loss=0.03154, over 4917.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02914, over 971878.98 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 17:58:33,737 INFO [train.py:715] (7/8) Epoch 16, batch 17300, loss[loss=0.1355, simple_loss=0.2025, pruned_loss=0.03424, over 4882.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02881, over 972915.11 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 17:59:11,266 INFO [train.py:715] (7/8) Epoch 16, batch 17350, loss[loss=0.1457, simple_loss=0.2145, pruned_loss=0.03847, over 4852.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02928, over 972985.36 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 17:59:49,077 INFO [train.py:715] (7/8) Epoch 16, batch 17400, loss[loss=0.1192, simple_loss=0.1948, pruned_loss=0.0218, over 4768.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02954, over 972432.76 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:00:27,744 INFO [train.py:715] (7/8) Epoch 16, batch 17450, loss[loss=0.1667, simple_loss=0.2349, pruned_loss=0.04931, over 4794.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02939, over 972653.20 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:01:04,506 INFO [train.py:715] (7/8) Epoch 16, batch 17500, loss[loss=0.1192, simple_loss=0.1867, pruned_loss=0.02587, over 4772.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02948, over 972430.34 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:01:42,654 INFO [train.py:715] (7/8) Epoch 16, batch 17550, loss[loss=0.1047, simple_loss=0.1796, pruned_loss=0.01484, over 4810.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02932, over 972285.57 frames.], batch size: 25, lr: 1.38e-04 +2022-05-08 18:02:21,341 INFO [train.py:715] (7/8) Epoch 16, batch 17600, loss[loss=0.1433, simple_loss=0.1997, pruned_loss=0.04347, over 4816.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02996, over 972241.97 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 18:02:58,692 INFO 
[train.py:715] (7/8) Epoch 16, batch 17650, loss[loss=0.1106, simple_loss=0.1806, pruned_loss=0.02027, over 4877.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02937, over 971560.74 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:03:36,636 INFO [train.py:715] (7/8) Epoch 16, batch 17700, loss[loss=0.146, simple_loss=0.2197, pruned_loss=0.03621, over 4762.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02956, over 970851.07 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:04:15,006 INFO [train.py:715] (7/8) Epoch 16, batch 17750, loss[loss=0.1259, simple_loss=0.2044, pruned_loss=0.02371, over 4781.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02961, over 970231.15 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:04:53,067 INFO [train.py:715] (7/8) Epoch 16, batch 17800, loss[loss=0.1472, simple_loss=0.2147, pruned_loss=0.03983, over 4740.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02978, over 970274.57 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:05:30,278 INFO [train.py:715] (7/8) Epoch 16, batch 17850, loss[loss=0.1199, simple_loss=0.1994, pruned_loss=0.0202, over 4781.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02981, over 970207.83 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 18:06:08,449 INFO [train.py:715] (7/8) Epoch 16, batch 17900, loss[loss=0.1278, simple_loss=0.2083, pruned_loss=0.02366, over 4911.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02993, over 970196.24 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:06:46,890 INFO [train.py:715] (7/8) Epoch 16, batch 17950, loss[loss=0.1047, simple_loss=0.1676, pruned_loss=0.02092, over 4829.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02941, over 970479.17 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 18:07:24,273 INFO [train.py:715] (7/8) Epoch 16, batch 18000, loss[loss=0.1327, simple_loss=0.1956, pruned_loss=0.03485, over 4896.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02932, over 970338.94 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 18:07:24,274 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 18:07:33,813 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.105, simple_loss=0.1884, pruned_loss=0.01082, over 914524.00 frames. 
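Each entry carries two loss statistics: the loss on the current batch, computed over a few thousand frames (mostly 4,650 to 4,990 in this section), and a running tot_loss reported over a nearly constant 970,000 to 974,000 frames. A frame-weighted accumulator that decays by (1 - 1/K) per batch with K around 200 settles at an effective window of about 200 * 4,870 = 974,000 frames, which is consistent with those totals. The class below is only an illustration of how such a decayed, frame-weighted running average could be maintained; the actual bookkeeping in the training script may differ.

class RunningLoss:
    """Frame-weighted running loss with exponential decay (illustrative sketch)."""

    def __init__(self, decay_batches: int = 200):
        self.keep = 1.0 - 1.0 / decay_batches  # fraction kept after each batch
        self.loss_frames = 0.0                 # decayed sum of loss * frames
        self.frames = 0.0                      # decayed sum of frames

    def update(self, batch_loss: float, batch_frames: float) -> None:
        self.loss_frames = self.loss_frames * self.keep + batch_loss * batch_frames
        self.frames = self.frames * self.keep + batch_frames

    @property
    def value(self) -> float:
        return self.loss_frames / max(self.frames, 1.0)

running = RunningLoss()
for _ in range(1000):
    running.update(batch_loss=0.13, batch_frames=4800.0)
# Prints roughly 953600 and 0.13; the steady state is 200 * 4800 = 960000 frames.
print(round(running.frames), round(running.value, 4))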
+2022-05-08 18:08:11,767 INFO [train.py:715] (7/8) Epoch 16, batch 18050, loss[loss=0.13, simple_loss=0.2088, pruned_loss=0.02562, over 4912.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.0293, over 970569.55 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 18:08:50,178 INFO [train.py:715] (7/8) Epoch 16, batch 18100, loss[loss=0.1502, simple_loss=0.2194, pruned_loss=0.04053, over 4779.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.0295, over 971802.51 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:09:28,823 INFO [train.py:715] (7/8) Epoch 16, batch 18150, loss[loss=0.1222, simple_loss=0.2053, pruned_loss=0.0196, over 4944.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02945, over 972115.77 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 18:10:07,472 INFO [train.py:715] (7/8) Epoch 16, batch 18200, loss[loss=0.1306, simple_loss=0.2073, pruned_loss=0.02694, over 4933.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02951, over 972124.95 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 18:10:45,082 INFO [train.py:715] (7/8) Epoch 16, batch 18250, loss[loss=0.125, simple_loss=0.1878, pruned_loss=0.03113, over 4877.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02956, over 972561.72 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 18:11:23,847 INFO [train.py:715] (7/8) Epoch 16, batch 18300, loss[loss=0.1944, simple_loss=0.2571, pruned_loss=0.06583, over 4987.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02947, over 972124.50 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 18:12:02,947 INFO [train.py:715] (7/8) Epoch 16, batch 18350, loss[loss=0.137, simple_loss=0.211, pruned_loss=0.03154, over 4844.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02949, over 971130.33 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 18:12:40,719 INFO [train.py:715] (7/8) Epoch 16, batch 18400, loss[loss=0.1367, simple_loss=0.2113, pruned_loss=0.03108, over 4841.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02925, over 971432.96 frames.], batch size: 30, lr: 1.38e-04 +2022-05-08 18:13:19,243 INFO [train.py:715] (7/8) Epoch 16, batch 18450, loss[loss=0.114, simple_loss=0.1843, pruned_loss=0.02184, over 4867.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02944, over 972495.91 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 18:13:57,850 INFO [train.py:715] (7/8) Epoch 16, batch 18500, loss[loss=0.1464, simple_loss=0.2165, pruned_loss=0.03813, over 4926.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02949, over 972763.31 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 18:14:36,372 INFO [train.py:715] (7/8) Epoch 16, batch 18550, loss[loss=0.1377, simple_loss=0.2047, pruned_loss=0.0354, over 4968.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02936, over 973008.02 frames.], batch size: 31, lr: 1.38e-04 +2022-05-08 18:15:13,856 INFO [train.py:715] (7/8) Epoch 16, batch 18600, loss[loss=0.1283, simple_loss=0.204, pruned_loss=0.02626, over 4851.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02931, over 972719.22 frames.], batch size: 32, lr: 1.38e-04 +2022-05-08 18:15:52,139 INFO [train.py:715] (7/8) Epoch 16, batch 18650, loss[loss=0.1396, simple_loss=0.2094, pruned_loss=0.03494, over 4695.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02919, over 971092.47 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 
18:16:30,642 INFO [train.py:715] (7/8) Epoch 16, batch 18700, loss[loss=0.144, simple_loss=0.217, pruned_loss=0.03551, over 4777.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02902, over 971133.29 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:17:08,139 INFO [train.py:715] (7/8) Epoch 16, batch 18750, loss[loss=0.109, simple_loss=0.1901, pruned_loss=0.01395, over 4813.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02904, over 971963.16 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 18:17:45,512 INFO [train.py:715] (7/8) Epoch 16, batch 18800, loss[loss=0.1322, simple_loss=0.1977, pruned_loss=0.03333, over 4748.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02956, over 971388.01 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:18:23,822 INFO [train.py:715] (7/8) Epoch 16, batch 18850, loss[loss=0.1241, simple_loss=0.1977, pruned_loss=0.02525, over 4975.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02971, over 971607.39 frames.], batch size: 28, lr: 1.38e-04 +2022-05-08 18:19:02,092 INFO [train.py:715] (7/8) Epoch 16, batch 18900, loss[loss=0.1181, simple_loss=0.1863, pruned_loss=0.02499, over 4692.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.0294, over 971350.49 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:19:39,521 INFO [train.py:715] (7/8) Epoch 16, batch 18950, loss[loss=0.1135, simple_loss=0.191, pruned_loss=0.01802, over 4920.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02933, over 971336.46 frames.], batch size: 18, lr: 1.38e-04 +2022-05-08 18:20:17,359 INFO [train.py:715] (7/8) Epoch 16, batch 19000, loss[loss=0.1306, simple_loss=0.2119, pruned_loss=0.02464, over 4892.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.0291, over 972348.75 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 18:20:55,966 INFO [train.py:715] (7/8) Epoch 16, batch 19050, loss[loss=0.1419, simple_loss=0.2165, pruned_loss=0.03366, over 4827.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02921, over 972205.50 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 18:21:36,428 INFO [train.py:715] (7/8) Epoch 16, batch 19100, loss[loss=0.1451, simple_loss=0.216, pruned_loss=0.03708, over 4988.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02934, over 972348.61 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:22:14,090 INFO [train.py:715] (7/8) Epoch 16, batch 19150, loss[loss=0.141, simple_loss=0.2274, pruned_loss=0.0273, over 4967.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02937, over 972136.41 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 18:22:52,369 INFO [train.py:715] (7/8) Epoch 16, batch 19200, loss[loss=0.1199, simple_loss=0.1898, pruned_loss=0.02495, over 4921.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02945, over 971732.94 frames.], batch size: 29, lr: 1.38e-04 +2022-05-08 18:23:31,020 INFO [train.py:715] (7/8) Epoch 16, batch 19250, loss[loss=0.1318, simple_loss=0.1975, pruned_loss=0.03308, over 4686.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02927, over 971539.82 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:24:08,554 INFO [train.py:715] (7/8) Epoch 16, batch 19300, loss[loss=0.1332, simple_loss=0.1992, pruned_loss=0.03356, over 4896.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02909, over 971657.96 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 18:24:46,582 
INFO [train.py:715] (7/8) Epoch 16, batch 19350, loss[loss=0.1762, simple_loss=0.2494, pruned_loss=0.05152, over 4952.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02923, over 973167.82 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 18:25:25,227 INFO [train.py:715] (7/8) Epoch 16, batch 19400, loss[loss=0.1288, simple_loss=0.193, pruned_loss=0.03231, over 4843.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.0295, over 972754.18 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:26:03,259 INFO [train.py:715] (7/8) Epoch 16, batch 19450, loss[loss=0.1284, simple_loss=0.2032, pruned_loss=0.02677, over 4822.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02945, over 972892.93 frames.], batch size: 27, lr: 1.38e-04 +2022-05-08 18:26:40,798 INFO [train.py:715] (7/8) Epoch 16, batch 19500, loss[loss=0.1347, simple_loss=0.2068, pruned_loss=0.0313, over 4970.00 frames.], tot_loss[loss=0.134, simple_loss=0.2078, pruned_loss=0.03012, over 972432.46 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:27:18,958 INFO [train.py:715] (7/8) Epoch 16, batch 19550, loss[loss=0.1598, simple_loss=0.2322, pruned_loss=0.04363, over 4754.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2085, pruned_loss=0.03051, over 972843.33 frames.], batch size: 16, lr: 1.38e-04 +2022-05-08 18:27:57,191 INFO [train.py:715] (7/8) Epoch 16, batch 19600, loss[loss=0.1445, simple_loss=0.2227, pruned_loss=0.03313, over 4888.00 frames.], tot_loss[loss=0.134, simple_loss=0.2075, pruned_loss=0.03026, over 973652.33 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 18:28:34,600 INFO [train.py:715] (7/8) Epoch 16, batch 19650, loss[loss=0.1749, simple_loss=0.2538, pruned_loss=0.04796, over 4894.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03039, over 972889.50 frames.], batch size: 22, lr: 1.38e-04 +2022-05-08 18:29:12,875 INFO [train.py:715] (7/8) Epoch 16, batch 19700, loss[loss=0.1392, simple_loss=0.2187, pruned_loss=0.0298, over 4758.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2084, pruned_loss=0.03041, over 971911.65 frames.], batch size: 12, lr: 1.38e-04 +2022-05-08 18:29:51,091 INFO [train.py:715] (7/8) Epoch 16, batch 19750, loss[loss=0.1606, simple_loss=0.23, pruned_loss=0.0456, over 4904.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03006, over 971692.77 frames.], batch size: 17, lr: 1.38e-04 +2022-05-08 18:30:28,917 INFO [train.py:715] (7/8) Epoch 16, batch 19800, loss[loss=0.1363, simple_loss=0.2085, pruned_loss=0.03205, over 4787.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.02999, over 971396.95 frames.], batch size: 24, lr: 1.38e-04 +2022-05-08 18:31:06,636 INFO [train.py:715] (7/8) Epoch 16, batch 19850, loss[loss=0.1171, simple_loss=0.1948, pruned_loss=0.01966, over 4784.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03002, over 971253.00 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:31:44,942 INFO [train.py:715] (7/8) Epoch 16, batch 19900, loss[loss=0.1846, simple_loss=0.2414, pruned_loss=0.06393, over 4756.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03001, over 971659.01 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 18:32:22,972 INFO [train.py:715] (7/8) Epoch 16, batch 19950, loss[loss=0.1505, simple_loss=0.2192, pruned_loss=0.04093, over 4779.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03025, over 972901.27 frames.], batch size: 14, lr: 1.38e-04 +2022-05-08 18:33:00,614 INFO [train.py:715] 
(7/8) Epoch 16, batch 20000, loss[loss=0.1191, simple_loss=0.1941, pruned_loss=0.02202, over 4812.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03006, over 972228.47 frames.], batch size: 13, lr: 1.38e-04 +2022-05-08 18:33:38,892 INFO [train.py:715] (7/8) Epoch 16, batch 20050, loss[loss=0.1283, simple_loss=0.2094, pruned_loss=0.0236, over 4820.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02982, over 972525.37 frames.], batch size: 15, lr: 1.38e-04 +2022-05-08 18:34:17,306 INFO [train.py:715] (7/8) Epoch 16, batch 20100, loss[loss=0.1596, simple_loss=0.2333, pruned_loss=0.04292, over 4980.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.03015, over 973023.33 frames.], batch size: 35, lr: 1.38e-04 +2022-05-08 18:34:54,668 INFO [train.py:715] (7/8) Epoch 16, batch 20150, loss[loss=0.1519, simple_loss=0.2322, pruned_loss=0.0358, over 4947.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02976, over 972682.16 frames.], batch size: 23, lr: 1.38e-04 +2022-05-08 18:35:32,576 INFO [train.py:715] (7/8) Epoch 16, batch 20200, loss[loss=0.1536, simple_loss=0.2232, pruned_loss=0.04197, over 4886.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02938, over 972139.04 frames.], batch size: 39, lr: 1.38e-04 +2022-05-08 18:36:10,896 INFO [train.py:715] (7/8) Epoch 16, batch 20250, loss[loss=0.1113, simple_loss=0.1887, pruned_loss=0.01691, over 4760.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02925, over 970979.75 frames.], batch size: 19, lr: 1.38e-04 +2022-05-08 18:36:49,190 INFO [train.py:715] (7/8) Epoch 16, batch 20300, loss[loss=0.1378, simple_loss=0.2147, pruned_loss=0.03042, over 4798.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02892, over 971918.62 frames.], batch size: 21, lr: 1.38e-04 +2022-05-08 18:37:27,016 INFO [train.py:715] (7/8) Epoch 16, batch 20350, loss[loss=0.1505, simple_loss=0.2214, pruned_loss=0.0398, over 4700.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02911, over 972315.52 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:38:05,175 INFO [train.py:715] (7/8) Epoch 16, batch 20400, loss[loss=0.192, simple_loss=0.2675, pruned_loss=0.05827, over 4881.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02938, over 972768.47 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 18:38:43,165 INFO [train.py:715] (7/8) Epoch 16, batch 20450, loss[loss=0.1365, simple_loss=0.2098, pruned_loss=0.0316, over 4818.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02966, over 972886.69 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:39:21,071 INFO [train.py:715] (7/8) Epoch 16, batch 20500, loss[loss=0.1348, simple_loss=0.2245, pruned_loss=0.02249, over 4815.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02947, over 972981.63 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 18:39:58,715 INFO [train.py:715] (7/8) Epoch 16, batch 20550, loss[loss=0.1281, simple_loss=0.2073, pruned_loss=0.0245, over 4800.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02932, over 972747.19 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 18:40:37,506 INFO [train.py:715] (7/8) Epoch 16, batch 20600, loss[loss=0.1223, simple_loss=0.2012, pruned_loss=0.02166, over 4794.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02938, over 972371.84 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 18:41:15,473 INFO [train.py:715] (7/8) Epoch 
16, batch 20650, loss[loss=0.1374, simple_loss=0.2182, pruned_loss=0.0283, over 4785.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02922, over 972318.98 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:41:52,934 INFO [train.py:715] (7/8) Epoch 16, batch 20700, loss[loss=0.1096, simple_loss=0.1851, pruned_loss=0.01703, over 4862.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02953, over 973028.62 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 18:42:31,440 INFO [train.py:715] (7/8) Epoch 16, batch 20750, loss[loss=0.1332, simple_loss=0.211, pruned_loss=0.0277, over 4943.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02985, over 973151.80 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 18:43:09,456 INFO [train.py:715] (7/8) Epoch 16, batch 20800, loss[loss=0.1386, simple_loss=0.2095, pruned_loss=0.03388, over 4826.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.0296, over 972561.83 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 18:43:47,990 INFO [train.py:715] (7/8) Epoch 16, batch 20850, loss[loss=0.1161, simple_loss=0.1962, pruned_loss=0.01802, over 4803.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03002, over 971981.95 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 18:44:25,954 INFO [train.py:715] (7/8) Epoch 16, batch 20900, loss[loss=0.1316, simple_loss=0.2112, pruned_loss=0.02599, over 4976.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.02981, over 971840.28 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 18:45:05,225 INFO [train.py:715] (7/8) Epoch 16, batch 20950, loss[loss=0.1235, simple_loss=0.1923, pruned_loss=0.02739, over 4968.00 frames.], tot_loss[loss=0.1348, simple_loss=0.209, pruned_loss=0.03036, over 972591.99 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 18:45:43,431 INFO [train.py:715] (7/8) Epoch 16, batch 21000, loss[loss=0.1317, simple_loss=0.2103, pruned_loss=0.02654, over 4871.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03001, over 972889.87 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 18:45:43,432 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 18:45:53,029 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.1047, simple_loss=0.1882, pruned_loss=0.0106, over 914524.00 frames. 
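The interleaved "Computing validation loss" entries always report the same 914,524-frame total, so the full dev set appears to be scored at each validation point while training resumes afterwards. Below is a generic PyTorch sketch of such a periodic, frame-weighted validation pass; model, dev_loader and compute_loss are placeholders for whatever the training script actually uses, and compute_loss is assumed to return the loss summed over the batch together with that batch's frame count.

import torch

def validate(model, dev_loader, compute_loss):
    """Frame-weighted average loss over the whole dev set (generic sketch)."""
    was_training = model.training
    model.eval()
    total_loss, total_frames = 0.0, 0.0
    with torch.no_grad():  # no gradients needed for validation
        for batch in dev_loader:
            loss_sum, num_frames = compute_loss(model, batch)
            total_loss += float(loss_sum)
            total_frames += float(num_frames)
    if was_training:
        model.train()  # restore training mode before continuing
    return total_loss / max(total_frames, 1.0)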
+2022-05-08 18:46:31,912 INFO [train.py:715] (7/8) Epoch 16, batch 21050, loss[loss=0.1161, simple_loss=0.1948, pruned_loss=0.01876, over 4886.00 frames.], tot_loss[loss=0.1349, simple_loss=0.2091, pruned_loss=0.03033, over 972186.41 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 18:47:10,472 INFO [train.py:715] (7/8) Epoch 16, batch 21100, loss[loss=0.1293, simple_loss=0.2009, pruned_loss=0.02883, over 4938.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03032, over 972173.70 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 18:47:49,069 INFO [train.py:715] (7/8) Epoch 16, batch 21150, loss[loss=0.12, simple_loss=0.2018, pruned_loss=0.01906, over 4923.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2079, pruned_loss=0.03019, over 971800.29 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 18:48:27,789 INFO [train.py:715] (7/8) Epoch 16, batch 21200, loss[loss=0.1566, simple_loss=0.2268, pruned_loss=0.04315, over 4868.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02989, over 973216.92 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 18:49:06,847 INFO [train.py:715] (7/8) Epoch 16, batch 21250, loss[loss=0.1216, simple_loss=0.1974, pruned_loss=0.0229, over 4719.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02969, over 972953.35 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:49:44,918 INFO [train.py:715] (7/8) Epoch 16, batch 21300, loss[loss=0.1138, simple_loss=0.1909, pruned_loss=0.01838, over 4945.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02931, over 972975.53 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 18:50:23,521 INFO [train.py:715] (7/8) Epoch 16, batch 21350, loss[loss=0.1072, simple_loss=0.1842, pruned_loss=0.01509, over 4756.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02867, over 973166.19 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 18:51:01,535 INFO [train.py:715] (7/8) Epoch 16, batch 21400, loss[loss=0.1236, simple_loss=0.2061, pruned_loss=0.02055, over 4894.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.029, over 972612.82 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 18:51:39,054 INFO [train.py:715] (7/8) Epoch 16, batch 21450, loss[loss=0.1344, simple_loss=0.2147, pruned_loss=0.02709, over 4896.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02875, over 972539.70 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 18:52:17,448 INFO [train.py:715] (7/8) Epoch 16, batch 21500, loss[loss=0.1424, simple_loss=0.2187, pruned_loss=0.03303, over 4975.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.029, over 972843.89 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:52:55,412 INFO [train.py:715] (7/8) Epoch 16, batch 21550, loss[loss=0.131, simple_loss=0.2023, pruned_loss=0.02984, over 4880.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02944, over 972055.78 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 18:53:33,003 INFO [train.py:715] (7/8) Epoch 16, batch 21600, loss[loss=0.1252, simple_loss=0.1984, pruned_loss=0.02598, over 4923.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02902, over 971568.23 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 18:54:11,336 INFO [train.py:715] (7/8) Epoch 16, batch 21650, loss[loss=0.1486, simple_loss=0.2214, pruned_loss=0.03783, over 4887.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02899, over 972684.87 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 
18:54:49,121 INFO [train.py:715] (7/8) Epoch 16, batch 21700, loss[loss=0.121, simple_loss=0.1949, pruned_loss=0.02359, over 4799.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02915, over 972731.02 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 18:55:27,322 INFO [train.py:715] (7/8) Epoch 16, batch 21750, loss[loss=0.1229, simple_loss=0.1945, pruned_loss=0.0257, over 4880.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02937, over 973455.57 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 18:56:04,816 INFO [train.py:715] (7/8) Epoch 16, batch 21800, loss[loss=0.1149, simple_loss=0.1866, pruned_loss=0.02163, over 4973.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02994, over 974320.91 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 18:56:42,919 INFO [train.py:715] (7/8) Epoch 16, batch 21850, loss[loss=0.1704, simple_loss=0.2324, pruned_loss=0.05424, over 4784.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2078, pruned_loss=0.03002, over 973562.56 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 18:57:20,560 INFO [train.py:715] (7/8) Epoch 16, batch 21900, loss[loss=0.1405, simple_loss=0.2219, pruned_loss=0.02955, over 4731.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03024, over 973299.10 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 18:57:57,978 INFO [train.py:715] (7/8) Epoch 16, batch 21950, loss[loss=0.1255, simple_loss=0.1987, pruned_loss=0.02616, over 4940.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02973, over 973684.57 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 18:58:36,384 INFO [train.py:715] (7/8) Epoch 16, batch 22000, loss[loss=0.1429, simple_loss=0.2377, pruned_loss=0.0241, over 4976.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02987, over 973770.66 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 18:59:13,997 INFO [train.py:715] (7/8) Epoch 16, batch 22050, loss[loss=0.1134, simple_loss=0.189, pruned_loss=0.01887, over 4886.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03018, over 973671.05 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 18:59:52,236 INFO [train.py:715] (7/8) Epoch 16, batch 22100, loss[loss=0.1318, simple_loss=0.2052, pruned_loss=0.02921, over 4837.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.0293, over 972967.73 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 19:00:29,951 INFO [train.py:715] (7/8) Epoch 16, batch 22150, loss[loss=0.1264, simple_loss=0.197, pruned_loss=0.02787, over 4890.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02954, over 972418.61 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:01:08,385 INFO [train.py:715] (7/8) Epoch 16, batch 22200, loss[loss=0.1267, simple_loss=0.1965, pruned_loss=0.02843, over 4695.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02931, over 972320.23 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:01:46,150 INFO [train.py:715] (7/8) Epoch 16, batch 22250, loss[loss=0.1492, simple_loss=0.2189, pruned_loss=0.03971, over 4988.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.0295, over 973117.29 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:02:24,236 INFO [train.py:715] (7/8) Epoch 16, batch 22300, loss[loss=0.1294, simple_loss=0.2055, pruned_loss=0.02663, over 4758.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02941, over 973154.19 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:03:02,794 
INFO [train.py:715] (7/8) Epoch 16, batch 22350, loss[loss=0.1541, simple_loss=0.2229, pruned_loss=0.04263, over 4989.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02986, over 973838.13 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:03:40,843 INFO [train.py:715] (7/8) Epoch 16, batch 22400, loss[loss=0.1288, simple_loss=0.2066, pruned_loss=0.02552, over 4875.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02979, over 973599.04 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:04:19,198 INFO [train.py:715] (7/8) Epoch 16, batch 22450, loss[loss=0.1178, simple_loss=0.1914, pruned_loss=0.02205, over 4965.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02939, over 972924.05 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:04:57,327 INFO [train.py:715] (7/8) Epoch 16, batch 22500, loss[loss=0.1619, simple_loss=0.2256, pruned_loss=0.04914, over 4784.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02966, over 971971.91 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:05:35,513 INFO [train.py:715] (7/8) Epoch 16, batch 22550, loss[loss=0.1321, simple_loss=0.2046, pruned_loss=0.02979, over 4791.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02935, over 972235.35 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:06:13,252 INFO [train.py:715] (7/8) Epoch 16, batch 22600, loss[loss=0.1187, simple_loss=0.1999, pruned_loss=0.01875, over 4903.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02893, over 972250.36 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:06:50,941 INFO [train.py:715] (7/8) Epoch 16, batch 22650, loss[loss=0.1145, simple_loss=0.195, pruned_loss=0.01698, over 4817.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02938, over 972798.69 frames.], batch size: 27, lr: 1.37e-04 +2022-05-08 19:07:29,634 INFO [train.py:715] (7/8) Epoch 16, batch 22700, loss[loss=0.1883, simple_loss=0.2506, pruned_loss=0.06298, over 4833.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02971, over 972480.64 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 19:08:07,677 INFO [train.py:715] (7/8) Epoch 16, batch 22750, loss[loss=0.1148, simple_loss=0.1872, pruned_loss=0.02122, over 4776.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02968, over 972277.88 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:08:45,790 INFO [train.py:715] (7/8) Epoch 16, batch 22800, loss[loss=0.1147, simple_loss=0.1964, pruned_loss=0.01649, over 4859.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02993, over 971826.49 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:09:23,699 INFO [train.py:715] (7/8) Epoch 16, batch 22850, loss[loss=0.1396, simple_loss=0.2187, pruned_loss=0.03026, over 4906.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02985, over 972080.21 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:10:01,845 INFO [train.py:715] (7/8) Epoch 16, batch 22900, loss[loss=0.134, simple_loss=0.2044, pruned_loss=0.03179, over 4773.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02961, over 971896.35 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:10:39,882 INFO [train.py:715] (7/8) Epoch 16, batch 22950, loss[loss=0.1247, simple_loss=0.2012, pruned_loss=0.02413, over 4940.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.02908, over 971982.00 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 19:11:17,829 INFO 
[train.py:715] (7/8) Epoch 16, batch 23000, loss[loss=0.1148, simple_loss=0.191, pruned_loss=0.0193, over 4880.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02894, over 972418.56 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:11:56,367 INFO [train.py:715] (7/8) Epoch 16, batch 23050, loss[loss=0.1188, simple_loss=0.1862, pruned_loss=0.02574, over 4778.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02903, over 972866.79 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:12:34,514 INFO [train.py:715] (7/8) Epoch 16, batch 23100, loss[loss=0.1231, simple_loss=0.2016, pruned_loss=0.02232, over 4876.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02931, over 973193.25 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:13:12,451 INFO [train.py:715] (7/8) Epoch 16, batch 23150, loss[loss=0.1399, simple_loss=0.2138, pruned_loss=0.03299, over 4835.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02974, over 973102.81 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:13:50,196 INFO [train.py:715] (7/8) Epoch 16, batch 23200, loss[loss=0.1064, simple_loss=0.1801, pruned_loss=0.01633, over 4860.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2061, pruned_loss=0.02925, over 973747.29 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:14:28,510 INFO [train.py:715] (7/8) Epoch 16, batch 23250, loss[loss=0.1242, simple_loss=0.2049, pruned_loss=0.02174, over 4897.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02927, over 973674.81 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:15:06,177 INFO [train.py:715] (7/8) Epoch 16, batch 23300, loss[loss=0.1374, simple_loss=0.2052, pruned_loss=0.03484, over 4823.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02973, over 972775.62 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:15:44,247 INFO [train.py:715] (7/8) Epoch 16, batch 23350, loss[loss=0.1414, simple_loss=0.2096, pruned_loss=0.03659, over 4690.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2083, pruned_loss=0.0302, over 972977.14 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:16:21,895 INFO [train.py:715] (7/8) Epoch 16, batch 23400, loss[loss=0.1095, simple_loss=0.1883, pruned_loss=0.01533, over 4965.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02996, over 973228.16 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 19:16:59,783 INFO [train.py:715] (7/8) Epoch 16, batch 23450, loss[loss=0.1277, simple_loss=0.1948, pruned_loss=0.03025, over 4984.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02996, over 972525.36 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:17:37,689 INFO [train.py:715] (7/8) Epoch 16, batch 23500, loss[loss=0.1382, simple_loss=0.2082, pruned_loss=0.03413, over 4846.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.02996, over 971868.57 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:18:15,672 INFO [train.py:715] (7/8) Epoch 16, batch 23550, loss[loss=0.1343, simple_loss=0.2109, pruned_loss=0.02885, over 4970.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2071, pruned_loss=0.02993, over 972238.58 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:18:54,222 INFO [train.py:715] (7/8) Epoch 16, batch 23600, loss[loss=0.1371, simple_loss=0.2113, pruned_loss=0.03143, over 4849.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02959, over 971372.86 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:19:31,588 INFO 
[train.py:715] (7/8) Epoch 16, batch 23650, loss[loss=0.1311, simple_loss=0.2091, pruned_loss=0.02657, over 4878.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02936, over 970573.41 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:20:09,500 INFO [train.py:715] (7/8) Epoch 16, batch 23700, loss[loss=0.1313, simple_loss=0.2109, pruned_loss=0.02584, over 4895.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02908, over 971364.58 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:20:47,875 INFO [train.py:715] (7/8) Epoch 16, batch 23750, loss[loss=0.1526, simple_loss=0.2247, pruned_loss=0.04027, over 4876.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02939, over 971907.39 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:21:25,949 INFO [train.py:715] (7/8) Epoch 16, batch 23800, loss[loss=0.1159, simple_loss=0.1922, pruned_loss=0.01986, over 4787.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02974, over 972138.50 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:22:04,206 INFO [train.py:715] (7/8) Epoch 16, batch 23850, loss[loss=0.1321, simple_loss=0.2118, pruned_loss=0.02625, over 4921.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2061, pruned_loss=0.02927, over 972093.29 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 19:22:42,141 INFO [train.py:715] (7/8) Epoch 16, batch 23900, loss[loss=0.1455, simple_loss=0.2278, pruned_loss=0.03157, over 4752.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02928, over 972033.42 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:23:20,419 INFO [train.py:715] (7/8) Epoch 16, batch 23950, loss[loss=0.1286, simple_loss=0.2038, pruned_loss=0.02665, over 4861.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.0287, over 971923.52 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 19:23:57,818 INFO [train.py:715] (7/8) Epoch 16, batch 24000, loss[loss=0.1287, simple_loss=0.2, pruned_loss=0.02869, over 4836.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2054, pruned_loss=0.02876, over 972163.68 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 19:23:57,818 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 19:24:07,635 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.1049, simple_loss=0.1883, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-08 19:24:46,403 INFO [train.py:715] (7/8) Epoch 16, batch 24050, loss[loss=0.1274, simple_loss=0.1965, pruned_loss=0.02915, over 4838.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.02883, over 972070.19 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:25:24,729 INFO [train.py:715] (7/8) Epoch 16, batch 24100, loss[loss=0.1537, simple_loss=0.2328, pruned_loss=0.03733, over 4967.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2062, pruned_loss=0.02953, over 971908.73 frames.], batch size: 40, lr: 1.37e-04 +2022-05-08 19:26:03,113 INFO [train.py:715] (7/8) Epoch 16, batch 24150, loss[loss=0.1231, simple_loss=0.1969, pruned_loss=0.0246, over 4794.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.0294, over 973204.98 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:26:40,869 INFO [train.py:715] (7/8) Epoch 16, batch 24200, loss[loss=0.1247, simple_loss=0.2089, pruned_loss=0.02023, over 4929.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02958, over 972716.43 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 19:27:19,229 INFO [train.py:715] (7/8) Epoch 16, batch 24250, loss[loss=0.1121, simple_loss=0.1918, pruned_loss=0.01627, over 4985.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.0295, over 972893.06 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:27:57,172 INFO [train.py:715] (7/8) Epoch 16, batch 24300, loss[loss=0.1301, simple_loss=0.1948, pruned_loss=0.0327, over 4846.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02921, over 972358.21 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:28:35,671 INFO [train.py:715] (7/8) Epoch 16, batch 24350, loss[loss=0.1304, simple_loss=0.2059, pruned_loss=0.02749, over 4665.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2056, pruned_loss=0.0289, over 971896.40 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:29:13,224 INFO [train.py:715] (7/8) Epoch 16, batch 24400, loss[loss=0.1363, simple_loss=0.2091, pruned_loss=0.03176, over 4815.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.02906, over 971994.17 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:29:50,781 INFO [train.py:715] (7/8) Epoch 16, batch 24450, loss[loss=0.1681, simple_loss=0.2404, pruned_loss=0.04787, over 4891.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02901, over 972097.05 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:30:28,695 INFO [train.py:715] (7/8) Epoch 16, batch 24500, loss[loss=0.1197, simple_loss=0.195, pruned_loss=0.02218, over 4788.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02918, over 970992.90 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:31:06,547 INFO [train.py:715] (7/8) Epoch 16, batch 24550, loss[loss=0.1435, simple_loss=0.2253, pruned_loss=0.03085, over 4907.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02941, over 971431.09 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:31:43,992 INFO [train.py:715] (7/8) Epoch 16, batch 24600, loss[loss=0.1172, simple_loss=0.1831, pruned_loss=0.02563, over 4781.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02941, over 970540.69 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:32:21,345 INFO [train.py:715] (7/8) Epoch 16, batch 24650, loss[loss=0.1427, simple_loss=0.2154, pruned_loss=0.03505, over 4691.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02983, over 970895.81 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 
19:32:59,494 INFO [train.py:715] (7/8) Epoch 16, batch 24700, loss[loss=0.1348, simple_loss=0.2069, pruned_loss=0.03131, over 4985.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02975, over 970992.02 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 19:33:37,067 INFO [train.py:715] (7/8) Epoch 16, batch 24750, loss[loss=0.1284, simple_loss=0.2018, pruned_loss=0.0275, over 4938.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02965, over 972420.10 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 19:34:14,866 INFO [train.py:715] (7/8) Epoch 16, batch 24800, loss[loss=0.1422, simple_loss=0.227, pruned_loss=0.02875, over 4825.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02969, over 971540.53 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:34:52,611 INFO [train.py:715] (7/8) Epoch 16, batch 24850, loss[loss=0.14, simple_loss=0.2209, pruned_loss=0.0295, over 4883.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.0297, over 971612.41 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 19:35:30,368 INFO [train.py:715] (7/8) Epoch 16, batch 24900, loss[loss=0.1319, simple_loss=0.2079, pruned_loss=0.02797, over 4757.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02937, over 971307.53 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:36:08,066 INFO [train.py:715] (7/8) Epoch 16, batch 24950, loss[loss=0.1187, simple_loss=0.1919, pruned_loss=0.02275, over 4810.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.0291, over 972060.50 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 19:36:45,485 INFO [train.py:715] (7/8) Epoch 16, batch 25000, loss[loss=0.1125, simple_loss=0.1976, pruned_loss=0.01369, over 4833.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02934, over 971754.43 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:37:23,736 INFO [train.py:715] (7/8) Epoch 16, batch 25050, loss[loss=0.1397, simple_loss=0.2096, pruned_loss=0.03491, over 4819.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02913, over 971434.81 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 19:38:02,498 INFO [train.py:715] (7/8) Epoch 16, batch 25100, loss[loss=0.1223, simple_loss=0.208, pruned_loss=0.01835, over 4767.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02917, over 971283.37 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:38:40,223 INFO [train.py:715] (7/8) Epoch 16, batch 25150, loss[loss=0.1482, simple_loss=0.2104, pruned_loss=0.04305, over 4851.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02896, over 971546.09 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 19:39:18,063 INFO [train.py:715] (7/8) Epoch 16, batch 25200, loss[loss=0.1413, simple_loss=0.2077, pruned_loss=0.03746, over 4777.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02898, over 971094.75 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:39:56,036 INFO [train.py:715] (7/8) Epoch 16, batch 25250, loss[loss=0.1151, simple_loss=0.1954, pruned_loss=0.01741, over 4943.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2077, pruned_loss=0.02897, over 971220.59 frames.], batch size: 29, lr: 1.37e-04 +2022-05-08 19:40:33,647 INFO [train.py:715] (7/8) Epoch 16, batch 25300, loss[loss=0.1395, simple_loss=0.2164, pruned_loss=0.0313, over 4869.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02897, over 972520.78 frames.], batch size: 38, lr: 1.37e-04 +2022-05-08 19:41:10,911 
INFO [train.py:715] (7/8) Epoch 16, batch 25350, loss[loss=0.1391, simple_loss=0.219, pruned_loss=0.02961, over 4975.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.02868, over 973024.41 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 19:41:49,016 INFO [train.py:715] (7/8) Epoch 16, batch 25400, loss[loss=0.1283, simple_loss=0.2044, pruned_loss=0.02607, over 4804.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.0289, over 972944.35 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:42:27,349 INFO [train.py:715] (7/8) Epoch 16, batch 25450, loss[loss=0.1283, simple_loss=0.2088, pruned_loss=0.02384, over 4947.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02878, over 972177.36 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:43:04,843 INFO [train.py:715] (7/8) Epoch 16, batch 25500, loss[loss=0.1194, simple_loss=0.1973, pruned_loss=0.02074, over 4772.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02913, over 972314.37 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:43:42,833 INFO [train.py:715] (7/8) Epoch 16, batch 25550, loss[loss=0.1064, simple_loss=0.1757, pruned_loss=0.01855, over 4905.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.0294, over 971862.40 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 19:44:21,344 INFO [train.py:715] (7/8) Epoch 16, batch 25600, loss[loss=0.1417, simple_loss=0.2146, pruned_loss=0.03441, over 4712.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.02915, over 972079.08 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:45:00,130 INFO [train.py:715] (7/8) Epoch 16, batch 25650, loss[loss=0.1157, simple_loss=0.1961, pruned_loss=0.01767, over 4869.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2088, pruned_loss=0.03004, over 971720.86 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:45:38,354 INFO [train.py:715] (7/8) Epoch 16, batch 25700, loss[loss=0.1612, simple_loss=0.2364, pruned_loss=0.04296, over 4777.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02969, over 970946.72 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:46:16,988 INFO [train.py:715] (7/8) Epoch 16, batch 25750, loss[loss=0.1348, simple_loss=0.2101, pruned_loss=0.02979, over 4952.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02957, over 971326.06 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 19:46:55,626 INFO [train.py:715] (7/8) Epoch 16, batch 25800, loss[loss=0.1294, simple_loss=0.2085, pruned_loss=0.02515, over 4827.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02917, over 972228.49 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:47:34,230 INFO [train.py:715] (7/8) Epoch 16, batch 25850, loss[loss=0.1165, simple_loss=0.1877, pruned_loss=0.02263, over 4733.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02915, over 971935.15 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:48:13,052 INFO [train.py:715] (7/8) Epoch 16, batch 25900, loss[loss=0.1491, simple_loss=0.2131, pruned_loss=0.04259, over 4709.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02919, over 971836.94 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:48:52,470 INFO [train.py:715] (7/8) Epoch 16, batch 25950, loss[loss=0.1675, simple_loss=0.2395, pruned_loss=0.04775, over 4818.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02972, over 972887.12 frames.], batch size: 26, lr: 1.37e-04 +2022-05-08 19:49:32,205 INFO 
[train.py:715] (7/8) Epoch 16, batch 26000, loss[loss=0.116, simple_loss=0.1962, pruned_loss=0.01795, over 4970.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02977, over 972312.64 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:50:11,558 INFO [train.py:715] (7/8) Epoch 16, batch 26050, loss[loss=0.1365, simple_loss=0.2195, pruned_loss=0.02675, over 4941.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.03001, over 972032.88 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:50:50,796 INFO [train.py:715] (7/8) Epoch 16, batch 26100, loss[loss=0.1449, simple_loss=0.2228, pruned_loss=0.03356, over 4882.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02976, over 971942.88 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 19:51:30,062 INFO [train.py:715] (7/8) Epoch 16, batch 26150, loss[loss=0.1359, simple_loss=0.2074, pruned_loss=0.03214, over 4741.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02994, over 971861.54 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:52:08,703 INFO [train.py:715] (7/8) Epoch 16, batch 26200, loss[loss=0.1265, simple_loss=0.2047, pruned_loss=0.02419, over 4766.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02973, over 971060.89 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 19:52:48,175 INFO [train.py:715] (7/8) Epoch 16, batch 26250, loss[loss=0.1269, simple_loss=0.2035, pruned_loss=0.02513, over 4949.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02991, over 971503.07 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:53:27,330 INFO [train.py:715] (7/8) Epoch 16, batch 26300, loss[loss=0.1259, simple_loss=0.2093, pruned_loss=0.02128, over 4919.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.02986, over 971851.15 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 19:54:06,991 INFO [train.py:715] (7/8) Epoch 16, batch 26350, loss[loss=0.1493, simple_loss=0.2106, pruned_loss=0.04404, over 4820.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02964, over 971840.82 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 19:54:46,283 INFO [train.py:715] (7/8) Epoch 16, batch 26400, loss[loss=0.1269, simple_loss=0.2, pruned_loss=0.02696, over 4910.00 frames.], tot_loss[loss=0.1346, simple_loss=0.209, pruned_loss=0.03008, over 971967.93 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:55:26,167 INFO [train.py:715] (7/8) Epoch 16, batch 26450, loss[loss=0.1183, simple_loss=0.1918, pruned_loss=0.0224, over 4899.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02978, over 972877.07 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 19:56:05,125 INFO [train.py:715] (7/8) Epoch 16, batch 26500, loss[loss=0.1373, simple_loss=0.2068, pruned_loss=0.03389, over 4772.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.0295, over 972206.09 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 19:56:44,036 INFO [train.py:715] (7/8) Epoch 16, batch 26550, loss[loss=0.1406, simple_loss=0.2165, pruned_loss=0.03236, over 4970.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02918, over 972002.57 frames.], batch size: 35, lr: 1.37e-04 +2022-05-08 19:57:23,102 INFO [train.py:715] (7/8) Epoch 16, batch 26600, loss[loss=0.1301, simple_loss=0.1987, pruned_loss=0.03076, over 4756.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2072, pruned_loss=0.02867, over 972005.06 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 19:58:02,098 INFO 
[train.py:715] (7/8) Epoch 16, batch 26650, loss[loss=0.149, simple_loss=0.2183, pruned_loss=0.03989, over 4936.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02864, over 971911.92 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 19:58:41,427 INFO [train.py:715] (7/8) Epoch 16, batch 26700, loss[loss=0.1302, simple_loss=0.2017, pruned_loss=0.02934, over 4708.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.0285, over 971556.30 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 19:59:20,661 INFO [train.py:715] (7/8) Epoch 16, batch 26750, loss[loss=0.1248, simple_loss=0.196, pruned_loss=0.02685, over 4686.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02874, over 970983.31 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:00:00,475 INFO [train.py:715] (7/8) Epoch 16, batch 26800, loss[loss=0.1443, simple_loss=0.2114, pruned_loss=0.03861, over 4795.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02942, over 970975.37 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 20:00:39,351 INFO [train.py:715] (7/8) Epoch 16, batch 26850, loss[loss=0.1345, simple_loss=0.1969, pruned_loss=0.03607, over 4830.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02936, over 971081.90 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 20:01:18,827 INFO [train.py:715] (7/8) Epoch 16, batch 26900, loss[loss=0.1221, simple_loss=0.1923, pruned_loss=0.02594, over 4779.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02922, over 971561.01 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 20:01:58,325 INFO [train.py:715] (7/8) Epoch 16, batch 26950, loss[loss=0.1338, simple_loss=0.2103, pruned_loss=0.02868, over 4782.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02937, over 971435.17 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:02:37,504 INFO [train.py:715] (7/8) Epoch 16, batch 27000, loss[loss=0.1506, simple_loss=0.2313, pruned_loss=0.03497, over 4911.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02969, over 971760.14 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 20:02:37,505 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 20:02:47,199 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.1048, simple_loss=0.1883, pruned_loss=0.01067, over 914524.00 frames. 
+2022-05-08 20:03:26,297 INFO [train.py:715] (7/8) Epoch 16, batch 27050, loss[loss=0.1303, simple_loss=0.202, pruned_loss=0.02927, over 4803.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02977, over 971948.70 frames.], batch size: 12, lr: 1.37e-04 +2022-05-08 20:04:08,234 INFO [train.py:715] (7/8) Epoch 16, batch 27100, loss[loss=0.1425, simple_loss=0.22, pruned_loss=0.03252, over 4952.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02946, over 971798.68 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 20:04:47,171 INFO [train.py:715] (7/8) Epoch 16, batch 27150, loss[loss=0.1524, simple_loss=0.2266, pruned_loss=0.03914, over 4877.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.0296, over 972036.73 frames.], batch size: 22, lr: 1.37e-04 +2022-05-08 20:05:26,606 INFO [train.py:715] (7/8) Epoch 16, batch 27200, loss[loss=0.1653, simple_loss=0.2178, pruned_loss=0.05635, over 4834.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2069, pruned_loss=0.0297, over 972384.31 frames.], batch size: 30, lr: 1.37e-04 +2022-05-08 20:06:05,791 INFO [train.py:715] (7/8) Epoch 16, batch 27250, loss[loss=0.137, simple_loss=0.2181, pruned_loss=0.02793, over 4969.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02949, over 971848.36 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:06:45,175 INFO [train.py:715] (7/8) Epoch 16, batch 27300, loss[loss=0.1389, simple_loss=0.2091, pruned_loss=0.03431, over 4941.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02949, over 972493.07 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 20:07:24,245 INFO [train.py:715] (7/8) Epoch 16, batch 27350, loss[loss=0.143, simple_loss=0.2207, pruned_loss=0.03263, over 4940.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02961, over 973293.23 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 20:08:03,612 INFO [train.py:715] (7/8) Epoch 16, batch 27400, loss[loss=0.1444, simple_loss=0.2128, pruned_loss=0.03805, over 4853.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02921, over 973092.36 frames.], batch size: 13, lr: 1.37e-04 +2022-05-08 20:08:42,904 INFO [train.py:715] (7/8) Epoch 16, batch 27450, loss[loss=0.1264, simple_loss=0.2055, pruned_loss=0.02362, over 4800.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02971, over 972925.26 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 20:09:21,910 INFO [train.py:715] (7/8) Epoch 16, batch 27500, loss[loss=0.1488, simple_loss=0.2263, pruned_loss=0.03559, over 4899.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02949, over 972145.11 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 20:10:01,270 INFO [train.py:715] (7/8) Epoch 16, batch 27550, loss[loss=0.1268, simple_loss=0.2049, pruned_loss=0.02433, over 4931.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02921, over 972264.20 frames.], batch size: 21, lr: 1.37e-04 +2022-05-08 20:10:41,121 INFO [train.py:715] (7/8) Epoch 16, batch 27600, loss[loss=0.1148, simple_loss=0.1934, pruned_loss=0.01808, over 4909.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02911, over 971301.10 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 20:11:20,147 INFO [train.py:715] (7/8) Epoch 16, batch 27650, loss[loss=0.1163, simple_loss=0.1929, pruned_loss=0.01985, over 4818.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02908, over 972116.89 frames.], batch size: 27, lr: 1.37e-04 +2022-05-08 
20:11:59,673 INFO [train.py:715] (7/8) Epoch 16, batch 27700, loss[loss=0.1318, simple_loss=0.2122, pruned_loss=0.02575, over 4864.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02974, over 972668.88 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 20:12:38,981 INFO [train.py:715] (7/8) Epoch 16, batch 27750, loss[loss=0.1614, simple_loss=0.2336, pruned_loss=0.04459, over 4962.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02989, over 973848.03 frames.], batch size: 39, lr: 1.37e-04 +2022-05-08 20:13:18,196 INFO [train.py:715] (7/8) Epoch 16, batch 27800, loss[loss=0.1407, simple_loss=0.2106, pruned_loss=0.03543, over 4830.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.0295, over 973852.73 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:13:57,556 INFO [train.py:715] (7/8) Epoch 16, batch 27850, loss[loss=0.1331, simple_loss=0.2157, pruned_loss=0.02523, over 4823.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02958, over 973031.67 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:14:36,987 INFO [train.py:715] (7/8) Epoch 16, batch 27900, loss[loss=0.1492, simple_loss=0.2348, pruned_loss=0.0318, over 4910.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02985, over 973054.70 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 20:15:16,668 INFO [train.py:715] (7/8) Epoch 16, batch 27950, loss[loss=0.144, simple_loss=0.2206, pruned_loss=0.03372, over 4780.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02943, over 972165.88 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:15:55,980 INFO [train.py:715] (7/8) Epoch 16, batch 28000, loss[loss=0.1449, simple_loss=0.2171, pruned_loss=0.03636, over 4776.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2072, pruned_loss=0.02893, over 972305.12 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 20:16:35,541 INFO [train.py:715] (7/8) Epoch 16, batch 28050, loss[loss=0.1209, simple_loss=0.2084, pruned_loss=0.0167, over 4994.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02897, over 973151.54 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 20:17:15,208 INFO [train.py:715] (7/8) Epoch 16, batch 28100, loss[loss=0.1394, simple_loss=0.2086, pruned_loss=0.03512, over 4866.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2082, pruned_loss=0.02955, over 973709.31 frames.], batch size: 32, lr: 1.37e-04 +2022-05-08 20:17:54,189 INFO [train.py:715] (7/8) Epoch 16, batch 28150, loss[loss=0.1423, simple_loss=0.2033, pruned_loss=0.04062, over 4844.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02964, over 974185.57 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:18:33,941 INFO [train.py:715] (7/8) Epoch 16, batch 28200, loss[loss=0.1546, simple_loss=0.2245, pruned_loss=0.0423, over 4975.00 frames.], tot_loss[loss=0.135, simple_loss=0.2091, pruned_loss=0.03046, over 974525.13 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 20:19:13,272 INFO [train.py:715] (7/8) Epoch 16, batch 28250, loss[loss=0.1053, simple_loss=0.1763, pruned_loss=0.01717, over 4803.00 frames.], tot_loss[loss=0.135, simple_loss=0.2092, pruned_loss=0.03042, over 975107.84 frames.], batch size: 24, lr: 1.37e-04 +2022-05-08 20:19:51,891 INFO [train.py:715] (7/8) Epoch 16, batch 28300, loss[loss=0.1276, simple_loss=0.1962, pruned_loss=0.02955, over 4938.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03022, over 974202.15 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 20:20:31,609 
INFO [train.py:715] (7/8) Epoch 16, batch 28350, loss[loss=0.1581, simple_loss=0.2214, pruned_loss=0.04736, over 4738.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03012, over 973777.88 frames.], batch size: 16, lr: 1.37e-04 +2022-05-08 20:21:11,559 INFO [train.py:715] (7/8) Epoch 16, batch 28400, loss[loss=0.1283, simple_loss=0.2058, pruned_loss=0.02537, over 4807.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2083, pruned_loss=0.03027, over 973589.70 frames.], batch size: 25, lr: 1.37e-04 +2022-05-08 20:21:51,018 INFO [train.py:715] (7/8) Epoch 16, batch 28450, loss[loss=0.1321, simple_loss=0.2037, pruned_loss=0.03029, over 4918.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02952, over 973975.84 frames.], batch size: 23, lr: 1.37e-04 +2022-05-08 20:22:29,722 INFO [train.py:715] (7/8) Epoch 16, batch 28500, loss[loss=0.1541, simple_loss=0.2255, pruned_loss=0.04133, over 4847.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02987, over 973716.07 frames.], batch size: 20, lr: 1.37e-04 +2022-05-08 20:23:09,886 INFO [train.py:715] (7/8) Epoch 16, batch 28550, loss[loss=0.1313, simple_loss=0.2099, pruned_loss=0.02629, over 4910.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02981, over 972801.09 frames.], batch size: 17, lr: 1.37e-04 +2022-05-08 20:23:49,361 INFO [train.py:715] (7/8) Epoch 16, batch 28600, loss[loss=0.1786, simple_loss=0.2393, pruned_loss=0.05889, over 4972.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03002, over 973399.60 frames.], batch size: 15, lr: 1.37e-04 +2022-05-08 20:24:28,946 INFO [train.py:715] (7/8) Epoch 16, batch 28650, loss[loss=0.1658, simple_loss=0.2447, pruned_loss=0.04343, over 4770.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02974, over 973993.21 frames.], batch size: 18, lr: 1.37e-04 +2022-05-08 20:25:08,099 INFO [train.py:715] (7/8) Epoch 16, batch 28700, loss[loss=0.139, simple_loss=0.2203, pruned_loss=0.02881, over 4761.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.0297, over 973659.87 frames.], batch size: 19, lr: 1.37e-04 +2022-05-08 20:25:47,667 INFO [train.py:715] (7/8) Epoch 16, batch 28750, loss[loss=0.1737, simple_loss=0.2443, pruned_loss=0.05153, over 4778.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2077, pruned_loss=0.02989, over 974008.74 frames.], batch size: 14, lr: 1.37e-04 +2022-05-08 20:26:27,377 INFO [train.py:715] (7/8) Epoch 16, batch 28800, loss[loss=0.1495, simple_loss=0.2221, pruned_loss=0.03842, over 4810.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03001, over 973179.39 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 20:27:06,538 INFO [train.py:715] (7/8) Epoch 16, batch 28850, loss[loss=0.134, simple_loss=0.2117, pruned_loss=0.02813, over 4899.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02984, over 973073.13 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 20:27:46,355 INFO [train.py:715] (7/8) Epoch 16, batch 28900, loss[loss=0.1202, simple_loss=0.1919, pruned_loss=0.02425, over 4947.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02977, over 973142.89 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:28:25,937 INFO [train.py:715] (7/8) Epoch 16, batch 28950, loss[loss=0.1433, simple_loss=0.213, pruned_loss=0.03682, over 4690.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02971, over 972718.06 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:29:05,870 INFO 
[train.py:715] (7/8) Epoch 16, batch 29000, loss[loss=0.1252, simple_loss=0.2078, pruned_loss=0.02131, over 4972.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02928, over 972894.01 frames.], batch size: 28, lr: 1.36e-04 +2022-05-08 20:29:45,340 INFO [train.py:715] (7/8) Epoch 16, batch 29050, loss[loss=0.1235, simple_loss=0.1972, pruned_loss=0.02492, over 4681.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2085, pruned_loss=0.02954, over 972355.45 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:30:25,182 INFO [train.py:715] (7/8) Epoch 16, batch 29100, loss[loss=0.1544, simple_loss=0.2314, pruned_loss=0.03869, over 4788.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2085, pruned_loss=0.02952, over 972122.04 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:31:06,239 INFO [train.py:715] (7/8) Epoch 16, batch 29150, loss[loss=0.14, simple_loss=0.2176, pruned_loss=0.03115, over 4950.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2083, pruned_loss=0.02943, over 971676.04 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 20:31:46,273 INFO [train.py:715] (7/8) Epoch 16, batch 29200, loss[loss=0.1245, simple_loss=0.1892, pruned_loss=0.02997, over 4857.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02963, over 971994.49 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 20:32:27,445 INFO [train.py:715] (7/8) Epoch 16, batch 29250, loss[loss=0.124, simple_loss=0.2008, pruned_loss=0.0236, over 4843.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.03002, over 972135.58 frames.], batch size: 32, lr: 1.36e-04 +2022-05-08 20:33:08,443 INFO [train.py:715] (7/8) Epoch 16, batch 29300, loss[loss=0.1345, simple_loss=0.2117, pruned_loss=0.02868, over 4815.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02998, over 971765.65 frames.], batch size: 27, lr: 1.36e-04 +2022-05-08 20:33:49,840 INFO [train.py:715] (7/8) Epoch 16, batch 29350, loss[loss=0.1494, simple_loss=0.2281, pruned_loss=0.03536, over 4893.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02981, over 972177.35 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 20:34:30,966 INFO [train.py:715] (7/8) Epoch 16, batch 29400, loss[loss=0.1163, simple_loss=0.1925, pruned_loss=0.02008, over 4872.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02945, over 971616.28 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:35:12,718 INFO [train.py:715] (7/8) Epoch 16, batch 29450, loss[loss=0.1258, simple_loss=0.2053, pruned_loss=0.02313, over 4784.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2092, pruned_loss=0.02967, over 972567.05 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:35:54,216 INFO [train.py:715] (7/8) Epoch 16, batch 29500, loss[loss=0.1504, simple_loss=0.2057, pruned_loss=0.04754, over 4847.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2083, pruned_loss=0.02954, over 972149.30 frames.], batch size: 32, lr: 1.36e-04 +2022-05-08 20:36:36,039 INFO [train.py:715] (7/8) Epoch 16, batch 29550, loss[loss=0.1394, simple_loss=0.2151, pruned_loss=0.03185, over 4817.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2084, pruned_loss=0.02941, over 972970.95 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 20:37:17,263 INFO [train.py:715] (7/8) Epoch 16, batch 29600, loss[loss=0.1042, simple_loss=0.1772, pruned_loss=0.01561, over 4754.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2084, pruned_loss=0.02965, over 972603.95 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 20:37:59,054 INFO 
[train.py:715] (7/8) Epoch 16, batch 29650, loss[loss=0.131, simple_loss=0.2075, pruned_loss=0.02719, over 4810.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02938, over 972613.08 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 20:38:40,543 INFO [train.py:715] (7/8) Epoch 16, batch 29700, loss[loss=0.09973, simple_loss=0.1692, pruned_loss=0.01511, over 4779.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02921, over 973035.17 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:39:21,787 INFO [train.py:715] (7/8) Epoch 16, batch 29750, loss[loss=0.1561, simple_loss=0.2286, pruned_loss=0.04185, over 4859.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02954, over 972620.89 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 20:40:02,894 INFO [train.py:715] (7/8) Epoch 16, batch 29800, loss[loss=0.1519, simple_loss=0.2262, pruned_loss=0.03879, over 4826.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02989, over 972004.89 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 20:40:44,776 INFO [train.py:715] (7/8) Epoch 16, batch 29850, loss[loss=0.135, simple_loss=0.21, pruned_loss=0.02994, over 4769.00 frames.], tot_loss[loss=0.134, simple_loss=0.2083, pruned_loss=0.02988, over 971858.38 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:41:26,350 INFO [train.py:715] (7/8) Epoch 16, batch 29900, loss[loss=0.1142, simple_loss=0.1935, pruned_loss=0.01747, over 4830.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02939, over 970673.89 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 20:42:07,630 INFO [train.py:715] (7/8) Epoch 16, batch 29950, loss[loss=0.1308, simple_loss=0.2066, pruned_loss=0.02751, over 4923.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02957, over 971398.35 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 20:42:50,229 INFO [train.py:715] (7/8) Epoch 16, batch 30000, loss[loss=0.1178, simple_loss=0.1894, pruned_loss=0.0231, over 4973.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02946, over 972351.37 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:42:50,230 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 20:43:01,793 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.1047, simple_loss=0.1883, pruned_loss=0.01058, over 914524.00 frames. 
+2022-05-08 20:43:44,298 INFO [train.py:715] (7/8) Epoch 16, batch 30050, loss[loss=0.136, simple_loss=0.2076, pruned_loss=0.03221, over 4892.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02966, over 972533.78 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:44:26,138 INFO [train.py:715] (7/8) Epoch 16, batch 30100, loss[loss=0.1086, simple_loss=0.1786, pruned_loss=0.01925, over 4693.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02962, over 971835.86 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:45:06,930 INFO [train.py:715] (7/8) Epoch 16, batch 30150, loss[loss=0.1278, simple_loss=0.1984, pruned_loss=0.02862, over 4792.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.03002, over 970706.24 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 20:45:48,623 INFO [train.py:715] (7/8) Epoch 16, batch 30200, loss[loss=0.1214, simple_loss=0.2012, pruned_loss=0.02081, over 4965.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.02999, over 970752.22 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:46:29,887 INFO [train.py:715] (7/8) Epoch 16, batch 30250, loss[loss=0.1282, simple_loss=0.2021, pruned_loss=0.02715, over 4782.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02953, over 970907.60 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:47:09,988 INFO [train.py:715] (7/8) Epoch 16, batch 30300, loss[loss=0.1187, simple_loss=0.1945, pruned_loss=0.02147, over 4980.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02972, over 972526.08 frames.], batch size: 31, lr: 1.36e-04 +2022-05-08 20:47:50,185 INFO [train.py:715] (7/8) Epoch 16, batch 30350, loss[loss=0.141, simple_loss=0.2108, pruned_loss=0.03555, over 4970.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02926, over 972893.94 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:48:30,611 INFO [train.py:715] (7/8) Epoch 16, batch 30400, loss[loss=0.1203, simple_loss=0.1883, pruned_loss=0.02613, over 4899.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 973229.95 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 20:49:10,257 INFO [train.py:715] (7/8) Epoch 16, batch 30450, loss[loss=0.1058, simple_loss=0.1755, pruned_loss=0.0181, over 4924.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02956, over 973019.93 frames.], batch size: 29, lr: 1.36e-04 +2022-05-08 20:49:49,413 INFO [train.py:715] (7/8) Epoch 16, batch 30500, loss[loss=0.1147, simple_loss=0.1818, pruned_loss=0.02375, over 4690.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02912, over 972449.57 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:50:29,339 INFO [train.py:715] (7/8) Epoch 16, batch 30550, loss[loss=0.0959, simple_loss=0.1676, pruned_loss=0.0121, over 4846.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02913, over 972218.23 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 20:51:09,802 INFO [train.py:715] (7/8) Epoch 16, batch 30600, loss[loss=0.1177, simple_loss=0.1923, pruned_loss=0.02148, over 4819.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02959, over 972444.96 frames.], batch size: 27, lr: 1.36e-04 +2022-05-08 20:51:49,047 INFO [train.py:715] (7/8) Epoch 16, batch 30650, loss[loss=0.1397, simple_loss=0.2081, pruned_loss=0.03567, over 4864.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02979, over 972026.88 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 
20:52:28,800 INFO [train.py:715] (7/8) Epoch 16, batch 30700, loss[loss=0.1408, simple_loss=0.2137, pruned_loss=0.03398, over 4966.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02999, over 972234.97 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 20:53:10,035 INFO [train.py:715] (7/8) Epoch 16, batch 30750, loss[loss=0.1673, simple_loss=0.2276, pruned_loss=0.05354, over 4939.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2075, pruned_loss=0.02999, over 972264.05 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 20:53:49,625 INFO [train.py:715] (7/8) Epoch 16, batch 30800, loss[loss=0.1472, simple_loss=0.2298, pruned_loss=0.03235, over 4686.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02945, over 971437.74 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:54:28,448 INFO [train.py:715] (7/8) Epoch 16, batch 30850, loss[loss=0.1533, simple_loss=0.2293, pruned_loss=0.03861, over 4803.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2069, pruned_loss=0.02977, over 971239.55 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 20:55:08,444 INFO [train.py:715] (7/8) Epoch 16, batch 30900, loss[loss=0.2221, simple_loss=0.2834, pruned_loss=0.08046, over 4989.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.0302, over 972013.46 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:55:47,875 INFO [train.py:715] (7/8) Epoch 16, batch 30950, loss[loss=0.126, simple_loss=0.198, pruned_loss=0.02701, over 4961.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02995, over 972192.27 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:56:26,907 INFO [train.py:715] (7/8) Epoch 16, batch 31000, loss[loss=0.1402, simple_loss=0.2099, pruned_loss=0.03529, over 4914.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03001, over 972755.35 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 20:57:06,085 INFO [train.py:715] (7/8) Epoch 16, batch 31050, loss[loss=0.1252, simple_loss=0.2149, pruned_loss=0.01779, over 4919.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02969, over 972717.39 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:57:45,841 INFO [train.py:715] (7/8) Epoch 16, batch 31100, loss[loss=0.1332, simple_loss=0.2142, pruned_loss=0.02613, over 4779.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03001, over 972298.66 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:58:25,691 INFO [train.py:715] (7/8) Epoch 16, batch 31150, loss[loss=0.1712, simple_loss=0.2579, pruned_loss=0.04229, over 4774.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02998, over 972110.44 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 20:59:04,405 INFO [train.py:715] (7/8) Epoch 16, batch 31200, loss[loss=0.1425, simple_loss=0.219, pruned_loss=0.03294, over 4692.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2082, pruned_loss=0.03032, over 971815.36 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 20:59:44,074 INFO [train.py:715] (7/8) Epoch 16, batch 31250, loss[loss=0.1191, simple_loss=0.1975, pruned_loss=0.02033, over 4815.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02981, over 971707.26 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 21:00:23,616 INFO [train.py:715] (7/8) Epoch 16, batch 31300, loss[loss=0.09792, simple_loss=0.1794, pruned_loss=0.008228, over 4816.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02916, over 971731.04 frames.], batch size: 27, lr: 1.36e-04 +2022-05-08 21:01:03,210 
INFO [train.py:715] (7/8) Epoch 16, batch 31350, loss[loss=0.1358, simple_loss=0.2085, pruned_loss=0.0316, over 4895.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02907, over 972439.21 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:01:42,656 INFO [train.py:715] (7/8) Epoch 16, batch 31400, loss[loss=0.1228, simple_loss=0.1968, pruned_loss=0.02439, over 4841.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02952, over 971897.13 frames.], batch size: 13, lr: 1.36e-04 +2022-05-08 21:02:22,721 INFO [train.py:715] (7/8) Epoch 16, batch 31450, loss[loss=0.1075, simple_loss=0.1781, pruned_loss=0.01847, over 4962.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02971, over 972207.43 frames.], batch size: 35, lr: 1.36e-04 +2022-05-08 21:03:01,699 INFO [train.py:715] (7/8) Epoch 16, batch 31500, loss[loss=0.1316, simple_loss=0.2113, pruned_loss=0.02596, over 4786.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02937, over 972102.94 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:03:40,544 INFO [train.py:715] (7/8) Epoch 16, batch 31550, loss[loss=0.1187, simple_loss=0.1891, pruned_loss=0.0242, over 4803.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02924, over 972273.83 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:04:19,807 INFO [train.py:715] (7/8) Epoch 16, batch 31600, loss[loss=0.1186, simple_loss=0.1926, pruned_loss=0.02225, over 4850.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.0296, over 972111.46 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 21:04:58,922 INFO [train.py:715] (7/8) Epoch 16, batch 31650, loss[loss=0.1335, simple_loss=0.221, pruned_loss=0.023, over 4815.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2077, pruned_loss=0.02894, over 972238.20 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:05:37,856 INFO [train.py:715] (7/8) Epoch 16, batch 31700, loss[loss=0.1177, simple_loss=0.1977, pruned_loss=0.01886, over 4895.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2075, pruned_loss=0.02901, over 972050.54 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:06:17,109 INFO [train.py:715] (7/8) Epoch 16, batch 31750, loss[loss=0.1107, simple_loss=0.1873, pruned_loss=0.01704, over 4941.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02904, over 972261.33 frames.], batch size: 29, lr: 1.36e-04 +2022-05-08 21:06:56,941 INFO [train.py:715] (7/8) Epoch 16, batch 31800, loss[loss=0.1335, simple_loss=0.2083, pruned_loss=0.02936, over 4759.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2083, pruned_loss=0.02946, over 972665.64 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:07:36,869 INFO [train.py:715] (7/8) Epoch 16, batch 31850, loss[loss=0.152, simple_loss=0.2234, pruned_loss=0.04035, over 4827.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02911, over 972760.57 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 21:08:15,873 INFO [train.py:715] (7/8) Epoch 16, batch 31900, loss[loss=0.125, simple_loss=0.2078, pruned_loss=0.02109, over 4752.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2082, pruned_loss=0.02925, over 973043.13 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:08:55,066 INFO [train.py:715] (7/8) Epoch 16, batch 31950, loss[loss=0.1746, simple_loss=0.2435, pruned_loss=0.05292, over 4873.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2087, pruned_loss=0.02953, over 972854.73 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:09:34,441 INFO 
[train.py:715] (7/8) Epoch 16, batch 32000, loss[loss=0.1181, simple_loss=0.1927, pruned_loss=0.02179, over 4905.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02984, over 972849.68 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:10:13,551 INFO [train.py:715] (7/8) Epoch 16, batch 32050, loss[loss=0.1289, simple_loss=0.2103, pruned_loss=0.0238, over 4835.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2091, pruned_loss=0.03006, over 972923.28 frames.], batch size: 30, lr: 1.36e-04 +2022-05-08 21:10:53,097 INFO [train.py:715] (7/8) Epoch 16, batch 32100, loss[loss=0.1407, simple_loss=0.2138, pruned_loss=0.03374, over 4900.00 frames.], tot_loss[loss=0.1347, simple_loss=0.209, pruned_loss=0.03015, over 972458.17 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:11:32,625 INFO [train.py:715] (7/8) Epoch 16, batch 32150, loss[loss=0.1457, simple_loss=0.2189, pruned_loss=0.03626, over 4954.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2083, pruned_loss=0.02975, over 972637.67 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:12:12,709 INFO [train.py:715] (7/8) Epoch 16, batch 32200, loss[loss=0.1647, simple_loss=0.2313, pruned_loss=0.04902, over 4860.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02961, over 972254.95 frames.], batch size: 30, lr: 1.36e-04 +2022-05-08 21:12:51,835 INFO [train.py:715] (7/8) Epoch 16, batch 32250, loss[loss=0.1259, simple_loss=0.203, pruned_loss=0.02437, over 4818.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02894, over 973023.57 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:13:31,327 INFO [train.py:715] (7/8) Epoch 16, batch 32300, loss[loss=0.1185, simple_loss=0.1997, pruned_loss=0.01862, over 4760.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02922, over 972845.04 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:14:11,345 INFO [train.py:715] (7/8) Epoch 16, batch 32350, loss[loss=0.1647, simple_loss=0.2345, pruned_loss=0.04748, over 4924.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02916, over 973403.13 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:14:50,975 INFO [train.py:715] (7/8) Epoch 16, batch 32400, loss[loss=0.1292, simple_loss=0.2054, pruned_loss=0.02654, over 4801.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02893, over 972349.20 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:15:30,004 INFO [train.py:715] (7/8) Epoch 16, batch 32450, loss[loss=0.1094, simple_loss=0.1835, pruned_loss=0.01765, over 4818.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02924, over 972677.78 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 21:16:10,035 INFO [train.py:715] (7/8) Epoch 16, batch 32500, loss[loss=0.1332, simple_loss=0.2066, pruned_loss=0.02988, over 4973.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02919, over 972549.50 frames.], batch size: 31, lr: 1.36e-04 +2022-05-08 21:16:49,326 INFO [train.py:715] (7/8) Epoch 16, batch 32550, loss[loss=0.1212, simple_loss=0.2029, pruned_loss=0.01977, over 4873.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02903, over 972169.62 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 21:17:28,289 INFO [train.py:715] (7/8) Epoch 16, batch 32600, loss[loss=0.1615, simple_loss=0.2256, pruned_loss=0.04871, over 4903.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02922, over 972704.89 frames.], batch size: 32, lr: 1.36e-04 +2022-05-08 21:18:07,202 INFO 
[train.py:715] (7/8) Epoch 16, batch 32650, loss[loss=0.1302, simple_loss=0.2041, pruned_loss=0.02815, over 4814.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02983, over 972937.09 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 21:18:46,395 INFO [train.py:715] (7/8) Epoch 16, batch 32700, loss[loss=0.1158, simple_loss=0.1838, pruned_loss=0.02394, over 4709.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02956, over 972253.07 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:19:25,738 INFO [train.py:715] (7/8) Epoch 16, batch 32750, loss[loss=0.1267, simple_loss=0.2015, pruned_loss=0.02596, over 4989.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02971, over 971811.99 frames.], batch size: 27, lr: 1.36e-04 +2022-05-08 21:20:05,463 INFO [train.py:715] (7/8) Epoch 16, batch 32800, loss[loss=0.1364, simple_loss=0.2028, pruned_loss=0.03499, over 4826.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02969, over 972981.55 frames.], batch size: 13, lr: 1.36e-04 +2022-05-08 21:20:44,826 INFO [train.py:715] (7/8) Epoch 16, batch 32850, loss[loss=0.1224, simple_loss=0.2046, pruned_loss=0.02016, over 4810.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.0297, over 972727.07 frames.], batch size: 26, lr: 1.36e-04 +2022-05-08 21:21:24,460 INFO [train.py:715] (7/8) Epoch 16, batch 32900, loss[loss=0.1573, simple_loss=0.2297, pruned_loss=0.04243, over 4640.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02946, over 971834.78 frames.], batch size: 13, lr: 1.36e-04 +2022-05-08 21:22:03,428 INFO [train.py:715] (7/8) Epoch 16, batch 32950, loss[loss=0.1385, simple_loss=0.2183, pruned_loss=0.02937, over 4714.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2061, pruned_loss=0.02929, over 972629.58 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:22:42,574 INFO [train.py:715] (7/8) Epoch 16, batch 33000, loss[loss=0.1312, simple_loss=0.2133, pruned_loss=0.02459, over 4796.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02928, over 972949.37 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:22:42,575 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 21:22:55,771 INFO [train.py:742] (7/8) Epoch 16, validation: loss=0.105, simple_loss=0.1884, pruned_loss=0.01078, over 914524.00 frames. 
+2022-05-08 21:23:35,556 INFO [train.py:715] (7/8) Epoch 16, batch 33050, loss[loss=0.1528, simple_loss=0.2287, pruned_loss=0.03847, over 4878.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02899, over 973903.10 frames.], batch size: 22, lr: 1.36e-04 +2022-05-08 21:24:14,883 INFO [train.py:715] (7/8) Epoch 16, batch 33100, loss[loss=0.1048, simple_loss=0.1613, pruned_loss=0.02412, over 4787.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02911, over 973629.29 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 21:24:54,245 INFO [train.py:715] (7/8) Epoch 16, batch 33150, loss[loss=0.1298, simple_loss=0.2044, pruned_loss=0.02758, over 4990.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02923, over 973277.55 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:25:34,104 INFO [train.py:715] (7/8) Epoch 16, batch 33200, loss[loss=0.09317, simple_loss=0.1622, pruned_loss=0.01206, over 4789.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02899, over 973549.82 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 21:26:13,843 INFO [train.py:715] (7/8) Epoch 16, batch 33250, loss[loss=0.171, simple_loss=0.2423, pruned_loss=0.04984, over 4826.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02943, over 972889.57 frames.], batch size: 25, lr: 1.36e-04 +2022-05-08 21:26:53,435 INFO [train.py:715] (7/8) Epoch 16, batch 33300, loss[loss=0.1581, simple_loss=0.2446, pruned_loss=0.03582, over 4872.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02951, over 972985.76 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:27:32,749 INFO [train.py:715] (7/8) Epoch 16, batch 33350, loss[loss=0.1621, simple_loss=0.2297, pruned_loss=0.04724, over 4752.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2085, pruned_loss=0.0296, over 973118.94 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:28:12,219 INFO [train.py:715] (7/8) Epoch 16, batch 33400, loss[loss=0.1268, simple_loss=0.2067, pruned_loss=0.02347, over 4893.00 frames.], tot_loss[loss=0.134, simple_loss=0.2086, pruned_loss=0.02974, over 972544.28 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:28:51,454 INFO [train.py:715] (7/8) Epoch 16, batch 33450, loss[loss=0.133, simple_loss=0.2055, pruned_loss=0.03028, over 4903.00 frames.], tot_loss[loss=0.1348, simple_loss=0.2092, pruned_loss=0.03018, over 972672.09 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:29:30,509 INFO [train.py:715] (7/8) Epoch 16, batch 33500, loss[loss=0.138, simple_loss=0.2097, pruned_loss=0.03319, over 4906.00 frames.], tot_loss[loss=0.1352, simple_loss=0.2092, pruned_loss=0.03063, over 972931.14 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 21:30:09,464 INFO [train.py:715] (7/8) Epoch 16, batch 33550, loss[loss=0.1343, simple_loss=0.2043, pruned_loss=0.03222, over 4779.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2093, pruned_loss=0.03074, over 972610.73 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:30:49,230 INFO [train.py:715] (7/8) Epoch 16, batch 33600, loss[loss=0.1313, simple_loss=0.2064, pruned_loss=0.02809, over 4947.00 frames.], tot_loss[loss=0.1354, simple_loss=0.2095, pruned_loss=0.03062, over 972329.86 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:31:28,204 INFO [train.py:715] (7/8) Epoch 16, batch 33650, loss[loss=0.1335, simple_loss=0.2122, pruned_loss=0.02734, over 4874.00 frames.], tot_loss[loss=0.1344, simple_loss=0.2087, pruned_loss=0.0301, over 972406.29 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 
21:32:07,846 INFO [train.py:715] (7/8) Epoch 16, batch 33700, loss[loss=0.148, simple_loss=0.2331, pruned_loss=0.03143, over 4957.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02946, over 971952.21 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:32:46,797 INFO [train.py:715] (7/8) Epoch 16, batch 33750, loss[loss=0.126, simple_loss=0.2042, pruned_loss=0.02393, over 4950.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02974, over 971918.10 frames.], batch size: 15, lr: 1.36e-04 +2022-05-08 21:33:25,788 INFO [train.py:715] (7/8) Epoch 16, batch 33800, loss[loss=0.1647, simple_loss=0.2386, pruned_loss=0.0454, over 4973.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02982, over 971979.13 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:34:05,049 INFO [train.py:715] (7/8) Epoch 16, batch 33850, loss[loss=0.1336, simple_loss=0.2053, pruned_loss=0.03099, over 4874.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02988, over 972809.54 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:34:44,275 INFO [train.py:715] (7/8) Epoch 16, batch 33900, loss[loss=0.1533, simple_loss=0.2194, pruned_loss=0.04354, over 4874.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02966, over 972828.71 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:35:24,623 INFO [train.py:715] (7/8) Epoch 16, batch 33950, loss[loss=0.1606, simple_loss=0.2292, pruned_loss=0.04604, over 4759.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.0293, over 971717.04 frames.], batch size: 19, lr: 1.36e-04 +2022-05-08 21:36:03,124 INFO [train.py:715] (7/8) Epoch 16, batch 34000, loss[loss=0.1195, simple_loss=0.1879, pruned_loss=0.02551, over 4758.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02919, over 971361.59 frames.], batch size: 14, lr: 1.36e-04 +2022-05-08 21:36:43,151 INFO [train.py:715] (7/8) Epoch 16, batch 34050, loss[loss=0.1152, simple_loss=0.1864, pruned_loss=0.02201, over 4949.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02887, over 973200.81 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:37:22,558 INFO [train.py:715] (7/8) Epoch 16, batch 34100, loss[loss=0.1504, simple_loss=0.2196, pruned_loss=0.04059, over 4787.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02899, over 973126.26 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:38:01,719 INFO [train.py:715] (7/8) Epoch 16, batch 34150, loss[loss=0.1607, simple_loss=0.2481, pruned_loss=0.03659, over 4851.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02951, over 972752.78 frames.], batch size: 20, lr: 1.36e-04 +2022-05-08 21:38:41,101 INFO [train.py:715] (7/8) Epoch 16, batch 34200, loss[loss=0.1331, simple_loss=0.2063, pruned_loss=0.02988, over 4794.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02906, over 973098.77 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:39:20,452 INFO [train.py:715] (7/8) Epoch 16, batch 34250, loss[loss=0.1171, simple_loss=0.1967, pruned_loss=0.01877, over 4813.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02927, over 972701.59 frames.], batch size: 21, lr: 1.36e-04 +2022-05-08 21:40:00,529 INFO [train.py:715] (7/8) Epoch 16, batch 34300, loss[loss=0.1191, simple_loss=0.1925, pruned_loss=0.02291, over 4976.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02922, over 972915.83 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 
21:40:39,498 INFO [train.py:715] (7/8) Epoch 16, batch 34350, loss[loss=0.1401, simple_loss=0.2188, pruned_loss=0.03072, over 4933.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02903, over 973485.69 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:41:18,851 INFO [train.py:715] (7/8) Epoch 16, batch 34400, loss[loss=0.1241, simple_loss=0.2027, pruned_loss=0.02277, over 4921.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02905, over 972971.93 frames.], batch size: 18, lr: 1.36e-04 +2022-05-08 21:41:58,454 INFO [train.py:715] (7/8) Epoch 16, batch 34450, loss[loss=0.1047, simple_loss=0.1758, pruned_loss=0.0168, over 4790.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02874, over 973218.32 frames.], batch size: 12, lr: 1.36e-04 +2022-05-08 21:42:37,729 INFO [train.py:715] (7/8) Epoch 16, batch 34500, loss[loss=0.1278, simple_loss=0.2007, pruned_loss=0.02739, over 4974.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.029, over 973313.48 frames.], batch size: 35, lr: 1.36e-04 +2022-05-08 21:43:17,126 INFO [train.py:715] (7/8) Epoch 16, batch 34550, loss[loss=0.1117, simple_loss=0.1904, pruned_loss=0.01653, over 4960.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02871, over 973680.47 frames.], batch size: 24, lr: 1.36e-04 +2022-05-08 21:43:56,250 INFO [train.py:715] (7/8) Epoch 16, batch 34600, loss[loss=0.1464, simple_loss=0.22, pruned_loss=0.03643, over 4789.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02899, over 973003.93 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:44:36,200 INFO [train.py:715] (7/8) Epoch 16, batch 34650, loss[loss=0.1466, simple_loss=0.2169, pruned_loss=0.03817, over 4961.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.02967, over 973014.58 frames.], batch size: 39, lr: 1.36e-04 +2022-05-08 21:45:15,694 INFO [train.py:715] (7/8) Epoch 16, batch 34700, loss[loss=0.1375, simple_loss=0.2165, pruned_loss=0.02925, over 4780.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2072, pruned_loss=0.02976, over 972502.09 frames.], batch size: 17, lr: 1.36e-04 +2022-05-08 21:45:54,812 INFO [train.py:715] (7/8) Epoch 16, batch 34750, loss[loss=0.144, simple_loss=0.2189, pruned_loss=0.0345, over 4755.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02936, over 972813.45 frames.], batch size: 16, lr: 1.36e-04 +2022-05-08 21:46:32,021 INFO [train.py:715] (7/8) Epoch 16, batch 34800, loss[loss=0.1436, simple_loss=0.207, pruned_loss=0.04007, over 4922.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.0292, over 973396.14 frames.], batch size: 23, lr: 1.36e-04 +2022-05-08 21:47:23,863 INFO [train.py:715] (7/8) Epoch 17, batch 0, loss[loss=0.1682, simple_loss=0.2461, pruned_loss=0.04515, over 4825.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2461, pruned_loss=0.04515, over 4825.00 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 21:48:03,326 INFO [train.py:715] (7/8) Epoch 17, batch 50, loss[loss=0.1175, simple_loss=0.1856, pruned_loss=0.02473, over 4746.00 frames.], tot_loss[loss=0.1311, simple_loss=0.206, pruned_loss=0.02804, over 218859.35 frames.], batch size: 12, lr: 1.32e-04 +2022-05-08 21:48:44,384 INFO [train.py:715] (7/8) Epoch 17, batch 100, loss[loss=0.1198, simple_loss=0.1912, pruned_loss=0.0242, over 4801.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.0287, over 385207.52 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 21:49:25,329 INFO 
[train.py:715] (7/8) Epoch 17, batch 150, loss[loss=0.1234, simple_loss=0.2039, pruned_loss=0.02143, over 4982.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02915, over 516140.02 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 21:50:06,398 INFO [train.py:715] (7/8) Epoch 17, batch 200, loss[loss=0.1349, simple_loss=0.2105, pruned_loss=0.0296, over 4784.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.029, over 617722.86 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 21:50:49,386 INFO [train.py:715] (7/8) Epoch 17, batch 250, loss[loss=0.1116, simple_loss=0.1867, pruned_loss=0.01819, over 4785.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02917, over 696502.73 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 21:51:30,989 INFO [train.py:715] (7/8) Epoch 17, batch 300, loss[loss=0.1278, simple_loss=0.1986, pruned_loss=0.02854, over 4963.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02978, over 758324.31 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 21:52:11,855 INFO [train.py:715] (7/8) Epoch 17, batch 350, loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02822, over 4795.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2087, pruned_loss=0.03025, over 805077.15 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 21:52:52,794 INFO [train.py:715] (7/8) Epoch 17, batch 400, loss[loss=0.1303, simple_loss=0.1965, pruned_loss=0.03207, over 4756.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2085, pruned_loss=0.02999, over 843196.09 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 21:53:33,715 INFO [train.py:715] (7/8) Epoch 17, batch 450, loss[loss=0.1217, simple_loss=0.1931, pruned_loss=0.02518, over 4851.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02891, over 871832.95 frames.], batch size: 20, lr: 1.32e-04 +2022-05-08 21:54:14,773 INFO [train.py:715] (7/8) Epoch 17, batch 500, loss[loss=0.1336, simple_loss=0.2212, pruned_loss=0.02301, over 4983.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02967, over 894052.33 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 21:54:56,776 INFO [train.py:715] (7/8) Epoch 17, batch 550, loss[loss=0.128, simple_loss=0.2058, pruned_loss=0.0251, over 4992.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02946, over 911304.46 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 21:55:37,895 INFO [train.py:715] (7/8) Epoch 17, batch 600, loss[loss=0.1192, simple_loss=0.2029, pruned_loss=0.01773, over 4817.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2062, pruned_loss=0.02935, over 924724.10 frames.], batch size: 27, lr: 1.32e-04 +2022-05-08 21:56:20,091 INFO [train.py:715] (7/8) Epoch 17, batch 650, loss[loss=0.1194, simple_loss=0.2003, pruned_loss=0.01922, over 4807.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02879, over 935927.42 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 21:57:01,713 INFO [train.py:715] (7/8) Epoch 17, batch 700, loss[loss=0.1102, simple_loss=0.1828, pruned_loss=0.01876, over 4954.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02909, over 944127.48 frames.], batch size: 35, lr: 1.32e-04 +2022-05-08 21:57:42,595 INFO [train.py:715] (7/8) Epoch 17, batch 750, loss[loss=0.1453, simple_loss=0.207, pruned_loss=0.04177, over 4909.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02904, over 950143.47 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 21:58:23,377 INFO [train.py:715] (7/8) Epoch 17, batch 800, 
loss[loss=0.1255, simple_loss=0.204, pruned_loss=0.02345, over 4942.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02917, over 954585.35 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 21:59:03,984 INFO [train.py:715] (7/8) Epoch 17, batch 850, loss[loss=0.1417, simple_loss=0.2145, pruned_loss=0.03451, over 4772.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.0296, over 958556.14 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 21:59:45,378 INFO [train.py:715] (7/8) Epoch 17, batch 900, loss[loss=0.1298, simple_loss=0.211, pruned_loss=0.02435, over 4878.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02975, over 961054.18 frames.], batch size: 22, lr: 1.32e-04 +2022-05-08 22:00:26,263 INFO [train.py:715] (7/8) Epoch 17, batch 950, loss[loss=0.1399, simple_loss=0.2113, pruned_loss=0.03426, over 4926.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2079, pruned_loss=0.02992, over 963203.40 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:01:07,647 INFO [train.py:715] (7/8) Epoch 17, batch 1000, loss[loss=0.1238, simple_loss=0.196, pruned_loss=0.02577, over 4944.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02995, over 965364.82 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:01:48,892 INFO [train.py:715] (7/8) Epoch 17, batch 1050, loss[loss=0.1523, simple_loss=0.2178, pruned_loss=0.04341, over 4980.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02971, over 967239.81 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:02:29,885 INFO [train.py:715] (7/8) Epoch 17, batch 1100, loss[loss=0.1353, simple_loss=0.2049, pruned_loss=0.03283, over 4948.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02954, over 968613.44 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:03:10,394 INFO [train.py:715] (7/8) Epoch 17, batch 1150, loss[loss=0.1284, simple_loss=0.2149, pruned_loss=0.02096, over 4898.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03005, over 968854.72 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:03:51,862 INFO [train.py:715] (7/8) Epoch 17, batch 1200, loss[loss=0.1436, simple_loss=0.2183, pruned_loss=0.03446, over 4814.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02987, over 969655.52 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:04:32,831 INFO [train.py:715] (7/8) Epoch 17, batch 1250, loss[loss=0.1174, simple_loss=0.2029, pruned_loss=0.01595, over 4802.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.0295, over 970637.65 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:05:13,884 INFO [train.py:715] (7/8) Epoch 17, batch 1300, loss[loss=0.1419, simple_loss=0.2172, pruned_loss=0.03331, over 4852.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02916, over 970713.63 frames.], batch size: 20, lr: 1.32e-04 +2022-05-08 22:05:55,247 INFO [train.py:715] (7/8) Epoch 17, batch 1350, loss[loss=0.1185, simple_loss=0.1972, pruned_loss=0.01993, over 4702.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.0293, over 971124.11 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:06:36,196 INFO [train.py:715] (7/8) Epoch 17, batch 1400, loss[loss=0.1524, simple_loss=0.2403, pruned_loss=0.03227, over 4932.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02942, over 971044.88 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:07:16,805 INFO [train.py:715] (7/8) Epoch 17, batch 1450, loss[loss=0.1392, 
simple_loss=0.2232, pruned_loss=0.02761, over 4807.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02952, over 971723.13 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:07:57,515 INFO [train.py:715] (7/8) Epoch 17, batch 1500, loss[loss=0.1143, simple_loss=0.1909, pruned_loss=0.01881, over 4807.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.0297, over 972054.54 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:08:39,011 INFO [train.py:715] (7/8) Epoch 17, batch 1550, loss[loss=0.1366, simple_loss=0.2127, pruned_loss=0.03027, over 4816.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02932, over 970464.86 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:09:20,353 INFO [train.py:715] (7/8) Epoch 17, batch 1600, loss[loss=0.1309, simple_loss=0.2105, pruned_loss=0.02568, over 4770.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02951, over 970951.36 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 22:10:01,029 INFO [train.py:715] (7/8) Epoch 17, batch 1650, loss[loss=0.146, simple_loss=0.2305, pruned_loss=0.03074, over 4783.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02984, over 970181.52 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:10:42,369 INFO [train.py:715] (7/8) Epoch 17, batch 1700, loss[loss=0.1221, simple_loss=0.1974, pruned_loss=0.02339, over 4923.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03003, over 970966.98 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:11:23,613 INFO [train.py:715] (7/8) Epoch 17, batch 1750, loss[loss=0.1352, simple_loss=0.2145, pruned_loss=0.02798, over 4971.00 frames.], tot_loss[loss=0.1351, simple_loss=0.2089, pruned_loss=0.03063, over 971470.23 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:12:04,540 INFO [train.py:715] (7/8) Epoch 17, batch 1800, loss[loss=0.1132, simple_loss=0.1801, pruned_loss=0.02317, over 4869.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2083, pruned_loss=0.03038, over 971275.71 frames.], batch size: 20, lr: 1.32e-04 +2022-05-08 22:12:45,641 INFO [train.py:715] (7/8) Epoch 17, batch 1850, loss[loss=0.1243, simple_loss=0.1989, pruned_loss=0.0248, over 4939.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2085, pruned_loss=0.03022, over 971027.07 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:13:27,335 INFO [train.py:715] (7/8) Epoch 17, batch 1900, loss[loss=0.14, simple_loss=0.2104, pruned_loss=0.03475, over 4760.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02974, over 971903.18 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:14:08,485 INFO [train.py:715] (7/8) Epoch 17, batch 1950, loss[loss=0.1052, simple_loss=0.1794, pruned_loss=0.01552, over 4831.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02988, over 971463.93 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:14:49,364 INFO [train.py:715] (7/8) Epoch 17, batch 2000, loss[loss=0.1442, simple_loss=0.2234, pruned_loss=0.03249, over 4946.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2079, pruned_loss=0.03036, over 972064.11 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 22:15:30,345 INFO [train.py:715] (7/8) Epoch 17, batch 2050, loss[loss=0.126, simple_loss=0.2085, pruned_loss=0.02175, over 4830.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.02997, over 971337.76 frames.], batch size: 26, lr: 1.32e-04 +2022-05-08 22:16:11,455 INFO [train.py:715] (7/8) Epoch 17, batch 2100, loss[loss=0.1331, simple_loss=0.2047, 
pruned_loss=0.03071, over 4928.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02954, over 971500.30 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:16:52,788 INFO [train.py:715] (7/8) Epoch 17, batch 2150, loss[loss=0.1127, simple_loss=0.1914, pruned_loss=0.01699, over 4966.00 frames.], tot_loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.02964, over 972461.40 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:17:34,190 INFO [train.py:715] (7/8) Epoch 17, batch 2200, loss[loss=0.141, simple_loss=0.2087, pruned_loss=0.03661, over 4976.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.02991, over 973143.09 frames.], batch size: 28, lr: 1.32e-04 +2022-05-08 22:18:15,308 INFO [train.py:715] (7/8) Epoch 17, batch 2250, loss[loss=0.1601, simple_loss=0.2347, pruned_loss=0.04272, over 4904.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02963, over 972814.13 frames.], batch size: 39, lr: 1.32e-04 +2022-05-08 22:18:56,046 INFO [train.py:715] (7/8) Epoch 17, batch 2300, loss[loss=0.1383, simple_loss=0.208, pruned_loss=0.03429, over 4788.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02952, over 972988.76 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 22:19:36,480 INFO [train.py:715] (7/8) Epoch 17, batch 2350, loss[loss=0.1377, simple_loss=0.2057, pruned_loss=0.03488, over 4778.00 frames.], tot_loss[loss=0.1324, simple_loss=0.206, pruned_loss=0.02939, over 972252.58 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:20:17,320 INFO [train.py:715] (7/8) Epoch 17, batch 2400, loss[loss=0.1426, simple_loss=0.2095, pruned_loss=0.03784, over 4978.00 frames.], tot_loss[loss=0.132, simple_loss=0.2056, pruned_loss=0.02918, over 972557.03 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:20:58,228 INFO [train.py:715] (7/8) Epoch 17, batch 2450, loss[loss=0.1223, simple_loss=0.1929, pruned_loss=0.02584, over 4829.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.0292, over 971292.87 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:21:39,040 INFO [train.py:715] (7/8) Epoch 17, batch 2500, loss[loss=0.1365, simple_loss=0.206, pruned_loss=0.03348, over 4888.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02935, over 970743.64 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:22:20,006 INFO [train.py:715] (7/8) Epoch 17, batch 2550, loss[loss=0.1272, simple_loss=0.1985, pruned_loss=0.02796, over 4826.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02877, over 971172.67 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:23:00,990 INFO [train.py:715] (7/8) Epoch 17, batch 2600, loss[loss=0.1453, simple_loss=0.2218, pruned_loss=0.03435, over 4978.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2081, pruned_loss=0.02958, over 972563.77 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:23:42,205 INFO [train.py:715] (7/8) Epoch 17, batch 2650, loss[loss=0.1399, simple_loss=0.2164, pruned_loss=0.03172, over 4793.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02965, over 972679.11 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 22:24:22,847 INFO [train.py:715] (7/8) Epoch 17, batch 2700, loss[loss=0.1222, simple_loss=0.1942, pruned_loss=0.02509, over 4917.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02996, over 973735.19 frames.], batch size: 23, lr: 1.32e-04 +2022-05-08 22:25:04,067 INFO [train.py:715] (7/8) Epoch 17, batch 2750, loss[loss=0.1108, simple_loss=0.1856, pruned_loss=0.01795, over 
4695.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02961, over 973469.58 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:25:44,596 INFO [train.py:715] (7/8) Epoch 17, batch 2800, loss[loss=0.1255, simple_loss=0.1906, pruned_loss=0.03024, over 4761.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2074, pruned_loss=0.03006, over 972819.63 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:26:25,557 INFO [train.py:715] (7/8) Epoch 17, batch 2850, loss[loss=0.1446, simple_loss=0.2188, pruned_loss=0.03523, over 4815.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02977, over 972465.18 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:27:06,306 INFO [train.py:715] (7/8) Epoch 17, batch 2900, loss[loss=0.1364, simple_loss=0.2092, pruned_loss=0.0318, over 4792.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02973, over 972710.30 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:27:47,269 INFO [train.py:715] (7/8) Epoch 17, batch 2950, loss[loss=0.122, simple_loss=0.1957, pruned_loss=0.0241, over 4945.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02999, over 972931.44 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:28:28,420 INFO [train.py:715] (7/8) Epoch 17, batch 3000, loss[loss=0.1209, simple_loss=0.1989, pruned_loss=0.02149, over 4824.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2067, pruned_loss=0.02972, over 973278.72 frames.], batch size: 27, lr: 1.32e-04 +2022-05-08 22:28:28,420 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 22:28:43,493 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1882, pruned_loss=0.01063, over 914524.00 frames. +2022-05-08 22:29:24,684 INFO [train.py:715] (7/8) Epoch 17, batch 3050, loss[loss=0.1308, simple_loss=0.2032, pruned_loss=0.02919, over 4818.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2072, pruned_loss=0.02965, over 973836.90 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:30:05,286 INFO [train.py:715] (7/8) Epoch 17, batch 3100, loss[loss=0.09855, simple_loss=0.1767, pruned_loss=0.01022, over 4777.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02931, over 972898.45 frames.], batch size: 12, lr: 1.32e-04 +2022-05-08 22:30:46,410 INFO [train.py:715] (7/8) Epoch 17, batch 3150, loss[loss=0.1359, simple_loss=0.209, pruned_loss=0.03141, over 4895.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02959, over 973951.49 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 22:31:26,342 INFO [train.py:715] (7/8) Epoch 17, batch 3200, loss[loss=0.1228, simple_loss=0.1971, pruned_loss=0.02422, over 4911.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02948, over 974499.99 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 22:32:07,676 INFO [train.py:715] (7/8) Epoch 17, batch 3250, loss[loss=0.1403, simple_loss=0.2199, pruned_loss=0.03036, over 4926.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02923, over 974066.36 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:32:47,742 INFO [train.py:715] (7/8) Epoch 17, batch 3300, loss[loss=0.1545, simple_loss=0.2401, pruned_loss=0.03443, over 4898.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02892, over 973071.70 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:33:28,440 INFO [train.py:715] (7/8) Epoch 17, batch 3350, loss[loss=0.1391, simple_loss=0.2128, pruned_loss=0.03268, over 4982.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, 
pruned_loss=0.02893, over 972532.26 frames.], batch size: 35, lr: 1.32e-04 +2022-05-08 22:34:09,187 INFO [train.py:715] (7/8) Epoch 17, batch 3400, loss[loss=0.09942, simple_loss=0.1695, pruned_loss=0.01467, over 4835.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02943, over 972161.87 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:34:50,598 INFO [train.py:715] (7/8) Epoch 17, batch 3450, loss[loss=0.13, simple_loss=0.2122, pruned_loss=0.02383, over 4888.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2073, pruned_loss=0.02992, over 971584.17 frames.], batch size: 17, lr: 1.32e-04 +2022-05-08 22:35:30,944 INFO [train.py:715] (7/8) Epoch 17, batch 3500, loss[loss=0.1231, simple_loss=0.1901, pruned_loss=0.02801, over 4706.00 frames.], tot_loss[loss=0.1333, simple_loss=0.207, pruned_loss=0.02976, over 971503.14 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:36:11,166 INFO [train.py:715] (7/8) Epoch 17, batch 3550, loss[loss=0.1129, simple_loss=0.1887, pruned_loss=0.01857, over 4782.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02945, over 971668.99 frames.], batch size: 18, lr: 1.32e-04 +2022-05-08 22:36:52,135 INFO [train.py:715] (7/8) Epoch 17, batch 3600, loss[loss=0.1392, simple_loss=0.2179, pruned_loss=0.03025, over 4986.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02913, over 972009.48 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:37:31,756 INFO [train.py:715] (7/8) Epoch 17, batch 3650, loss[loss=0.1523, simple_loss=0.2251, pruned_loss=0.03975, over 4789.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02898, over 972232.19 frames.], batch size: 12, lr: 1.32e-04 +2022-05-08 22:38:11,921 INFO [train.py:715] (7/8) Epoch 17, batch 3700, loss[loss=0.1314, simple_loss=0.1995, pruned_loss=0.03166, over 4988.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02914, over 971790.72 frames.], batch size: 35, lr: 1.32e-04 +2022-05-08 22:38:52,849 INFO [train.py:715] (7/8) Epoch 17, batch 3750, loss[loss=0.1292, simple_loss=0.2119, pruned_loss=0.02324, over 4895.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02896, over 973015.08 frames.], batch size: 22, lr: 1.32e-04 +2022-05-08 22:39:33,615 INFO [train.py:715] (7/8) Epoch 17, batch 3800, loss[loss=0.1188, simple_loss=0.1952, pruned_loss=0.02122, over 4805.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02909, over 972566.89 frames.], batch size: 25, lr: 1.32e-04 +2022-05-08 22:40:14,220 INFO [train.py:715] (7/8) Epoch 17, batch 3850, loss[loss=0.1338, simple_loss=0.2006, pruned_loss=0.03354, over 4970.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02938, over 973179.29 frames.], batch size: 24, lr: 1.32e-04 +2022-05-08 22:40:54,280 INFO [train.py:715] (7/8) Epoch 17, batch 3900, loss[loss=0.1436, simple_loss=0.2097, pruned_loss=0.03871, over 4910.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2063, pruned_loss=0.02945, over 972542.95 frames.], batch size: 19, lr: 1.32e-04 +2022-05-08 22:41:35,759 INFO [train.py:715] (7/8) Epoch 17, batch 3950, loss[loss=0.1346, simple_loss=0.206, pruned_loss=0.03154, over 4860.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2068, pruned_loss=0.02978, over 972982.68 frames.], batch size: 20, lr: 1.32e-04 +2022-05-08 22:42:15,635 INFO [train.py:715] (7/8) Epoch 17, batch 4000, loss[loss=0.1575, simple_loss=0.2239, pruned_loss=0.04557, over 4901.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2064, pruned_loss=0.02951, over 
973489.54 frames.], batch size: 38, lr: 1.32e-04 +2022-05-08 22:42:56,132 INFO [train.py:715] (7/8) Epoch 17, batch 4050, loss[loss=0.1394, simple_loss=0.2159, pruned_loss=0.0314, over 4924.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2066, pruned_loss=0.02961, over 973545.25 frames.], batch size: 29, lr: 1.32e-04 +2022-05-08 22:43:36,612 INFO [train.py:715] (7/8) Epoch 17, batch 4100, loss[loss=0.1433, simple_loss=0.2231, pruned_loss=0.03178, over 4974.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02925, over 973406.35 frames.], batch size: 15, lr: 1.32e-04 +2022-05-08 22:44:17,667 INFO [train.py:715] (7/8) Epoch 17, batch 4150, loss[loss=0.1268, simple_loss=0.2114, pruned_loss=0.02112, over 4886.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02929, over 973248.92 frames.], batch size: 22, lr: 1.32e-04 +2022-05-08 22:44:56,906 INFO [train.py:715] (7/8) Epoch 17, batch 4200, loss[loss=0.1271, simple_loss=0.1958, pruned_loss=0.02924, over 4981.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02895, over 972923.84 frames.], batch size: 14, lr: 1.32e-04 +2022-05-08 22:45:36,944 INFO [train.py:715] (7/8) Epoch 17, batch 4250, loss[loss=0.1588, simple_loss=0.2315, pruned_loss=0.0431, over 4947.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02911, over 973048.63 frames.], batch size: 21, lr: 1.32e-04 +2022-05-08 22:46:18,118 INFO [train.py:715] (7/8) Epoch 17, batch 4300, loss[loss=0.1298, simple_loss=0.2061, pruned_loss=0.02676, over 4789.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02932, over 973211.93 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 22:46:58,168 INFO [train.py:715] (7/8) Epoch 17, batch 4350, loss[loss=0.1373, simple_loss=0.2108, pruned_loss=0.03187, over 4769.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2079, pruned_loss=0.0293, over 972957.27 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 22:47:38,043 INFO [train.py:715] (7/8) Epoch 17, batch 4400, loss[loss=0.1472, simple_loss=0.2127, pruned_loss=0.04092, over 4769.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.02915, over 972317.33 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 22:48:18,896 INFO [train.py:715] (7/8) Epoch 17, batch 4450, loss[loss=0.1487, simple_loss=0.2168, pruned_loss=0.04031, over 4835.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2073, pruned_loss=0.02883, over 972179.83 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 22:48:59,885 INFO [train.py:715] (7/8) Epoch 17, batch 4500, loss[loss=0.1252, simple_loss=0.2017, pruned_loss=0.0243, over 4786.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02923, over 971852.38 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 22:49:39,759 INFO [train.py:715] (7/8) Epoch 17, batch 4550, loss[loss=0.158, simple_loss=0.2356, pruned_loss=0.04017, over 4933.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.0289, over 972174.26 frames.], batch size: 23, lr: 1.31e-04 +2022-05-08 22:50:20,190 INFO [train.py:715] (7/8) Epoch 17, batch 4600, loss[loss=0.1265, simple_loss=0.206, pruned_loss=0.02348, over 4695.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02897, over 971844.91 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:51:01,212 INFO [train.py:715] (7/8) Epoch 17, batch 4650, loss[loss=0.1193, simple_loss=0.1954, pruned_loss=0.02163, over 4977.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02858, over 971497.23 frames.], batch 
size: 39, lr: 1.31e-04 +2022-05-08 22:51:41,123 INFO [train.py:715] (7/8) Epoch 17, batch 4700, loss[loss=0.1232, simple_loss=0.1975, pruned_loss=0.02448, over 4915.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02856, over 970779.83 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 22:52:21,066 INFO [train.py:715] (7/8) Epoch 17, batch 4750, loss[loss=0.1256, simple_loss=0.2046, pruned_loss=0.02333, over 4802.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02878, over 971256.98 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 22:53:02,044 INFO [train.py:715] (7/8) Epoch 17, batch 4800, loss[loss=0.134, simple_loss=0.2012, pruned_loss=0.03336, over 4986.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02879, over 971630.92 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:53:42,827 INFO [train.py:715] (7/8) Epoch 17, batch 4850, loss[loss=0.1095, simple_loss=0.186, pruned_loss=0.01649, over 4907.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02881, over 972391.59 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 22:54:22,669 INFO [train.py:715] (7/8) Epoch 17, batch 4900, loss[loss=0.121, simple_loss=0.2001, pruned_loss=0.02093, over 4930.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02872, over 972119.58 frames.], batch size: 29, lr: 1.31e-04 +2022-05-08 22:55:03,098 INFO [train.py:715] (7/8) Epoch 17, batch 4950, loss[loss=0.1388, simple_loss=0.2127, pruned_loss=0.03247, over 4787.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.0289, over 972128.99 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 22:55:44,143 INFO [train.py:715] (7/8) Epoch 17, batch 5000, loss[loss=0.1354, simple_loss=0.2164, pruned_loss=0.02723, over 4820.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2075, pruned_loss=0.02887, over 972663.10 frames.], batch size: 27, lr: 1.31e-04 +2022-05-08 22:56:24,630 INFO [train.py:715] (7/8) Epoch 17, batch 5050, loss[loss=0.1695, simple_loss=0.2303, pruned_loss=0.05436, over 4985.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2079, pruned_loss=0.02925, over 973449.60 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 22:57:04,197 INFO [train.py:715] (7/8) Epoch 17, batch 5100, loss[loss=0.1548, simple_loss=0.2213, pruned_loss=0.04415, over 4876.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02931, over 973495.93 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 22:57:44,982 INFO [train.py:715] (7/8) Epoch 17, batch 5150, loss[loss=0.1295, simple_loss=0.208, pruned_loss=0.0255, over 4861.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02935, over 972808.22 frames.], batch size: 38, lr: 1.31e-04 +2022-05-08 22:58:26,130 INFO [train.py:715] (7/8) Epoch 17, batch 5200, loss[loss=0.1326, simple_loss=0.1935, pruned_loss=0.03584, over 4771.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02911, over 972780.59 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 22:59:05,342 INFO [train.py:715] (7/8) Epoch 17, batch 5250, loss[loss=0.1252, simple_loss=0.2082, pruned_loss=0.02113, over 4932.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02899, over 972710.21 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 22:59:44,889 INFO [train.py:715] (7/8) Epoch 17, batch 5300, loss[loss=0.1284, simple_loss=0.2005, pruned_loss=0.02812, over 4954.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02958, over 973156.90 frames.], batch size: 39, lr: 1.31e-04 
+2022-05-08 23:00:25,449 INFO [train.py:715] (7/8) Epoch 17, batch 5350, loss[loss=0.1243, simple_loss=0.1941, pruned_loss=0.02729, over 4888.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02946, over 973077.28 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:01:06,241 INFO [train.py:715] (7/8) Epoch 17, batch 5400, loss[loss=0.1459, simple_loss=0.2242, pruned_loss=0.03381, over 4791.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02933, over 973036.55 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:01:45,348 INFO [train.py:715] (7/8) Epoch 17, batch 5450, loss[loss=0.09501, simple_loss=0.1683, pruned_loss=0.01085, over 4959.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02943, over 973182.08 frames.], batch size: 28, lr: 1.31e-04 +2022-05-08 23:02:26,552 INFO [train.py:715] (7/8) Epoch 17, batch 5500, loss[loss=0.133, simple_loss=0.2066, pruned_loss=0.02972, over 4881.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03004, over 973428.47 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:03:07,880 INFO [train.py:715] (7/8) Epoch 17, batch 5550, loss[loss=0.1292, simple_loss=0.2095, pruned_loss=0.02441, over 4905.00 frames.], tot_loss[loss=0.1342, simple_loss=0.208, pruned_loss=0.03019, over 972698.33 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:03:46,993 INFO [train.py:715] (7/8) Epoch 17, batch 5600, loss[loss=0.1305, simple_loss=0.2095, pruned_loss=0.02572, over 4886.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02987, over 972210.29 frames.], batch size: 22, lr: 1.31e-04 +2022-05-08 23:04:27,251 INFO [train.py:715] (7/8) Epoch 17, batch 5650, loss[loss=0.1104, simple_loss=0.1806, pruned_loss=0.02009, over 4712.00 frames.], tot_loss[loss=0.1336, simple_loss=0.207, pruned_loss=0.0301, over 971741.99 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:05:08,285 INFO [train.py:715] (7/8) Epoch 17, batch 5700, loss[loss=0.1595, simple_loss=0.2358, pruned_loss=0.0416, over 4872.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2066, pruned_loss=0.02981, over 972400.33 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:05:48,477 INFO [train.py:715] (7/8) Epoch 17, batch 5750, loss[loss=0.1574, simple_loss=0.2241, pruned_loss=0.04531, over 4882.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02975, over 973585.80 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:06:27,751 INFO [train.py:715] (7/8) Epoch 17, batch 5800, loss[loss=0.1243, simple_loss=0.1992, pruned_loss=0.0247, over 4834.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.0299, over 973997.61 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:07:08,775 INFO [train.py:715] (7/8) Epoch 17, batch 5850, loss[loss=0.142, simple_loss=0.2147, pruned_loss=0.03467, over 4687.00 frames.], tot_loss[loss=0.1326, simple_loss=0.206, pruned_loss=0.02958, over 973882.65 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:07:49,080 INFO [train.py:715] (7/8) Epoch 17, batch 5900, loss[loss=0.1227, simple_loss=0.1994, pruned_loss=0.023, over 4838.00 frames.], tot_loss[loss=0.133, simple_loss=0.2064, pruned_loss=0.02983, over 973114.45 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:08:29,710 INFO [train.py:715] (7/8) Epoch 17, batch 5950, loss[loss=0.1341, simple_loss=0.2095, pruned_loss=0.02939, over 4825.00 frames.], tot_loss[loss=0.1335, simple_loss=0.207, pruned_loss=0.03001, over 972589.14 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:09:09,146 INFO 
[train.py:715] (7/8) Epoch 17, batch 6000, loss[loss=0.1456, simple_loss=0.2169, pruned_loss=0.03711, over 4932.00 frames.], tot_loss[loss=0.1324, simple_loss=0.206, pruned_loss=0.0294, over 973019.04 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:09:09,147 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 23:09:23,454 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01069, over 914524.00 frames. +2022-05-08 23:10:02,838 INFO [train.py:715] (7/8) Epoch 17, batch 6050, loss[loss=0.1246, simple_loss=0.1963, pruned_loss=0.02646, over 4820.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2071, pruned_loss=0.02985, over 973098.76 frames.], batch size: 13, lr: 1.31e-04 +2022-05-08 23:10:43,311 INFO [train.py:715] (7/8) Epoch 17, batch 6100, loss[loss=0.1154, simple_loss=0.1924, pruned_loss=0.0192, over 4819.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02984, over 972642.31 frames.], batch size: 27, lr: 1.31e-04 +2022-05-08 23:11:22,426 INFO [train.py:715] (7/8) Epoch 17, batch 6150, loss[loss=0.117, simple_loss=0.1925, pruned_loss=0.02071, over 4917.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02942, over 972109.05 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:12:02,008 INFO [train.py:715] (7/8) Epoch 17, batch 6200, loss[loss=0.1416, simple_loss=0.2155, pruned_loss=0.0339, over 4858.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02936, over 972481.72 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 23:12:42,486 INFO [train.py:715] (7/8) Epoch 17, batch 6250, loss[loss=0.1285, simple_loss=0.2088, pruned_loss=0.02404, over 4752.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.0291, over 972423.73 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:13:22,269 INFO [train.py:715] (7/8) Epoch 17, batch 6300, loss[loss=0.1146, simple_loss=0.1888, pruned_loss=0.02021, over 4891.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.0291, over 972200.13 frames.], batch size: 22, lr: 1.31e-04 +2022-05-08 23:14:01,682 INFO [train.py:715] (7/8) Epoch 17, batch 6350, loss[loss=0.1418, simple_loss=0.212, pruned_loss=0.0358, over 4796.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02886, over 972498.31 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:14:41,499 INFO [train.py:715] (7/8) Epoch 17, batch 6400, loss[loss=0.1407, simple_loss=0.2185, pruned_loss=0.03143, over 4974.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02927, over 971668.80 frames.], batch size: 35, lr: 1.31e-04 +2022-05-08 23:15:21,778 INFO [train.py:715] (7/8) Epoch 17, batch 6450, loss[loss=0.1161, simple_loss=0.1921, pruned_loss=0.02007, over 4878.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02891, over 972329.94 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 23:16:01,147 INFO [train.py:715] (7/8) Epoch 17, batch 6500, loss[loss=0.1229, simple_loss=0.1888, pruned_loss=0.02849, over 4907.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02887, over 973157.61 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:16:40,479 INFO [train.py:715] (7/8) Epoch 17, batch 6550, loss[loss=0.1232, simple_loss=0.1975, pruned_loss=0.02446, over 4807.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02898, over 973236.54 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:17:20,847 INFO [train.py:715] (7/8) Epoch 17, batch 6600, loss[loss=0.131, 
simple_loss=0.198, pruned_loss=0.03194, over 4781.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02899, over 973016.16 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:18:01,036 INFO [train.py:715] (7/8) Epoch 17, batch 6650, loss[loss=0.1193, simple_loss=0.1947, pruned_loss=0.02195, over 4741.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2056, pruned_loss=0.02897, over 973347.13 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:18:40,484 INFO [train.py:715] (7/8) Epoch 17, batch 6700, loss[loss=0.09958, simple_loss=0.1704, pruned_loss=0.01436, over 4823.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02857, over 973336.20 frames.], batch size: 27, lr: 1.31e-04 +2022-05-08 23:19:20,731 INFO [train.py:715] (7/8) Epoch 17, batch 6750, loss[loss=0.1202, simple_loss=0.1911, pruned_loss=0.02464, over 4966.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02892, over 973582.99 frames.], batch size: 35, lr: 1.31e-04 +2022-05-08 23:20:00,498 INFO [train.py:715] (7/8) Epoch 17, batch 6800, loss[loss=0.1289, simple_loss=0.203, pruned_loss=0.02738, over 4976.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02882, over 972591.19 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:20:41,165 INFO [train.py:715] (7/8) Epoch 17, batch 6850, loss[loss=0.1171, simple_loss=0.2014, pruned_loss=0.01639, over 4748.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02881, over 972132.55 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:21:20,243 INFO [train.py:715] (7/8) Epoch 17, batch 6900, loss[loss=0.1331, simple_loss=0.2062, pruned_loss=0.03, over 4925.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.0289, over 972228.33 frames.], batch size: 39, lr: 1.31e-04 +2022-05-08 23:22:00,929 INFO [train.py:715] (7/8) Epoch 17, batch 6950, loss[loss=0.1596, simple_loss=0.2419, pruned_loss=0.03862, over 4915.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.0291, over 972045.69 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:22:40,673 INFO [train.py:715] (7/8) Epoch 17, batch 7000, loss[loss=0.127, simple_loss=0.2043, pruned_loss=0.02484, over 4923.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.02906, over 971881.77 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:23:20,236 INFO [train.py:715] (7/8) Epoch 17, batch 7050, loss[loss=0.1464, simple_loss=0.2094, pruned_loss=0.04169, over 4874.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.0295, over 972120.02 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:24:00,500 INFO [train.py:715] (7/8) Epoch 17, batch 7100, loss[loss=0.1408, simple_loss=0.2247, pruned_loss=0.0285, over 4967.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02976, over 972311.04 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:24:40,023 INFO [train.py:715] (7/8) Epoch 17, batch 7150, loss[loss=0.1265, simple_loss=0.1927, pruned_loss=0.03021, over 4970.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02961, over 971780.97 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:25:19,631 INFO [train.py:715] (7/8) Epoch 17, batch 7200, loss[loss=0.1255, simple_loss=0.1998, pruned_loss=0.02567, over 4811.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02971, over 971618.63 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:25:58,585 INFO [train.py:715] (7/8) Epoch 17, batch 7250, loss[loss=0.1484, simple_loss=0.2295, 
pruned_loss=0.0336, over 4845.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.02985, over 971981.28 frames.], batch size: 13, lr: 1.31e-04 +2022-05-08 23:26:39,075 INFO [train.py:715] (7/8) Epoch 17, batch 7300, loss[loss=0.1316, simple_loss=0.1901, pruned_loss=0.03657, over 4812.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03011, over 972651.24 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:27:18,027 INFO [train.py:715] (7/8) Epoch 17, batch 7350, loss[loss=0.1135, simple_loss=0.1932, pruned_loss=0.01684, over 4902.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02983, over 971680.64 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:27:56,387 INFO [train.py:715] (7/8) Epoch 17, batch 7400, loss[loss=0.1642, simple_loss=0.2493, pruned_loss=0.03951, over 4813.00 frames.], tot_loss[loss=0.134, simple_loss=0.2079, pruned_loss=0.03006, over 971442.26 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:28:36,428 INFO [train.py:715] (7/8) Epoch 17, batch 7450, loss[loss=0.1277, simple_loss=0.2094, pruned_loss=0.02302, over 4738.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2082, pruned_loss=0.02998, over 971646.25 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:29:15,432 INFO [train.py:715] (7/8) Epoch 17, batch 7500, loss[loss=0.1404, simple_loss=0.2106, pruned_loss=0.03507, over 4872.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02971, over 973052.00 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:29:55,164 INFO [train.py:715] (7/8) Epoch 17, batch 7550, loss[loss=0.1437, simple_loss=0.219, pruned_loss=0.03422, over 4917.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02957, over 972758.01 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:30:34,492 INFO [train.py:715] (7/8) Epoch 17, batch 7600, loss[loss=0.1443, simple_loss=0.2149, pruned_loss=0.03688, over 4757.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02949, over 972690.67 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:31:14,613 INFO [train.py:715] (7/8) Epoch 17, batch 7650, loss[loss=0.1602, simple_loss=0.2503, pruned_loss=0.03508, over 4940.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.0296, over 971973.11 frames.], batch size: 29, lr: 1.31e-04 +2022-05-08 23:31:54,497 INFO [train.py:715] (7/8) Epoch 17, batch 7700, loss[loss=0.1245, simple_loss=0.1995, pruned_loss=0.02476, over 4840.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02924, over 971879.37 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:32:33,793 INFO [train.py:715] (7/8) Epoch 17, batch 7750, loss[loss=0.14, simple_loss=0.2295, pruned_loss=0.02529, over 4860.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2074, pruned_loss=0.02896, over 972981.48 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:33:14,392 INFO [train.py:715] (7/8) Epoch 17, batch 7800, loss[loss=0.1119, simple_loss=0.1832, pruned_loss=0.02034, over 4818.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02864, over 972467.01 frames.], batch size: 26, lr: 1.31e-04 +2022-05-08 23:33:54,607 INFO [train.py:715] (7/8) Epoch 17, batch 7850, loss[loss=0.1339, simple_loss=0.2053, pruned_loss=0.03126, over 4769.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02892, over 972174.76 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:34:34,854 INFO [train.py:715] (7/8) Epoch 17, batch 7900, loss[loss=0.1224, simple_loss=0.1959, pruned_loss=0.02445, over 
4848.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02919, over 973702.49 frames.], batch size: 32, lr: 1.31e-04 +2022-05-08 23:35:13,815 INFO [train.py:715] (7/8) Epoch 17, batch 7950, loss[loss=0.1271, simple_loss=0.2018, pruned_loss=0.02617, over 4809.00 frames.], tot_loss[loss=0.133, simple_loss=0.2078, pruned_loss=0.0291, over 974004.75 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:35:53,565 INFO [train.py:715] (7/8) Epoch 17, batch 8000, loss[loss=0.1502, simple_loss=0.2174, pruned_loss=0.04149, over 4985.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02914, over 973762.79 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:36:33,459 INFO [train.py:715] (7/8) Epoch 17, batch 8050, loss[loss=0.1318, simple_loss=0.2135, pruned_loss=0.02498, over 4858.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02931, over 972999.52 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:37:12,791 INFO [train.py:715] (7/8) Epoch 17, batch 8100, loss[loss=0.127, simple_loss=0.2002, pruned_loss=0.0269, over 4768.00 frames.], tot_loss[loss=0.133, simple_loss=0.2078, pruned_loss=0.02913, over 972137.35 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:37:52,691 INFO [train.py:715] (7/8) Epoch 17, batch 8150, loss[loss=0.1318, simple_loss=0.2114, pruned_loss=0.02612, over 4919.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.0291, over 971372.53 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:38:32,365 INFO [train.py:715] (7/8) Epoch 17, batch 8200, loss[loss=0.111, simple_loss=0.1895, pruned_loss=0.01623, over 4746.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02846, over 972021.12 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:39:14,694 INFO [train.py:715] (7/8) Epoch 17, batch 8250, loss[loss=0.1447, simple_loss=0.2146, pruned_loss=0.03736, over 4745.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02868, over 972084.75 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:39:53,908 INFO [train.py:715] (7/8) Epoch 17, batch 8300, loss[loss=0.1309, simple_loss=0.2079, pruned_loss=0.02701, over 4824.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2068, pruned_loss=0.02875, over 972432.04 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:40:33,622 INFO [train.py:715] (7/8) Epoch 17, batch 8350, loss[loss=0.1468, simple_loss=0.2328, pruned_loss=0.03037, over 4879.00 frames.], tot_loss[loss=0.133, simple_loss=0.2077, pruned_loss=0.02915, over 972342.81 frames.], batch size: 22, lr: 1.31e-04 +2022-05-08 23:41:13,217 INFO [train.py:715] (7/8) Epoch 17, batch 8400, loss[loss=0.1416, simple_loss=0.208, pruned_loss=0.03756, over 4987.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02883, over 972858.68 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:41:52,764 INFO [train.py:715] (7/8) Epoch 17, batch 8450, loss[loss=0.1138, simple_loss=0.1896, pruned_loss=0.01902, over 4809.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02924, over 972886.90 frames.], batch size: 21, lr: 1.31e-04 +2022-05-08 23:42:32,330 INFO [train.py:715] (7/8) Epoch 17, batch 8500, loss[loss=0.1127, simple_loss=0.1921, pruned_loss=0.01663, over 4969.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02936, over 973437.07 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:43:12,147 INFO [train.py:715] (7/8) Epoch 17, batch 8550, loss[loss=0.1299, simple_loss=0.211, pruned_loss=0.02437, over 4790.00 frames.], 
tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02961, over 972762.39 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:43:52,009 INFO [train.py:715] (7/8) Epoch 17, batch 8600, loss[loss=0.1495, simple_loss=0.2193, pruned_loss=0.0398, over 4916.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2068, pruned_loss=0.02975, over 972675.79 frames.], batch size: 23, lr: 1.31e-04 +2022-05-08 23:44:31,016 INFO [train.py:715] (7/8) Epoch 17, batch 8650, loss[loss=0.12, simple_loss=0.1918, pruned_loss=0.02406, over 4747.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2072, pruned_loss=0.02988, over 972757.54 frames.], batch size: 16, lr: 1.31e-04 +2022-05-08 23:45:10,887 INFO [train.py:715] (7/8) Epoch 17, batch 8700, loss[loss=0.1273, simple_loss=0.2003, pruned_loss=0.02714, over 4847.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2068, pruned_loss=0.02992, over 973178.82 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:45:50,295 INFO [train.py:715] (7/8) Epoch 17, batch 8750, loss[loss=0.1374, simple_loss=0.2114, pruned_loss=0.03176, over 4760.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2068, pruned_loss=0.02987, over 972903.31 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:46:29,856 INFO [train.py:715] (7/8) Epoch 17, batch 8800, loss[loss=0.1219, simple_loss=0.1927, pruned_loss=0.02557, over 4899.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2066, pruned_loss=0.02965, over 973141.76 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:47:09,590 INFO [train.py:715] (7/8) Epoch 17, batch 8850, loss[loss=0.09873, simple_loss=0.1737, pruned_loss=0.01189, over 4982.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02899, over 972310.21 frames.], batch size: 28, lr: 1.31e-04 +2022-05-08 23:47:48,800 INFO [train.py:715] (7/8) Epoch 17, batch 8900, loss[loss=0.1337, simple_loss=0.2002, pruned_loss=0.03362, over 4916.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02896, over 972492.26 frames.], batch size: 17, lr: 1.31e-04 +2022-05-08 23:48:28,445 INFO [train.py:715] (7/8) Epoch 17, batch 8950, loss[loss=0.1191, simple_loss=0.189, pruned_loss=0.02458, over 4960.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02864, over 972763.72 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:49:07,471 INFO [train.py:715] (7/8) Epoch 17, batch 9000, loss[loss=0.1135, simple_loss=0.1842, pruned_loss=0.02144, over 4782.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.0285, over 972929.29 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:49:07,472 INFO [train.py:733] (7/8) Computing validation loss +2022-05-08 23:49:17,247 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1048, simple_loss=0.1882, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-08 23:49:56,412 INFO [train.py:715] (7/8) Epoch 17, batch 9050, loss[loss=0.1667, simple_loss=0.2328, pruned_loss=0.05026, over 4855.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02927, over 972591.07 frames.], batch size: 30, lr: 1.31e-04 +2022-05-08 23:50:36,248 INFO [train.py:715] (7/8) Epoch 17, batch 9100, loss[loss=0.1341, simple_loss=0.2131, pruned_loss=0.02752, over 4808.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02945, over 972098.67 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:51:15,869 INFO [train.py:715] (7/8) Epoch 17, batch 9150, loss[loss=0.113, simple_loss=0.1883, pruned_loss=0.01882, over 4789.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02876, over 973828.54 frames.], batch size: 24, lr: 1.31e-04 +2022-05-08 23:51:54,746 INFO [train.py:715] (7/8) Epoch 17, batch 9200, loss[loss=0.1337, simple_loss=0.215, pruned_loss=0.02619, over 4819.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.0289, over 973393.31 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:52:34,935 INFO [train.py:715] (7/8) Epoch 17, batch 9250, loss[loss=0.137, simple_loss=0.2141, pruned_loss=0.02998, over 4861.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02892, over 972275.26 frames.], batch size: 20, lr: 1.31e-04 +2022-05-08 23:53:14,616 INFO [train.py:715] (7/8) Epoch 17, batch 9300, loss[loss=0.1475, simple_loss=0.2226, pruned_loss=0.03614, over 4984.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2077, pruned_loss=0.0293, over 972121.23 frames.], batch size: 25, lr: 1.31e-04 +2022-05-08 23:53:53,952 INFO [train.py:715] (7/8) Epoch 17, batch 9350, loss[loss=0.1286, simple_loss=0.201, pruned_loss=0.0281, over 4828.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02928, over 971292.76 frames.], batch size: 15, lr: 1.31e-04 +2022-05-08 23:54:33,281 INFO [train.py:715] (7/8) Epoch 17, batch 9400, loss[loss=0.1789, simple_loss=0.2451, pruned_loss=0.05638, over 4880.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02897, over 972024.85 frames.], batch size: 19, lr: 1.31e-04 +2022-05-08 23:55:13,701 INFO [train.py:715] (7/8) Epoch 17, batch 9450, loss[loss=0.1332, simple_loss=0.2064, pruned_loss=0.02998, over 4930.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02831, over 972074.55 frames.], batch size: 23, lr: 1.31e-04 +2022-05-08 23:55:53,693 INFO [train.py:715] (7/8) Epoch 17, batch 9500, loss[loss=0.1412, simple_loss=0.2088, pruned_loss=0.03682, over 4773.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02899, over 971867.45 frames.], batch size: 14, lr: 1.31e-04 +2022-05-08 23:56:32,927 INFO [train.py:715] (7/8) Epoch 17, batch 9550, loss[loss=0.1518, simple_loss=0.2171, pruned_loss=0.04321, over 4979.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02868, over 972630.25 frames.], batch size: 35, lr: 1.31e-04 +2022-05-08 23:57:12,484 INFO [train.py:715] (7/8) Epoch 17, batch 9600, loss[loss=0.116, simple_loss=0.1864, pruned_loss=0.02284, over 4834.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0287, over 972101.70 frames.], batch size: 13, lr: 1.31e-04 +2022-05-08 23:57:52,758 INFO [train.py:715] (7/8) Epoch 17, batch 9650, loss[loss=0.1522, simple_loss=0.214, pruned_loss=0.04515, over 4649.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02919, over 971785.36 frames.], batch size: 13, lr: 1.31e-04 +2022-05-08 23:58:31,949 INFO 
[train.py:715] (7/8) Epoch 17, batch 9700, loss[loss=0.1244, simple_loss=0.209, pruned_loss=0.01988, over 4776.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.0289, over 971559.08 frames.], batch size: 18, lr: 1.31e-04 +2022-05-08 23:59:11,715 INFO [train.py:715] (7/8) Epoch 17, batch 9750, loss[loss=0.1302, simple_loss=0.1984, pruned_loss=0.03096, over 4746.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02874, over 971006.65 frames.], batch size: 12, lr: 1.31e-04 +2022-05-08 23:59:51,458 INFO [train.py:715] (7/8) Epoch 17, batch 9800, loss[loss=0.1435, simple_loss=0.2148, pruned_loss=0.0361, over 4958.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02885, over 971328.85 frames.], batch size: 35, lr: 1.31e-04 +2022-05-09 00:00:31,047 INFO [train.py:715] (7/8) Epoch 17, batch 9850, loss[loss=0.09087, simple_loss=0.1647, pruned_loss=0.008542, over 4808.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02875, over 971487.00 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:01:10,442 INFO [train.py:715] (7/8) Epoch 17, batch 9900, loss[loss=0.1119, simple_loss=0.1815, pruned_loss=0.02118, over 4658.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02914, over 971276.25 frames.], batch size: 13, lr: 1.31e-04 +2022-05-09 00:01:49,848 INFO [train.py:715] (7/8) Epoch 17, batch 9950, loss[loss=0.1178, simple_loss=0.1942, pruned_loss=0.02073, over 4796.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02925, over 970661.07 frames.], batch size: 24, lr: 1.31e-04 +2022-05-09 00:02:30,140 INFO [train.py:715] (7/8) Epoch 17, batch 10000, loss[loss=0.1433, simple_loss=0.207, pruned_loss=0.0398, over 4790.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2059, pruned_loss=0.02915, over 970913.44 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:03:09,389 INFO [train.py:715] (7/8) Epoch 17, batch 10050, loss[loss=0.1538, simple_loss=0.2177, pruned_loss=0.04495, over 4697.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2065, pruned_loss=0.02971, over 970097.58 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:03:48,275 INFO [train.py:715] (7/8) Epoch 17, batch 10100, loss[loss=0.1561, simple_loss=0.2238, pruned_loss=0.04424, over 4740.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2077, pruned_loss=0.02996, over 971394.63 frames.], batch size: 16, lr: 1.31e-04 +2022-05-09 00:04:27,592 INFO [train.py:715] (7/8) Epoch 17, batch 10150, loss[loss=0.1318, simple_loss=0.2037, pruned_loss=0.02999, over 4878.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2076, pruned_loss=0.02982, over 971335.54 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:05:06,928 INFO [train.py:715] (7/8) Epoch 17, batch 10200, loss[loss=0.1248, simple_loss=0.2023, pruned_loss=0.02359, over 4950.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02971, over 971293.26 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:05:44,870 INFO [train.py:715] (7/8) Epoch 17, batch 10250, loss[loss=0.1235, simple_loss=0.2014, pruned_loss=0.02281, over 4684.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02957, over 971577.23 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:06:24,650 INFO [train.py:715] (7/8) Epoch 17, batch 10300, loss[loss=0.1194, simple_loss=0.1946, pruned_loss=0.02213, over 4788.00 frames.], tot_loss[loss=0.1338, simple_loss=0.208, pruned_loss=0.02984, over 971161.26 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:07:04,575 INFO [train.py:715] 
(7/8) Epoch 17, batch 10350, loss[loss=0.156, simple_loss=0.242, pruned_loss=0.03499, over 4844.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2075, pruned_loss=0.02987, over 970475.59 frames.], batch size: 34, lr: 1.31e-04 +2022-05-09 00:07:43,244 INFO [train.py:715] (7/8) Epoch 17, batch 10400, loss[loss=0.1237, simple_loss=0.1789, pruned_loss=0.0343, over 4773.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02962, over 970775.96 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:08:22,343 INFO [train.py:715] (7/8) Epoch 17, batch 10450, loss[loss=0.1226, simple_loss=0.2006, pruned_loss=0.02231, over 4921.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02912, over 971473.58 frames.], batch size: 23, lr: 1.31e-04 +2022-05-09 00:09:02,377 INFO [train.py:715] (7/8) Epoch 17, batch 10500, loss[loss=0.1219, simple_loss=0.1933, pruned_loss=0.02524, over 4986.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02923, over 971683.34 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:09:41,414 INFO [train.py:715] (7/8) Epoch 17, batch 10550, loss[loss=0.138, simple_loss=0.2207, pruned_loss=0.0276, over 4932.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.029, over 971965.94 frames.], batch size: 24, lr: 1.31e-04 +2022-05-09 00:10:19,763 INFO [train.py:715] (7/8) Epoch 17, batch 10600, loss[loss=0.1056, simple_loss=0.1838, pruned_loss=0.01371, over 4922.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02897, over 971839.10 frames.], batch size: 18, lr: 1.31e-04 +2022-05-09 00:10:59,065 INFO [train.py:715] (7/8) Epoch 17, batch 10650, loss[loss=0.1454, simple_loss=0.215, pruned_loss=0.03795, over 4796.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02906, over 971713.08 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:11:38,577 INFO [train.py:715] (7/8) Epoch 17, batch 10700, loss[loss=0.1424, simple_loss=0.2187, pruned_loss=0.03309, over 4865.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02888, over 972160.02 frames.], batch size: 32, lr: 1.31e-04 +2022-05-09 00:12:17,257 INFO [train.py:715] (7/8) Epoch 17, batch 10750, loss[loss=0.1374, simple_loss=0.204, pruned_loss=0.03537, over 4990.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.0293, over 972578.07 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:12:56,257 INFO [train.py:715] (7/8) Epoch 17, batch 10800, loss[loss=0.1251, simple_loss=0.1977, pruned_loss=0.02629, over 4893.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02922, over 972560.57 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:13:36,022 INFO [train.py:715] (7/8) Epoch 17, batch 10850, loss[loss=0.1104, simple_loss=0.1883, pruned_loss=0.01627, over 4926.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.0293, over 972465.50 frames.], batch size: 29, lr: 1.31e-04 +2022-05-09 00:14:15,589 INFO [train.py:715] (7/8) Epoch 17, batch 10900, loss[loss=0.1142, simple_loss=0.1921, pruned_loss=0.01814, over 4913.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02896, over 973026.33 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:14:53,759 INFO [train.py:715] (7/8) Epoch 17, batch 10950, loss[loss=0.1431, simple_loss=0.2185, pruned_loss=0.03379, over 4757.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02894, over 972232.02 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:15:33,875 INFO [train.py:715] (7/8) Epoch 17, 
batch 11000, loss[loss=0.1151, simple_loss=0.1822, pruned_loss=0.02399, over 4853.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02921, over 972191.90 frames.], batch size: 34, lr: 1.31e-04 +2022-05-09 00:16:13,746 INFO [train.py:715] (7/8) Epoch 17, batch 11050, loss[loss=0.1289, simple_loss=0.2101, pruned_loss=0.02385, over 4779.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02919, over 972711.85 frames.], batch size: 18, lr: 1.31e-04 +2022-05-09 00:16:52,425 INFO [train.py:715] (7/8) Epoch 17, batch 11100, loss[loss=0.1309, simple_loss=0.1946, pruned_loss=0.03359, over 4865.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.0294, over 972338.19 frames.], batch size: 13, lr: 1.31e-04 +2022-05-09 00:17:31,479 INFO [train.py:715] (7/8) Epoch 17, batch 11150, loss[loss=0.1129, simple_loss=0.1982, pruned_loss=0.01386, over 4938.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02888, over 972730.00 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:18:11,487 INFO [train.py:715] (7/8) Epoch 17, batch 11200, loss[loss=0.1254, simple_loss=0.2022, pruned_loss=0.0243, over 4929.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02873, over 972552.81 frames.], batch size: 23, lr: 1.31e-04 +2022-05-09 00:18:51,604 INFO [train.py:715] (7/8) Epoch 17, batch 11250, loss[loss=0.1366, simple_loss=0.2103, pruned_loss=0.0314, over 4892.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.0291, over 973242.91 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:19:29,834 INFO [train.py:715] (7/8) Epoch 17, batch 11300, loss[loss=0.1241, simple_loss=0.1966, pruned_loss=0.02575, over 4944.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02866, over 973321.77 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:20:09,304 INFO [train.py:715] (7/8) Epoch 17, batch 11350, loss[loss=0.1318, simple_loss=0.2127, pruned_loss=0.02551, over 4961.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02892, over 973783.76 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 00:20:49,487 INFO [train.py:715] (7/8) Epoch 17, batch 11400, loss[loss=0.1312, simple_loss=0.2158, pruned_loss=0.02332, over 4885.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02915, over 974412.93 frames.], batch size: 22, lr: 1.31e-04 +2022-05-09 00:21:28,500 INFO [train.py:715] (7/8) Epoch 17, batch 11450, loss[loss=0.1171, simple_loss=0.1972, pruned_loss=0.01855, over 4897.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02879, over 974975.34 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:22:07,512 INFO [train.py:715] (7/8) Epoch 17, batch 11500, loss[loss=0.1138, simple_loss=0.1944, pruned_loss=0.01656, over 4949.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2073, pruned_loss=0.02889, over 974045.73 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:22:47,221 INFO [train.py:715] (7/8) Epoch 17, batch 11550, loss[loss=0.1313, simple_loss=0.2006, pruned_loss=0.03099, over 4894.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02901, over 973842.88 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:23:27,158 INFO [train.py:715] (7/8) Epoch 17, batch 11600, loss[loss=0.1402, simple_loss=0.2169, pruned_loss=0.03177, over 4771.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02904, over 973124.58 frames.], batch size: 18, lr: 1.31e-04 +2022-05-09 00:24:05,130 INFO [train.py:715] (7/8) Epoch 17, batch 11650, 
loss[loss=0.1409, simple_loss=0.2157, pruned_loss=0.03307, over 4736.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2076, pruned_loss=0.02899, over 973193.94 frames.], batch size: 16, lr: 1.31e-04 +2022-05-09 00:24:44,953 INFO [train.py:715] (7/8) Epoch 17, batch 11700, loss[loss=0.1158, simple_loss=0.1912, pruned_loss=0.02017, over 4811.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2078, pruned_loss=0.02922, over 972683.51 frames.], batch size: 26, lr: 1.31e-04 +2022-05-09 00:25:24,936 INFO [train.py:715] (7/8) Epoch 17, batch 11750, loss[loss=0.1547, simple_loss=0.2235, pruned_loss=0.04298, over 4866.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02931, over 972124.94 frames.], batch size: 38, lr: 1.31e-04 +2022-05-09 00:26:03,881 INFO [train.py:715] (7/8) Epoch 17, batch 11800, loss[loss=0.13, simple_loss=0.2107, pruned_loss=0.02464, over 4967.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02947, over 972133.63 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:26:42,875 INFO [train.py:715] (7/8) Epoch 17, batch 11850, loss[loss=0.1499, simple_loss=0.2289, pruned_loss=0.03542, over 4817.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.0296, over 973068.08 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:27:22,144 INFO [train.py:715] (7/8) Epoch 17, batch 11900, loss[loss=0.14, simple_loss=0.2113, pruned_loss=0.03435, over 4784.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02947, over 972901.07 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:28:01,951 INFO [train.py:715] (7/8) Epoch 17, batch 11950, loss[loss=0.1278, simple_loss=0.2151, pruned_loss=0.02021, over 4815.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02937, over 973258.56 frames.], batch size: 26, lr: 1.31e-04 +2022-05-09 00:28:40,964 INFO [train.py:715] (7/8) Epoch 17, batch 12000, loss[loss=0.1308, simple_loss=0.2026, pruned_loss=0.02954, over 4835.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02937, over 973075.72 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:28:40,965 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 00:28:52,719 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1048, simple_loss=0.1882, pruned_loss=0.0107, over 914524.00 frames. 
+2022-05-09 00:29:31,826 INFO [train.py:715] (7/8) Epoch 17, batch 12050, loss[loss=0.1303, simple_loss=0.2112, pruned_loss=0.02471, over 4931.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02924, over 972651.82 frames.], batch size: 29, lr: 1.31e-04 +2022-05-09 00:30:10,919 INFO [train.py:715] (7/8) Epoch 17, batch 12100, loss[loss=0.1234, simple_loss=0.201, pruned_loss=0.02292, over 4982.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02934, over 972891.50 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:30:50,930 INFO [train.py:715] (7/8) Epoch 17, batch 12150, loss[loss=0.1246, simple_loss=0.191, pruned_loss=0.02909, over 4807.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02929, over 973512.97 frames.], batch size: 21, lr: 1.31e-04 +2022-05-09 00:31:29,663 INFO [train.py:715] (7/8) Epoch 17, batch 12200, loss[loss=0.1427, simple_loss=0.2165, pruned_loss=0.03443, over 4839.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02896, over 972662.82 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:32:08,197 INFO [train.py:715] (7/8) Epoch 17, batch 12250, loss[loss=0.164, simple_loss=0.2322, pruned_loss=0.04789, over 4971.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02889, over 971975.52 frames.], batch size: 24, lr: 1.31e-04 +2022-05-09 00:32:47,687 INFO [train.py:715] (7/8) Epoch 17, batch 12300, loss[loss=0.1216, simple_loss=0.1934, pruned_loss=0.02485, over 4834.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02922, over 971218.12 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:33:26,861 INFO [train.py:715] (7/8) Epoch 17, batch 12350, loss[loss=0.1184, simple_loss=0.1952, pruned_loss=0.02084, over 4832.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02956, over 970955.62 frames.], batch size: 13, lr: 1.31e-04 +2022-05-09 00:34:05,567 INFO [train.py:715] (7/8) Epoch 17, batch 12400, loss[loss=0.1631, simple_loss=0.2414, pruned_loss=0.04239, over 4959.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2074, pruned_loss=0.0297, over 970566.17 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:34:44,617 INFO [train.py:715] (7/8) Epoch 17, batch 12450, loss[loss=0.1231, simple_loss=0.2002, pruned_loss=0.023, over 4738.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02969, over 971257.42 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:35:24,999 INFO [train.py:715] (7/8) Epoch 17, batch 12500, loss[loss=0.1134, simple_loss=0.1916, pruned_loss=0.01756, over 4917.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02966, over 971288.52 frames.], batch size: 29, lr: 1.31e-04 +2022-05-09 00:36:03,578 INFO [train.py:715] (7/8) Epoch 17, batch 12550, loss[loss=0.1194, simple_loss=0.1915, pruned_loss=0.0236, over 4972.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.0297, over 971640.58 frames.], batch size: 25, lr: 1.31e-04 +2022-05-09 00:36:42,925 INFO [train.py:715] (7/8) Epoch 17, batch 12600, loss[loss=0.1402, simple_loss=0.2037, pruned_loss=0.03838, over 4933.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2073, pruned_loss=0.02991, over 971805.03 frames.], batch size: 18, lr: 1.31e-04 +2022-05-09 00:37:22,858 INFO [train.py:715] (7/8) Epoch 17, batch 12650, loss[loss=0.1277, simple_loss=0.1998, pruned_loss=0.02781, over 4948.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02961, over 970602.22 frames.], batch size: 39, lr: 1.31e-04 +2022-05-09 
00:38:02,853 INFO [train.py:715] (7/8) Epoch 17, batch 12700, loss[loss=0.1306, simple_loss=0.2031, pruned_loss=0.0291, over 4759.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02957, over 971500.14 frames.], batch size: 19, lr: 1.31e-04 +2022-05-09 00:38:42,159 INFO [train.py:715] (7/8) Epoch 17, batch 12750, loss[loss=0.1603, simple_loss=0.2369, pruned_loss=0.04182, over 4776.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02971, over 971364.32 frames.], batch size: 17, lr: 1.31e-04 +2022-05-09 00:39:20,963 INFO [train.py:715] (7/8) Epoch 17, batch 12800, loss[loss=0.1106, simple_loss=0.1861, pruned_loss=0.01754, over 4880.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2076, pruned_loss=0.03004, over 972257.21 frames.], batch size: 16, lr: 1.31e-04 +2022-05-09 00:40:00,601 INFO [train.py:715] (7/8) Epoch 17, batch 12850, loss[loss=0.1167, simple_loss=0.1954, pruned_loss=0.01897, over 4895.00 frames.], tot_loss[loss=0.134, simple_loss=0.2081, pruned_loss=0.02992, over 973407.65 frames.], batch size: 16, lr: 1.31e-04 +2022-05-09 00:40:39,040 INFO [train.py:715] (7/8) Epoch 17, batch 12900, loss[loss=0.1203, simple_loss=0.1952, pruned_loss=0.02269, over 4837.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02981, over 974084.47 frames.], batch size: 15, lr: 1.31e-04 +2022-05-09 00:41:18,425 INFO [train.py:715] (7/8) Epoch 17, batch 12950, loss[loss=0.1507, simple_loss=0.2352, pruned_loss=0.03314, over 4817.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02916, over 974754.61 frames.], batch size: 25, lr: 1.31e-04 +2022-05-09 00:41:57,020 INFO [train.py:715] (7/8) Epoch 17, batch 13000, loss[loss=0.1284, simple_loss=0.204, pruned_loss=0.0264, over 4791.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02927, over 974493.96 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:42:36,102 INFO [train.py:715] (7/8) Epoch 17, batch 13050, loss[loss=0.1341, simple_loss=0.197, pruned_loss=0.03564, over 4794.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02947, over 974128.23 frames.], batch size: 14, lr: 1.31e-04 +2022-05-09 00:43:15,222 INFO [train.py:715] (7/8) Epoch 17, batch 13100, loss[loss=0.1349, simple_loss=0.216, pruned_loss=0.02688, over 4958.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02902, over 973689.07 frames.], batch size: 24, lr: 1.31e-04 +2022-05-09 00:43:54,021 INFO [train.py:715] (7/8) Epoch 17, batch 13150, loss[loss=0.09837, simple_loss=0.1758, pruned_loss=0.01047, over 4705.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02916, over 973554.87 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:44:33,799 INFO [train.py:715] (7/8) Epoch 17, batch 13200, loss[loss=0.1327, simple_loss=0.2031, pruned_loss=0.03112, over 4787.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2078, pruned_loss=0.02993, over 974226.70 frames.], batch size: 13, lr: 1.31e-04 +2022-05-09 00:45:12,325 INFO [train.py:715] (7/8) Epoch 17, batch 13250, loss[loss=0.1166, simple_loss=0.1713, pruned_loss=0.03095, over 4755.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03007, over 974444.67 frames.], batch size: 12, lr: 1.31e-04 +2022-05-09 00:45:51,625 INFO [train.py:715] (7/8) Epoch 17, batch 13300, loss[loss=0.1535, simple_loss=0.2351, pruned_loss=0.03595, over 4804.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2063, pruned_loss=0.02952, over 973681.51 frames.], batch size: 25, lr: 1.31e-04 +2022-05-09 00:46:30,710 
INFO [train.py:715] (7/8) Epoch 17, batch 13350, loss[loss=0.1208, simple_loss=0.1958, pruned_loss=0.02285, over 4967.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02882, over 974054.19 frames.], batch size: 24, lr: 1.31e-04 +2022-05-09 00:47:09,941 INFO [train.py:715] (7/8) Epoch 17, batch 13400, loss[loss=0.1239, simple_loss=0.2072, pruned_loss=0.02033, over 4988.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02889, over 973655.41 frames.], batch size: 28, lr: 1.31e-04 +2022-05-09 00:47:49,251 INFO [train.py:715] (7/8) Epoch 17, batch 13450, loss[loss=0.1192, simple_loss=0.1912, pruned_loss=0.02362, over 4699.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02906, over 973653.47 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 00:48:27,713 INFO [train.py:715] (7/8) Epoch 17, batch 13500, loss[loss=0.1274, simple_loss=0.2139, pruned_loss=0.02047, over 4967.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02948, over 973505.66 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 00:49:07,420 INFO [train.py:715] (7/8) Epoch 17, batch 13550, loss[loss=0.1152, simple_loss=0.1888, pruned_loss=0.0208, over 4902.00 frames.], tot_loss[loss=0.1336, simple_loss=0.208, pruned_loss=0.02953, over 972880.09 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 00:49:45,773 INFO [train.py:715] (7/8) Epoch 17, batch 13600, loss[loss=0.1242, simple_loss=0.1916, pruned_loss=0.0284, over 4783.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02952, over 972894.52 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 00:50:24,816 INFO [train.py:715] (7/8) Epoch 17, batch 13650, loss[loss=0.1225, simple_loss=0.1951, pruned_loss=0.02494, over 4931.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02895, over 972953.46 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 00:51:04,636 INFO [train.py:715] (7/8) Epoch 17, batch 13700, loss[loss=0.1207, simple_loss=0.1904, pruned_loss=0.02548, over 4976.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2051, pruned_loss=0.02821, over 973431.87 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 00:51:43,955 INFO [train.py:715] (7/8) Epoch 17, batch 13750, loss[loss=0.148, simple_loss=0.2165, pruned_loss=0.03975, over 4961.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02848, over 973000.43 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 00:52:24,099 INFO [train.py:715] (7/8) Epoch 17, batch 13800, loss[loss=0.1089, simple_loss=0.1827, pruned_loss=0.01752, over 4827.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02842, over 973467.29 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 00:53:03,511 INFO [train.py:715] (7/8) Epoch 17, batch 13850, loss[loss=0.1418, simple_loss=0.2201, pruned_loss=0.03177, over 4873.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02841, over 973469.17 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 00:53:43,319 INFO [train.py:715] (7/8) Epoch 17, batch 13900, loss[loss=0.1345, simple_loss=0.2173, pruned_loss=0.02588, over 4812.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02872, over 973574.27 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 00:54:22,809 INFO [train.py:715] (7/8) Epoch 17, batch 13950, loss[loss=0.1203, simple_loss=0.1916, pruned_loss=0.02453, over 4857.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02895, over 973242.55 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 00:55:02,840 INFO 
[train.py:715] (7/8) Epoch 17, batch 14000, loss[loss=0.1458, simple_loss=0.2179, pruned_loss=0.0368, over 4789.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02915, over 972481.78 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 00:55:42,002 INFO [train.py:715] (7/8) Epoch 17, batch 14050, loss[loss=0.1305, simple_loss=0.2038, pruned_loss=0.02867, over 4897.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02975, over 974136.06 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 00:56:21,076 INFO [train.py:715] (7/8) Epoch 17, batch 14100, loss[loss=0.1563, simple_loss=0.2295, pruned_loss=0.04152, over 4761.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2081, pruned_loss=0.03013, over 973594.46 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 00:57:01,246 INFO [train.py:715] (7/8) Epoch 17, batch 14150, loss[loss=0.1458, simple_loss=0.2181, pruned_loss=0.03674, over 4784.00 frames.], tot_loss[loss=0.1343, simple_loss=0.208, pruned_loss=0.03033, over 972868.66 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 00:57:40,318 INFO [train.py:715] (7/8) Epoch 17, batch 14200, loss[loss=0.1303, simple_loss=0.2068, pruned_loss=0.02685, over 4883.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02987, over 973457.17 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 00:58:19,833 INFO [train.py:715] (7/8) Epoch 17, batch 14250, loss[loss=0.1216, simple_loss=0.2006, pruned_loss=0.02133, over 4820.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02957, over 973256.45 frames.], batch size: 27, lr: 1.30e-04 +2022-05-09 00:58:59,000 INFO [train.py:715] (7/8) Epoch 17, batch 14300, loss[loss=0.1322, simple_loss=0.2088, pruned_loss=0.02778, over 4831.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02957, over 972657.51 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 00:59:38,850 INFO [train.py:715] (7/8) Epoch 17, batch 14350, loss[loss=0.122, simple_loss=0.2055, pruned_loss=0.01922, over 4742.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02936, over 972661.81 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:00:17,888 INFO [train.py:715] (7/8) Epoch 17, batch 14400, loss[loss=0.1436, simple_loss=0.2195, pruned_loss=0.03383, over 4870.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02949, over 972217.37 frames.], batch size: 38, lr: 1.30e-04 +2022-05-09 01:00:56,579 INFO [train.py:715] (7/8) Epoch 17, batch 14450, loss[loss=0.1273, simple_loss=0.204, pruned_loss=0.0253, over 4913.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02948, over 972142.15 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 01:01:36,313 INFO [train.py:715] (7/8) Epoch 17, batch 14500, loss[loss=0.137, simple_loss=0.2215, pruned_loss=0.02623, over 4905.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2089, pruned_loss=0.02973, over 972169.66 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:02:15,672 INFO [train.py:715] (7/8) Epoch 17, batch 14550, loss[loss=0.1455, simple_loss=0.2303, pruned_loss=0.03037, over 4804.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02964, over 972326.60 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:02:54,149 INFO [train.py:715] (7/8) Epoch 17, batch 14600, loss[loss=0.1211, simple_loss=0.1967, pruned_loss=0.02275, over 4969.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2083, pruned_loss=0.02951, over 972358.61 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 01:03:33,787 INFO 
[train.py:715] (7/8) Epoch 17, batch 14650, loss[loss=0.1081, simple_loss=0.1793, pruned_loss=0.01847, over 4823.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02934, over 971634.11 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:04:13,439 INFO [train.py:715] (7/8) Epoch 17, batch 14700, loss[loss=0.1279, simple_loss=0.202, pruned_loss=0.02685, over 4812.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02902, over 972117.33 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:04:52,651 INFO [train.py:715] (7/8) Epoch 17, batch 14750, loss[loss=0.1168, simple_loss=0.1883, pruned_loss=0.02265, over 4990.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02878, over 971565.75 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:05:31,537 INFO [train.py:715] (7/8) Epoch 17, batch 14800, loss[loss=0.1184, simple_loss=0.2002, pruned_loss=0.01835, over 4797.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02855, over 971676.83 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:06:11,604 INFO [train.py:715] (7/8) Epoch 17, batch 14850, loss[loss=0.1228, simple_loss=0.1984, pruned_loss=0.02354, over 4883.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02856, over 971868.39 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:06:50,386 INFO [train.py:715] (7/8) Epoch 17, batch 14900, loss[loss=0.1576, simple_loss=0.2321, pruned_loss=0.0415, over 4931.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02866, over 973085.30 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 01:07:29,331 INFO [train.py:715] (7/8) Epoch 17, batch 14950, loss[loss=0.1182, simple_loss=0.1954, pruned_loss=0.02048, over 4878.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02845, over 972273.98 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:08:09,015 INFO [train.py:715] (7/8) Epoch 17, batch 15000, loss[loss=0.12, simple_loss=0.2031, pruned_loss=0.01845, over 4761.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02872, over 971055.95 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:08:09,016 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 01:08:19,082 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1046, simple_loss=0.1881, pruned_loss=0.01059, over 914524.00 frames. 
+2022-05-09 01:08:59,146 INFO [train.py:715] (7/8) Epoch 17, batch 15050, loss[loss=0.1335, simple_loss=0.205, pruned_loss=0.03099, over 4939.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02883, over 971441.74 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 01:09:38,652 INFO [train.py:715] (7/8) Epoch 17, batch 15100, loss[loss=0.1466, simple_loss=0.2214, pruned_loss=0.0359, over 4920.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02902, over 971806.93 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 01:10:17,575 INFO [train.py:715] (7/8) Epoch 17, batch 15150, loss[loss=0.148, simple_loss=0.2307, pruned_loss=0.03271, over 4868.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02933, over 971732.12 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:10:56,611 INFO [train.py:715] (7/8) Epoch 17, batch 15200, loss[loss=0.1128, simple_loss=0.1844, pruned_loss=0.02059, over 4835.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02926, over 971961.09 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:11:36,236 INFO [train.py:715] (7/8) Epoch 17, batch 15250, loss[loss=0.1244, simple_loss=0.2052, pruned_loss=0.0218, over 4948.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02966, over 971962.64 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:12:15,603 INFO [train.py:715] (7/8) Epoch 17, batch 15300, loss[loss=0.1459, simple_loss=0.2146, pruned_loss=0.0386, over 4747.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02955, over 971764.83 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:12:53,857 INFO [train.py:715] (7/8) Epoch 17, batch 15350, loss[loss=0.1446, simple_loss=0.2143, pruned_loss=0.03744, over 4984.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02947, over 971929.29 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 01:13:33,405 INFO [train.py:715] (7/8) Epoch 17, batch 15400, loss[loss=0.1344, simple_loss=0.2084, pruned_loss=0.0302, over 4836.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02925, over 971950.23 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:14:12,478 INFO [train.py:715] (7/8) Epoch 17, batch 15450, loss[loss=0.1225, simple_loss=0.1959, pruned_loss=0.02457, over 4690.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02943, over 972909.32 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:14:51,820 INFO [train.py:715] (7/8) Epoch 17, batch 15500, loss[loss=0.1415, simple_loss=0.208, pruned_loss=0.03753, over 4828.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02955, over 972761.10 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:15:30,652 INFO [train.py:715] (7/8) Epoch 17, batch 15550, loss[loss=0.1273, simple_loss=0.1997, pruned_loss=0.0274, over 4967.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02967, over 972366.28 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:16:10,372 INFO [train.py:715] (7/8) Epoch 17, batch 15600, loss[loss=0.1356, simple_loss=0.2154, pruned_loss=0.02793, over 4988.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02946, over 972657.63 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 01:16:49,789 INFO [train.py:715] (7/8) Epoch 17, batch 15650, loss[loss=0.147, simple_loss=0.21, pruned_loss=0.04206, over 4975.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02925, over 972456.12 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 
01:17:27,917 INFO [train.py:715] (7/8) Epoch 17, batch 15700, loss[loss=0.1427, simple_loss=0.2003, pruned_loss=0.04251, over 4772.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02957, over 971721.13 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:18:07,730 INFO [train.py:715] (7/8) Epoch 17, batch 15750, loss[loss=0.09742, simple_loss=0.1659, pruned_loss=0.01449, over 4782.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02923, over 971627.56 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:18:47,135 INFO [train.py:715] (7/8) Epoch 17, batch 15800, loss[loss=0.1174, simple_loss=0.1947, pruned_loss=0.02004, over 4993.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02961, over 971597.45 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:19:26,082 INFO [train.py:715] (7/8) Epoch 17, batch 15850, loss[loss=0.123, simple_loss=0.1968, pruned_loss=0.02463, over 4762.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2084, pruned_loss=0.02996, over 972029.17 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:20:04,719 INFO [train.py:715] (7/8) Epoch 17, batch 15900, loss[loss=0.1381, simple_loss=0.236, pruned_loss=0.02004, over 4866.00 frames.], tot_loss[loss=0.134, simple_loss=0.2085, pruned_loss=0.02977, over 972815.28 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:20:44,129 INFO [train.py:715] (7/8) Epoch 17, batch 15950, loss[loss=0.1372, simple_loss=0.2157, pruned_loss=0.02934, over 4983.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02957, over 972459.68 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 01:21:23,628 INFO [train.py:715] (7/8) Epoch 17, batch 16000, loss[loss=0.1334, simple_loss=0.2092, pruned_loss=0.02885, over 4807.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02951, over 972259.09 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:22:02,017 INFO [train.py:715] (7/8) Epoch 17, batch 16050, loss[loss=0.1263, simple_loss=0.2049, pruned_loss=0.02388, over 4804.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02903, over 971988.50 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:22:42,060 INFO [train.py:715] (7/8) Epoch 17, batch 16100, loss[loss=0.1776, simple_loss=0.2514, pruned_loss=0.05189, over 4827.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.02968, over 971925.74 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 01:23:21,961 INFO [train.py:715] (7/8) Epoch 17, batch 16150, loss[loss=0.1076, simple_loss=0.1876, pruned_loss=0.01382, over 4964.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02949, over 972110.34 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 01:24:01,721 INFO [train.py:715] (7/8) Epoch 17, batch 16200, loss[loss=0.1436, simple_loss=0.2253, pruned_loss=0.0309, over 4741.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02904, over 972434.57 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:24:43,129 INFO [train.py:715] (7/8) Epoch 17, batch 16250, loss[loss=0.1205, simple_loss=0.1995, pruned_loss=0.02075, over 4772.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02896, over 972373.27 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:25:23,141 INFO [train.py:715] (7/8) Epoch 17, batch 16300, loss[loss=0.1264, simple_loss=0.1999, pruned_loss=0.02645, over 4811.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02841, over 970850.36 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 
01:26:02,219 INFO [train.py:715] (7/8) Epoch 17, batch 16350, loss[loss=0.1209, simple_loss=0.1938, pruned_loss=0.02395, over 4832.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02842, over 971514.29 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 01:26:40,872 INFO [train.py:715] (7/8) Epoch 17, batch 16400, loss[loss=0.1597, simple_loss=0.2409, pruned_loss=0.03927, over 4842.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.0289, over 971528.76 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:27:20,588 INFO [train.py:715] (7/8) Epoch 17, batch 16450, loss[loss=0.1305, simple_loss=0.2077, pruned_loss=0.0267, over 4913.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02916, over 971369.66 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 01:28:00,551 INFO [train.py:715] (7/8) Epoch 17, batch 16500, loss[loss=0.1442, simple_loss=0.2057, pruned_loss=0.04133, over 4856.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02917, over 972266.12 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:28:39,573 INFO [train.py:715] (7/8) Epoch 17, batch 16550, loss[loss=0.1588, simple_loss=0.2272, pruned_loss=0.04523, over 4694.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.0293, over 972157.88 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:29:18,075 INFO [train.py:715] (7/8) Epoch 17, batch 16600, loss[loss=0.1398, simple_loss=0.207, pruned_loss=0.03631, over 4706.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.0293, over 973059.64 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:29:58,264 INFO [train.py:715] (7/8) Epoch 17, batch 16650, loss[loss=0.1196, simple_loss=0.1876, pruned_loss=0.02577, over 4752.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02951, over 971965.84 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:30:38,047 INFO [train.py:715] (7/8) Epoch 17, batch 16700, loss[loss=0.1259, simple_loss=0.1964, pruned_loss=0.02767, over 4959.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02925, over 972754.88 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 01:31:16,491 INFO [train.py:715] (7/8) Epoch 17, batch 16750, loss[loss=0.1709, simple_loss=0.2501, pruned_loss=0.0458, over 4976.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02959, over 973172.38 frames.], batch size: 28, lr: 1.30e-04 +2022-05-09 01:31:56,310 INFO [train.py:715] (7/8) Epoch 17, batch 16800, loss[loss=0.1151, simple_loss=0.1818, pruned_loss=0.02423, over 4861.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2076, pruned_loss=0.02972, over 974102.78 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:32:35,733 INFO [train.py:715] (7/8) Epoch 17, batch 16850, loss[loss=0.1108, simple_loss=0.1836, pruned_loss=0.01899, over 4935.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02939, over 973887.63 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:33:15,643 INFO [train.py:715] (7/8) Epoch 17, batch 16900, loss[loss=0.1281, simple_loss=0.2111, pruned_loss=0.02253, over 4968.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02926, over 974352.03 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:33:53,866 INFO [train.py:715] (7/8) Epoch 17, batch 16950, loss[loss=0.1257, simple_loss=0.2059, pruned_loss=0.02275, over 4789.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02917, over 974706.63 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 01:34:33,425 
INFO [train.py:715] (7/8) Epoch 17, batch 17000, loss[loss=0.1457, simple_loss=0.2219, pruned_loss=0.03471, over 4777.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02967, over 974617.31 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 01:35:12,912 INFO [train.py:715] (7/8) Epoch 17, batch 17050, loss[loss=0.1342, simple_loss=0.2119, pruned_loss=0.02828, over 4881.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02957, over 973706.39 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:35:51,175 INFO [train.py:715] (7/8) Epoch 17, batch 17100, loss[loss=0.1482, simple_loss=0.2241, pruned_loss=0.03611, over 4854.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02938, over 973646.28 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:36:30,682 INFO [train.py:715] (7/8) Epoch 17, batch 17150, loss[loss=0.1608, simple_loss=0.2292, pruned_loss=0.04621, over 4899.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02925, over 972926.02 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 01:37:10,052 INFO [train.py:715] (7/8) Epoch 17, batch 17200, loss[loss=0.1173, simple_loss=0.183, pruned_loss=0.02578, over 4755.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02922, over 973389.15 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:37:48,553 INFO [train.py:715] (7/8) Epoch 17, batch 17250, loss[loss=0.1521, simple_loss=0.2316, pruned_loss=0.0363, over 4910.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02932, over 972789.53 frames.], batch size: 38, lr: 1.30e-04 +2022-05-09 01:38:26,822 INFO [train.py:715] (7/8) Epoch 17, batch 17300, loss[loss=0.1341, simple_loss=0.2097, pruned_loss=0.02924, over 4937.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2065, pruned_loss=0.02949, over 972429.64 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:39:06,136 INFO [train.py:715] (7/8) Epoch 17, batch 17350, loss[loss=0.1268, simple_loss=0.206, pruned_loss=0.02381, over 4791.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02936, over 972044.43 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 01:39:45,337 INFO [train.py:715] (7/8) Epoch 17, batch 17400, loss[loss=0.1496, simple_loss=0.2243, pruned_loss=0.03742, over 4753.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02886, over 972617.09 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 01:40:23,321 INFO [train.py:715] (7/8) Epoch 17, batch 17450, loss[loss=0.1212, simple_loss=0.191, pruned_loss=0.02573, over 4768.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02917, over 972364.05 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:41:03,010 INFO [train.py:715] (7/8) Epoch 17, batch 17500, loss[loss=0.1273, simple_loss=0.1971, pruned_loss=0.02877, over 4994.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02898, over 972978.88 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:41:42,135 INFO [train.py:715] (7/8) Epoch 17, batch 17550, loss[loss=0.1084, simple_loss=0.1912, pruned_loss=0.01283, over 4983.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02968, over 972764.33 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:42:20,890 INFO [train.py:715] (7/8) Epoch 17, batch 17600, loss[loss=0.1888, simple_loss=0.2678, pruned_loss=0.05494, over 4824.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2089, pruned_loss=0.03006, over 972254.49 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:42:59,401 INFO 
[train.py:715] (7/8) Epoch 17, batch 17650, loss[loss=0.1259, simple_loss=0.1868, pruned_loss=0.03251, over 4854.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2091, pruned_loss=0.0302, over 971918.39 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 01:43:38,883 INFO [train.py:715] (7/8) Epoch 17, batch 17700, loss[loss=0.1407, simple_loss=0.2247, pruned_loss=0.02831, over 4863.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02993, over 971193.06 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:44:17,600 INFO [train.py:715] (7/8) Epoch 17, batch 17750, loss[loss=0.1027, simple_loss=0.1677, pruned_loss=0.01885, over 4876.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2075, pruned_loss=0.03001, over 972251.19 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:44:56,092 INFO [train.py:715] (7/8) Epoch 17, batch 17800, loss[loss=0.1236, simple_loss=0.1915, pruned_loss=0.02787, over 4978.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02937, over 972440.90 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:45:35,676 INFO [train.py:715] (7/8) Epoch 17, batch 17850, loss[loss=0.1394, simple_loss=0.2118, pruned_loss=0.03352, over 4850.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02966, over 972758.19 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:46:14,672 INFO [train.py:715] (7/8) Epoch 17, batch 17900, loss[loss=0.1163, simple_loss=0.1721, pruned_loss=0.03027, over 4853.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02965, over 972660.94 frames.], batch size: 12, lr: 1.30e-04 +2022-05-09 01:46:54,014 INFO [train.py:715] (7/8) Epoch 17, batch 17950, loss[loss=0.163, simple_loss=0.2309, pruned_loss=0.04752, over 4783.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02954, over 971757.97 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 01:47:32,275 INFO [train.py:715] (7/8) Epoch 17, batch 18000, loss[loss=0.1263, simple_loss=0.1974, pruned_loss=0.02762, over 4989.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02962, over 972317.65 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 01:47:32,276 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 01:47:42,062 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01066, over 914524.00 frames. 
+2022-05-09 01:48:20,791 INFO [train.py:715] (7/8) Epoch 17, batch 18050, loss[loss=0.1296, simple_loss=0.1943, pruned_loss=0.03247, over 4837.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2087, pruned_loss=0.03038, over 971801.30 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:49:00,412 INFO [train.py:715] (7/8) Epoch 17, batch 18100, loss[loss=0.1217, simple_loss=0.1867, pruned_loss=0.02831, over 4933.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2081, pruned_loss=0.0299, over 972234.94 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 01:49:39,799 INFO [train.py:715] (7/8) Epoch 17, batch 18150, loss[loss=0.1698, simple_loss=0.2478, pruned_loss=0.04588, over 4951.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2086, pruned_loss=0.02991, over 973295.85 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 01:50:17,779 INFO [train.py:715] (7/8) Epoch 17, batch 18200, loss[loss=0.1228, simple_loss=0.1901, pruned_loss=0.02774, over 4818.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02958, over 973519.95 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 01:50:57,530 INFO [train.py:715] (7/8) Epoch 17, batch 18250, loss[loss=0.1303, simple_loss=0.2203, pruned_loss=0.02012, over 4879.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.0294, over 973411.75 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 01:51:37,060 INFO [train.py:715] (7/8) Epoch 17, batch 18300, loss[loss=0.1226, simple_loss=0.1972, pruned_loss=0.024, over 4977.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02883, over 973325.99 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:52:15,575 INFO [train.py:715] (7/8) Epoch 17, batch 18350, loss[loss=0.1266, simple_loss=0.2071, pruned_loss=0.02308, over 4653.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.0289, over 973111.18 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:52:55,004 INFO [train.py:715] (7/8) Epoch 17, batch 18400, loss[loss=0.1651, simple_loss=0.2316, pruned_loss=0.04929, over 4786.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.02932, over 972895.89 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 01:53:33,900 INFO [train.py:715] (7/8) Epoch 17, batch 18450, loss[loss=0.1297, simple_loss=0.203, pruned_loss=0.02817, over 4974.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02915, over 972247.24 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 01:54:13,086 INFO [train.py:715] (7/8) Epoch 17, batch 18500, loss[loss=0.1469, simple_loss=0.2213, pruned_loss=0.03629, over 4641.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02905, over 971334.36 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:54:51,415 INFO [train.py:715] (7/8) Epoch 17, batch 18550, loss[loss=0.1305, simple_loss=0.2108, pruned_loss=0.02508, over 4855.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2078, pruned_loss=0.02902, over 971392.36 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 01:55:30,375 INFO [train.py:715] (7/8) Epoch 17, batch 18600, loss[loss=0.1329, simple_loss=0.2137, pruned_loss=0.0261, over 4815.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2074, pruned_loss=0.02854, over 971904.17 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 01:56:09,533 INFO [train.py:715] (7/8) Epoch 17, batch 18650, loss[loss=0.1402, simple_loss=0.2119, pruned_loss=0.03418, over 4936.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.02933, over 972226.66 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 
01:56:47,377 INFO [train.py:715] (7/8) Epoch 17, batch 18700, loss[loss=0.1273, simple_loss=0.2029, pruned_loss=0.02587, over 4818.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2083, pruned_loss=0.02941, over 971484.33 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 01:57:27,056 INFO [train.py:715] (7/8) Epoch 17, batch 18750, loss[loss=0.1387, simple_loss=0.2012, pruned_loss=0.03804, over 4864.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02932, over 970943.50 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 01:58:06,644 INFO [train.py:715] (7/8) Epoch 17, batch 18800, loss[loss=0.1657, simple_loss=0.248, pruned_loss=0.04171, over 4944.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.02965, over 972051.20 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 01:58:45,348 INFO [train.py:715] (7/8) Epoch 17, batch 18850, loss[loss=0.1511, simple_loss=0.226, pruned_loss=0.03813, over 4827.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2085, pruned_loss=0.02952, over 972428.77 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 01:59:23,454 INFO [train.py:715] (7/8) Epoch 17, batch 18900, loss[loss=0.115, simple_loss=0.1892, pruned_loss=0.02045, over 4787.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2086, pruned_loss=0.02953, over 972789.75 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:00:02,551 INFO [train.py:715] (7/8) Epoch 17, batch 18950, loss[loss=0.1359, simple_loss=0.2085, pruned_loss=0.03161, over 4851.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02916, over 973177.85 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 02:00:41,834 INFO [train.py:715] (7/8) Epoch 17, batch 19000, loss[loss=0.1331, simple_loss=0.2094, pruned_loss=0.02844, over 4954.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02944, over 973354.77 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 02:01:20,328 INFO [train.py:715] (7/8) Epoch 17, batch 19050, loss[loss=0.1086, simple_loss=0.184, pruned_loss=0.0166, over 4937.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02969, over 973480.22 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:01:59,757 INFO [train.py:715] (7/8) Epoch 17, batch 19100, loss[loss=0.1079, simple_loss=0.1837, pruned_loss=0.01604, over 4916.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02956, over 973527.45 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:02:38,888 INFO [train.py:715] (7/8) Epoch 17, batch 19150, loss[loss=0.1497, simple_loss=0.2122, pruned_loss=0.04364, over 4786.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02952, over 973688.86 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:03:17,329 INFO [train.py:715] (7/8) Epoch 17, batch 19200, loss[loss=0.1288, simple_loss=0.2084, pruned_loss=0.02464, over 4927.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02918, over 973826.13 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 02:03:56,165 INFO [train.py:715] (7/8) Epoch 17, batch 19250, loss[loss=0.1352, simple_loss=0.2108, pruned_loss=0.02981, over 4886.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02879, over 973081.04 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 02:04:35,740 INFO [train.py:715] (7/8) Epoch 17, batch 19300, loss[loss=0.1226, simple_loss=0.1915, pruned_loss=0.02682, over 4847.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2072, pruned_loss=0.02894, over 972109.62 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:05:15,462 
INFO [train.py:715] (7/8) Epoch 17, batch 19350, loss[loss=0.1489, simple_loss=0.2277, pruned_loss=0.03508, over 4794.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02885, over 972397.12 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:05:54,629 INFO [train.py:715] (7/8) Epoch 17, batch 19400, loss[loss=0.1394, simple_loss=0.2313, pruned_loss=0.02369, over 4888.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02855, over 972971.59 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:06:34,195 INFO [train.py:715] (7/8) Epoch 17, batch 19450, loss[loss=0.1189, simple_loss=0.1993, pruned_loss=0.01922, over 4935.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02877, over 972813.28 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:07:13,758 INFO [train.py:715] (7/8) Epoch 17, batch 19500, loss[loss=0.1139, simple_loss=0.1924, pruned_loss=0.01772, over 4992.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02821, over 972333.79 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:07:53,346 INFO [train.py:715] (7/8) Epoch 17, batch 19550, loss[loss=0.1175, simple_loss=0.1863, pruned_loss=0.02433, over 4793.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02827, over 972742.04 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:08:31,625 INFO [train.py:715] (7/8) Epoch 17, batch 19600, loss[loss=0.1032, simple_loss=0.184, pruned_loss=0.01119, over 4968.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02866, over 973285.61 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:09:11,588 INFO [train.py:715] (7/8) Epoch 17, batch 19650, loss[loss=0.1244, simple_loss=0.1975, pruned_loss=0.02566, over 4847.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02926, over 973070.75 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:09:51,452 INFO [train.py:715] (7/8) Epoch 17, batch 19700, loss[loss=0.1266, simple_loss=0.2096, pruned_loss=0.02178, over 4981.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2083, pruned_loss=0.03006, over 972958.02 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 02:10:30,062 INFO [train.py:715] (7/8) Epoch 17, batch 19750, loss[loss=0.1183, simple_loss=0.1855, pruned_loss=0.02554, over 4833.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2085, pruned_loss=0.02945, over 972646.27 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:11:09,369 INFO [train.py:715] (7/8) Epoch 17, batch 19800, loss[loss=0.1227, simple_loss=0.1987, pruned_loss=0.02333, over 4888.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2084, pruned_loss=0.02945, over 972425.63 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:11:47,963 INFO [train.py:715] (7/8) Epoch 17, batch 19850, loss[loss=0.1448, simple_loss=0.2246, pruned_loss=0.03248, over 4896.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02892, over 972849.43 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:12:26,933 INFO [train.py:715] (7/8) Epoch 17, batch 19900, loss[loss=0.1237, simple_loss=0.1928, pruned_loss=0.02731, over 4938.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2077, pruned_loss=0.02908, over 973030.69 frames.], batch size: 35, lr: 1.30e-04 +2022-05-09 02:13:05,191 INFO [train.py:715] (7/8) Epoch 17, batch 19950, loss[loss=0.1183, simple_loss=0.1969, pruned_loss=0.01989, over 4800.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2085, pruned_loss=0.02945, over 972636.14 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:13:44,430 INFO 
[train.py:715] (7/8) Epoch 17, batch 20000, loss[loss=0.1186, simple_loss=0.2059, pruned_loss=0.01561, over 4886.00 frames.], tot_loss[loss=0.133, simple_loss=0.208, pruned_loss=0.02904, over 973510.89 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:14:24,055 INFO [train.py:715] (7/8) Epoch 17, batch 20050, loss[loss=0.1301, simple_loss=0.2089, pruned_loss=0.02563, over 4776.00 frames.], tot_loss[loss=0.134, simple_loss=0.2087, pruned_loss=0.0297, over 973308.84 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:15:03,204 INFO [train.py:715] (7/8) Epoch 17, batch 20100, loss[loss=0.124, simple_loss=0.1991, pruned_loss=0.02441, over 4989.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2084, pruned_loss=0.02933, over 973084.39 frames.], batch size: 31, lr: 1.30e-04 +2022-05-09 02:15:42,014 INFO [train.py:715] (7/8) Epoch 17, batch 20150, loss[loss=0.1359, simple_loss=0.2011, pruned_loss=0.03535, over 4936.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2079, pruned_loss=0.02964, over 973799.92 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 02:16:22,284 INFO [train.py:715] (7/8) Epoch 17, batch 20200, loss[loss=0.1194, simple_loss=0.1884, pruned_loss=0.02518, over 4933.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.0295, over 974363.99 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 02:17:02,699 INFO [train.py:715] (7/8) Epoch 17, batch 20250, loss[loss=0.145, simple_loss=0.213, pruned_loss=0.03852, over 4887.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02958, over 973981.56 frames.], batch size: 22, lr: 1.30e-04 +2022-05-09 02:17:40,778 INFO [train.py:715] (7/8) Epoch 17, batch 20300, loss[loss=0.1115, simple_loss=0.19, pruned_loss=0.01655, over 4763.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02937, over 973446.97 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:18:20,508 INFO [train.py:715] (7/8) Epoch 17, batch 20350, loss[loss=0.1176, simple_loss=0.189, pruned_loss=0.02311, over 4979.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02954, over 972543.17 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:19:00,637 INFO [train.py:715] (7/8) Epoch 17, batch 20400, loss[loss=0.1017, simple_loss=0.1746, pruned_loss=0.01441, over 4829.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02944, over 971951.26 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:19:39,225 INFO [train.py:715] (7/8) Epoch 17, batch 20450, loss[loss=0.1216, simple_loss=0.1938, pruned_loss=0.02467, over 4817.00 frames.], tot_loss[loss=0.1333, simple_loss=0.208, pruned_loss=0.02933, over 971723.35 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:20:17,925 INFO [train.py:715] (7/8) Epoch 17, batch 20500, loss[loss=0.1241, simple_loss=0.2056, pruned_loss=0.02129, over 4989.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2081, pruned_loss=0.02936, over 971892.72 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 02:20:57,778 INFO [train.py:715] (7/8) Epoch 17, batch 20550, loss[loss=0.1487, simple_loss=0.2301, pruned_loss=0.03368, over 4909.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2082, pruned_loss=0.02919, over 972661.72 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:21:36,916 INFO [train.py:715] (7/8) Epoch 17, batch 20600, loss[loss=0.1412, simple_loss=0.2172, pruned_loss=0.03262, over 4915.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2073, pruned_loss=0.02866, over 972240.32 frames.], batch size: 39, lr: 1.30e-04 +2022-05-09 02:22:15,103 INFO [train.py:715] 
(7/8) Epoch 17, batch 20650, loss[loss=0.1382, simple_loss=0.2147, pruned_loss=0.0308, over 4823.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2072, pruned_loss=0.02891, over 972475.57 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:22:54,073 INFO [train.py:715] (7/8) Epoch 17, batch 20700, loss[loss=0.09831, simple_loss=0.1735, pruned_loss=0.01153, over 4939.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02875, over 972598.64 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:23:33,732 INFO [train.py:715] (7/8) Epoch 17, batch 20750, loss[loss=0.1412, simple_loss=0.2282, pruned_loss=0.02708, over 4861.00 frames.], tot_loss[loss=0.133, simple_loss=0.2078, pruned_loss=0.02909, over 971661.83 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:24:12,681 INFO [train.py:715] (7/8) Epoch 17, batch 20800, loss[loss=0.1359, simple_loss=0.209, pruned_loss=0.03138, over 4899.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02858, over 971417.12 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:24:51,254 INFO [train.py:715] (7/8) Epoch 17, batch 20850, loss[loss=0.1273, simple_loss=0.2007, pruned_loss=0.0269, over 4689.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02885, over 971186.16 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:25:30,265 INFO [train.py:715] (7/8) Epoch 17, batch 20900, loss[loss=0.1215, simple_loss=0.188, pruned_loss=0.02754, over 4863.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02916, over 971590.81 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 02:26:10,248 INFO [train.py:715] (7/8) Epoch 17, batch 20950, loss[loss=0.1236, simple_loss=0.2008, pruned_loss=0.02325, over 4934.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02931, over 971629.90 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 02:26:48,268 INFO [train.py:715] (7/8) Epoch 17, batch 21000, loss[loss=0.1347, simple_loss=0.2176, pruned_loss=0.02595, over 4927.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02969, over 971368.57 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 02:26:48,268 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 02:27:00,911 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1049, simple_loss=0.1882, pruned_loss=0.01077, over 914524.00 frames. 
+2022-05-09 02:27:38,929 INFO [train.py:715] (7/8) Epoch 17, batch 21050, loss[loss=0.1371, simple_loss=0.2108, pruned_loss=0.03175, over 4982.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2083, pruned_loss=0.02992, over 971633.73 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 02:28:18,321 INFO [train.py:715] (7/8) Epoch 17, batch 21100, loss[loss=0.1707, simple_loss=0.2423, pruned_loss=0.04955, over 4953.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2078, pruned_loss=0.02963, over 971670.48 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:28:58,370 INFO [train.py:715] (7/8) Epoch 17, batch 21150, loss[loss=0.132, simple_loss=0.2092, pruned_loss=0.02744, over 4832.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.0296, over 972095.30 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:29:37,029 INFO [train.py:715] (7/8) Epoch 17, batch 21200, loss[loss=0.1168, simple_loss=0.1875, pruned_loss=0.02308, over 4977.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.0298, over 972188.92 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:30:15,713 INFO [train.py:715] (7/8) Epoch 17, batch 21250, loss[loss=0.1172, simple_loss=0.1855, pruned_loss=0.02444, over 4688.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02953, over 971784.81 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:30:55,578 INFO [train.py:715] (7/8) Epoch 17, batch 21300, loss[loss=0.1665, simple_loss=0.2322, pruned_loss=0.05041, over 4767.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02924, over 971383.38 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:31:35,367 INFO [train.py:715] (7/8) Epoch 17, batch 21350, loss[loss=0.1384, simple_loss=0.2023, pruned_loss=0.03728, over 4773.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02949, over 971361.26 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:32:13,592 INFO [train.py:715] (7/8) Epoch 17, batch 21400, loss[loss=0.15, simple_loss=0.2347, pruned_loss=0.03268, over 4902.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02936, over 971273.08 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:32:53,761 INFO [train.py:715] (7/8) Epoch 17, batch 21450, loss[loss=0.1623, simple_loss=0.2327, pruned_loss=0.04596, over 4780.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02925, over 971397.94 frames.], batch size: 14, lr: 1.30e-04 +2022-05-09 02:33:33,551 INFO [train.py:715] (7/8) Epoch 17, batch 21500, loss[loss=0.1159, simple_loss=0.1965, pruned_loss=0.0177, over 4972.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02909, over 972177.73 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:34:12,046 INFO [train.py:715] (7/8) Epoch 17, batch 21550, loss[loss=0.1241, simple_loss=0.1937, pruned_loss=0.02727, over 4892.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02934, over 972453.61 frames.], batch size: 19, lr: 1.30e-04 +2022-05-09 02:34:51,494 INFO [train.py:715] (7/8) Epoch 17, batch 21600, loss[loss=0.1365, simple_loss=0.2079, pruned_loss=0.0326, over 4924.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02952, over 972481.79 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:35:31,960 INFO [train.py:715] (7/8) Epoch 17, batch 21650, loss[loss=0.1211, simple_loss=0.1952, pruned_loss=0.02349, over 4984.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02924, over 972267.81 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 
02:36:11,049 INFO [train.py:715] (7/8) Epoch 17, batch 21700, loss[loss=0.1265, simple_loss=0.1973, pruned_loss=0.02788, over 4881.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02939, over 972290.38 frames.], batch size: 32, lr: 1.30e-04 +2022-05-09 02:36:49,699 INFO [train.py:715] (7/8) Epoch 17, batch 21750, loss[loss=0.1355, simple_loss=0.2105, pruned_loss=0.03026, over 4960.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2071, pruned_loss=0.02885, over 972450.75 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:37:29,249 INFO [train.py:715] (7/8) Epoch 17, batch 21800, loss[loss=0.1333, simple_loss=0.2102, pruned_loss=0.02821, over 4848.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02909, over 972337.89 frames.], batch size: 30, lr: 1.30e-04 +2022-05-09 02:38:08,213 INFO [train.py:715] (7/8) Epoch 17, batch 21850, loss[loss=0.1218, simple_loss=0.1937, pruned_loss=0.02502, over 4850.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02938, over 972414.18 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:38:47,461 INFO [train.py:715] (7/8) Epoch 17, batch 21900, loss[loss=0.128, simple_loss=0.2043, pruned_loss=0.02588, over 4780.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02955, over 971737.91 frames.], batch size: 18, lr: 1.30e-04 +2022-05-09 02:39:25,953 INFO [train.py:715] (7/8) Epoch 17, batch 21950, loss[loss=0.1451, simple_loss=0.2184, pruned_loss=0.03592, over 4827.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02963, over 972384.47 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:40:05,673 INFO [train.py:715] (7/8) Epoch 17, batch 22000, loss[loss=0.1191, simple_loss=0.1856, pruned_loss=0.02631, over 4644.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02911, over 972182.43 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:40:45,439 INFO [train.py:715] (7/8) Epoch 17, batch 22050, loss[loss=0.1061, simple_loss=0.187, pruned_loss=0.01264, over 4828.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02866, over 972001.85 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:41:23,869 INFO [train.py:715] (7/8) Epoch 17, batch 22100, loss[loss=0.1635, simple_loss=0.2299, pruned_loss=0.04854, over 4860.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02873, over 971430.09 frames.], batch size: 20, lr: 1.30e-04 +2022-05-09 02:42:03,598 INFO [train.py:715] (7/8) Epoch 17, batch 22150, loss[loss=0.1193, simple_loss=0.2032, pruned_loss=0.01772, over 4797.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.029, over 971602.64 frames.], batch size: 21, lr: 1.30e-04 +2022-05-09 02:42:43,496 INFO [train.py:715] (7/8) Epoch 17, batch 22200, loss[loss=0.1418, simple_loss=0.2116, pruned_loss=0.03599, over 4974.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02865, over 971532.22 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:43:22,390 INFO [train.py:715] (7/8) Epoch 17, batch 22250, loss[loss=0.1505, simple_loss=0.2213, pruned_loss=0.03985, over 4974.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02864, over 972709.04 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:44:01,343 INFO [train.py:715] (7/8) Epoch 17, batch 22300, loss[loss=0.1322, simple_loss=0.202, pruned_loss=0.03123, over 4815.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02879, over 972371.90 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 
02:44:41,267 INFO [train.py:715] (7/8) Epoch 17, batch 22350, loss[loss=0.1265, simple_loss=0.2034, pruned_loss=0.0248, over 4918.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02889, over 972405.64 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 02:45:20,837 INFO [train.py:715] (7/8) Epoch 17, batch 22400, loss[loss=0.1554, simple_loss=0.2189, pruned_loss=0.046, over 4836.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02899, over 972839.38 frames.], batch size: 15, lr: 1.30e-04 +2022-05-09 02:45:59,652 INFO [train.py:715] (7/8) Epoch 17, batch 22450, loss[loss=0.1277, simple_loss=0.2062, pruned_loss=0.0246, over 4774.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02915, over 973063.97 frames.], batch size: 17, lr: 1.30e-04 +2022-05-09 02:46:38,626 INFO [train.py:715] (7/8) Epoch 17, batch 22500, loss[loss=0.129, simple_loss=0.2011, pruned_loss=0.02852, over 4835.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.02909, over 972625.42 frames.], batch size: 26, lr: 1.30e-04 +2022-05-09 02:47:18,396 INFO [train.py:715] (7/8) Epoch 17, batch 22550, loss[loss=0.1371, simple_loss=0.1957, pruned_loss=0.03927, over 4820.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02876, over 972563.14 frames.], batch size: 13, lr: 1.30e-04 +2022-05-09 02:47:56,727 INFO [train.py:715] (7/8) Epoch 17, batch 22600, loss[loss=0.14, simple_loss=0.2289, pruned_loss=0.02554, over 4958.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.0289, over 972263.96 frames.], batch size: 24, lr: 1.30e-04 +2022-05-09 02:48:36,270 INFO [train.py:715] (7/8) Epoch 17, batch 22650, loss[loss=0.1309, simple_loss=0.2002, pruned_loss=0.03083, over 4931.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02862, over 972727.45 frames.], batch size: 29, lr: 1.30e-04 +2022-05-09 02:49:15,730 INFO [train.py:715] (7/8) Epoch 17, batch 22700, loss[loss=0.1388, simple_loss=0.2174, pruned_loss=0.03008, over 4826.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2083, pruned_loss=0.02917, over 972226.56 frames.], batch size: 25, lr: 1.30e-04 +2022-05-09 02:49:54,666 INFO [train.py:715] (7/8) Epoch 17, batch 22750, loss[loss=0.1265, simple_loss=0.2044, pruned_loss=0.02428, over 4931.00 frames.], tot_loss[loss=0.133, simple_loss=0.208, pruned_loss=0.02896, over 972398.11 frames.], batch size: 23, lr: 1.30e-04 +2022-05-09 02:50:33,048 INFO [train.py:715] (7/8) Epoch 17, batch 22800, loss[loss=0.1615, simple_loss=0.2384, pruned_loss=0.04236, over 4748.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2082, pruned_loss=0.02875, over 972042.87 frames.], batch size: 16, lr: 1.30e-04 +2022-05-09 02:51:12,439 INFO [train.py:715] (7/8) Epoch 17, batch 22850, loss[loss=0.1335, simple_loss=0.2024, pruned_loss=0.03232, over 4952.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2082, pruned_loss=0.02908, over 972007.30 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 02:51:52,340 INFO [train.py:715] (7/8) Epoch 17, batch 22900, loss[loss=0.1048, simple_loss=0.1803, pruned_loss=0.01465, over 4845.00 frames.], tot_loss[loss=0.133, simple_loss=0.2079, pruned_loss=0.0291, over 972077.82 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 02:52:30,191 INFO [train.py:715] (7/8) Epoch 17, batch 22950, loss[loss=0.1241, simple_loss=0.2024, pruned_loss=0.02294, over 4696.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02879, over 971930.50 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 02:53:10,089 INFO 
[train.py:715] (7/8) Epoch 17, batch 23000, loss[loss=0.1211, simple_loss=0.2, pruned_loss=0.02104, over 4934.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02919, over 971231.98 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 02:53:50,345 INFO [train.py:715] (7/8) Epoch 17, batch 23050, loss[loss=0.1225, simple_loss=0.1881, pruned_loss=0.02844, over 4662.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02902, over 970519.94 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 02:54:29,514 INFO [train.py:715] (7/8) Epoch 17, batch 23100, loss[loss=0.1461, simple_loss=0.217, pruned_loss=0.0376, over 4955.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02866, over 969686.15 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 02:55:07,925 INFO [train.py:715] (7/8) Epoch 17, batch 23150, loss[loss=0.1211, simple_loss=0.2059, pruned_loss=0.01816, over 4790.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02909, over 969837.32 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 02:55:47,706 INFO [train.py:715] (7/8) Epoch 17, batch 23200, loss[loss=0.127, simple_loss=0.2026, pruned_loss=0.02571, over 4889.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02947, over 970570.70 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 02:56:27,706 INFO [train.py:715] (7/8) Epoch 17, batch 23250, loss[loss=0.133, simple_loss=0.2063, pruned_loss=0.02984, over 4769.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02941, over 970289.77 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 02:57:05,639 INFO [train.py:715] (7/8) Epoch 17, batch 23300, loss[loss=0.1157, simple_loss=0.1954, pruned_loss=0.01799, over 4775.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02944, over 971425.33 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 02:57:44,995 INFO [train.py:715] (7/8) Epoch 17, batch 23350, loss[loss=0.1317, simple_loss=0.2035, pruned_loss=0.02992, over 4793.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02952, over 971482.38 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 02:58:25,091 INFO [train.py:715] (7/8) Epoch 17, batch 23400, loss[loss=0.1201, simple_loss=0.1965, pruned_loss=0.02185, over 4902.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02907, over 972216.88 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 02:59:03,868 INFO [train.py:715] (7/8) Epoch 17, batch 23450, loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02973, over 4849.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02889, over 972108.05 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 02:59:42,963 INFO [train.py:715] (7/8) Epoch 17, batch 23500, loss[loss=0.1318, simple_loss=0.2162, pruned_loss=0.02368, over 4789.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02884, over 971077.57 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:00:22,280 INFO [train.py:715] (7/8) Epoch 17, batch 23550, loss[loss=0.09898, simple_loss=0.1666, pruned_loss=0.0157, over 4636.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02867, over 970735.53 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 03:01:01,967 INFO [train.py:715] (7/8) Epoch 17, batch 23600, loss[loss=0.1412, simple_loss=0.2151, pruned_loss=0.03361, over 4870.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02866, over 971421.71 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 03:01:40,307 INFO [train.py:715] 
(7/8) Epoch 17, batch 23650, loss[loss=0.1498, simple_loss=0.2275, pruned_loss=0.03601, over 4933.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02887, over 972060.09 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 03:02:19,923 INFO [train.py:715] (7/8) Epoch 17, batch 23700, loss[loss=0.169, simple_loss=0.2317, pruned_loss=0.05317, over 4893.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02834, over 971972.99 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:02:59,509 INFO [train.py:715] (7/8) Epoch 17, batch 23750, loss[loss=0.1247, simple_loss=0.2011, pruned_loss=0.02412, over 4792.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02862, over 972227.72 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:03:38,382 INFO [train.py:715] (7/8) Epoch 17, batch 23800, loss[loss=0.1202, simple_loss=0.1973, pruned_loss=0.02152, over 4928.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02864, over 972632.75 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:04:16,664 INFO [train.py:715] (7/8) Epoch 17, batch 23850, loss[loss=0.1455, simple_loss=0.2186, pruned_loss=0.03624, over 4788.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02879, over 973006.56 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:04:56,707 INFO [train.py:715] (7/8) Epoch 17, batch 23900, loss[loss=0.1363, simple_loss=0.2147, pruned_loss=0.02896, over 4760.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02883, over 972322.28 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:05:35,871 INFO [train.py:715] (7/8) Epoch 17, batch 23950, loss[loss=0.1331, simple_loss=0.2101, pruned_loss=0.02804, over 4904.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02902, over 972322.84 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:06:14,200 INFO [train.py:715] (7/8) Epoch 17, batch 24000, loss[loss=0.1104, simple_loss=0.1933, pruned_loss=0.01376, over 4898.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02945, over 972927.58 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:06:14,201 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 03:06:24,068 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01067, over 914524.00 frames. 
+2022-05-09 03:07:02,581 INFO [train.py:715] (7/8) Epoch 17, batch 24050, loss[loss=0.1225, simple_loss=0.2017, pruned_loss=0.02168, over 4943.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02929, over 972275.59 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 03:07:41,976 INFO [train.py:715] (7/8) Epoch 17, batch 24100, loss[loss=0.1725, simple_loss=0.2475, pruned_loss=0.04873, over 4921.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2061, pruned_loss=0.02937, over 971690.39 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 03:08:22,152 INFO [train.py:715] (7/8) Epoch 17, batch 24150, loss[loss=0.1296, simple_loss=0.2111, pruned_loss=0.02407, over 4902.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2057, pruned_loss=0.02898, over 973243.78 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:09:00,904 INFO [train.py:715] (7/8) Epoch 17, batch 24200, loss[loss=0.09917, simple_loss=0.1763, pruned_loss=0.01102, over 4931.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02846, over 973494.64 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:09:42,456 INFO [train.py:715] (7/8) Epoch 17, batch 24250, loss[loss=0.1122, simple_loss=0.1888, pruned_loss=0.01776, over 4784.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02838, over 973394.21 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:10:23,061 INFO [train.py:715] (7/8) Epoch 17, batch 24300, loss[loss=0.1383, simple_loss=0.2135, pruned_loss=0.03153, over 4973.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.0285, over 973391.69 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:11:02,614 INFO [train.py:715] (7/8) Epoch 17, batch 24350, loss[loss=0.1366, simple_loss=0.2203, pruned_loss=0.02644, over 4913.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02857, over 973610.19 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:11:41,992 INFO [train.py:715] (7/8) Epoch 17, batch 24400, loss[loss=0.1347, simple_loss=0.2171, pruned_loss=0.02612, over 4925.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02893, over 974359.42 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:12:21,140 INFO [train.py:715] (7/8) Epoch 17, batch 24450, loss[loss=0.1095, simple_loss=0.1743, pruned_loss=0.0223, over 4731.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.02907, over 973887.12 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 03:13:01,330 INFO [train.py:715] (7/8) Epoch 17, batch 24500, loss[loss=0.1306, simple_loss=0.2061, pruned_loss=0.02753, over 4746.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02916, over 973074.04 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:13:40,455 INFO [train.py:715] (7/8) Epoch 17, batch 24550, loss[loss=0.1446, simple_loss=0.2105, pruned_loss=0.03941, over 4874.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2052, pruned_loss=0.02854, over 972836.75 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:14:19,286 INFO [train.py:715] (7/8) Epoch 17, batch 24600, loss[loss=0.1302, simple_loss=0.2018, pruned_loss=0.02929, over 4838.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2053, pruned_loss=0.02877, over 973849.52 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 03:14:59,440 INFO [train.py:715] (7/8) Epoch 17, batch 24650, loss[loss=0.1242, simple_loss=0.1967, pruned_loss=0.02581, over 4844.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02873, over 973545.42 frames.], batch size: 12, lr: 1.29e-04 
+2022-05-09 03:15:39,740 INFO [train.py:715] (7/8) Epoch 17, batch 24700, loss[loss=0.1336, simple_loss=0.2118, pruned_loss=0.02774, over 4815.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.02882, over 973017.96 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:16:18,265 INFO [train.py:715] (7/8) Epoch 17, batch 24750, loss[loss=0.1143, simple_loss=0.1931, pruned_loss=0.01774, over 4992.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02883, over 973161.49 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:16:58,095 INFO [train.py:715] (7/8) Epoch 17, batch 24800, loss[loss=0.1401, simple_loss=0.2188, pruned_loss=0.03068, over 4960.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02929, over 972677.73 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:17:37,936 INFO [train.py:715] (7/8) Epoch 17, batch 24850, loss[loss=0.1376, simple_loss=0.21, pruned_loss=0.03261, over 4805.00 frames.], tot_loss[loss=0.132, simple_loss=0.2058, pruned_loss=0.02916, over 972953.66 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:18:17,569 INFO [train.py:715] (7/8) Epoch 17, batch 24900, loss[loss=0.1374, simple_loss=0.2091, pruned_loss=0.03288, over 4834.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2056, pruned_loss=0.02908, over 972054.26 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:18:56,115 INFO [train.py:715] (7/8) Epoch 17, batch 24950, loss[loss=0.1414, simple_loss=0.2156, pruned_loss=0.03357, over 4941.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02902, over 971997.14 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:19:35,622 INFO [train.py:715] (7/8) Epoch 17, batch 25000, loss[loss=0.1378, simple_loss=0.2093, pruned_loss=0.03322, over 4951.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02926, over 973252.60 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:20:14,003 INFO [train.py:715] (7/8) Epoch 17, batch 25050, loss[loss=0.1351, simple_loss=0.2033, pruned_loss=0.03348, over 4853.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.0295, over 971992.57 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 03:20:53,000 INFO [train.py:715] (7/8) Epoch 17, batch 25100, loss[loss=0.1141, simple_loss=0.1943, pruned_loss=0.01698, over 4949.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02971, over 972061.55 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:21:32,981 INFO [train.py:715] (7/8) Epoch 17, batch 25150, loss[loss=0.1254, simple_loss=0.2035, pruned_loss=0.02365, over 4867.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02918, over 972173.02 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 03:22:12,875 INFO [train.py:715] (7/8) Epoch 17, batch 25200, loss[loss=0.1069, simple_loss=0.1813, pruned_loss=0.01627, over 4839.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02904, over 972437.33 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 03:22:51,916 INFO [train.py:715] (7/8) Epoch 17, batch 25250, loss[loss=0.1258, simple_loss=0.204, pruned_loss=0.02375, over 4707.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02866, over 972148.04 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:23:31,037 INFO [train.py:715] (7/8) Epoch 17, batch 25300, loss[loss=0.1412, simple_loss=0.2166, pruned_loss=0.03295, over 4829.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02871, over 972276.56 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 
03:24:11,040 INFO [train.py:715] (7/8) Epoch 17, batch 25350, loss[loss=0.1202, simple_loss=0.1961, pruned_loss=0.02215, over 4800.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02878, over 972615.72 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:24:49,787 INFO [train.py:715] (7/8) Epoch 17, batch 25400, loss[loss=0.1637, simple_loss=0.2374, pruned_loss=0.04498, over 4943.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02904, over 971197.66 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:25:28,941 INFO [train.py:715] (7/8) Epoch 17, batch 25450, loss[loss=0.1414, simple_loss=0.2174, pruned_loss=0.03268, over 4808.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02915, over 971356.94 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:26:08,067 INFO [train.py:715] (7/8) Epoch 17, batch 25500, loss[loss=0.1243, simple_loss=0.2037, pruned_loss=0.02241, over 4877.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2061, pruned_loss=0.02905, over 972422.67 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 03:26:47,843 INFO [train.py:715] (7/8) Epoch 17, batch 25550, loss[loss=0.1164, simple_loss=0.188, pruned_loss=0.0224, over 4796.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02901, over 972972.06 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:27:26,922 INFO [train.py:715] (7/8) Epoch 17, batch 25600, loss[loss=0.1313, simple_loss=0.2079, pruned_loss=0.02739, over 4820.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02891, over 973634.58 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 03:28:05,427 INFO [train.py:715] (7/8) Epoch 17, batch 25650, loss[loss=0.1338, simple_loss=0.2016, pruned_loss=0.03302, over 4970.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02883, over 972834.33 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:28:45,201 INFO [train.py:715] (7/8) Epoch 17, batch 25700, loss[loss=0.1122, simple_loss=0.1898, pruned_loss=0.01728, over 4750.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02879, over 971784.01 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:29:24,295 INFO [train.py:715] (7/8) Epoch 17, batch 25750, loss[loss=0.1383, simple_loss=0.2131, pruned_loss=0.0318, over 4848.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02886, over 972035.77 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 03:30:03,680 INFO [train.py:715] (7/8) Epoch 17, batch 25800, loss[loss=0.1601, simple_loss=0.2283, pruned_loss=0.04594, over 4752.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02909, over 971842.99 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:30:43,165 INFO [train.py:715] (7/8) Epoch 17, batch 25850, loss[loss=0.1352, simple_loss=0.2063, pruned_loss=0.03208, over 4888.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.0292, over 972897.89 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 03:31:22,527 INFO [train.py:715] (7/8) Epoch 17, batch 25900, loss[loss=0.2054, simple_loss=0.2656, pruned_loss=0.07261, over 4913.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02958, over 972597.88 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:32:01,049 INFO [train.py:715] (7/8) Epoch 17, batch 25950, loss[loss=0.1264, simple_loss=0.2101, pruned_loss=0.02129, over 4806.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02972, over 973423.73 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:32:39,480 
INFO [train.py:715] (7/8) Epoch 17, batch 26000, loss[loss=0.1322, simple_loss=0.2112, pruned_loss=0.02659, over 4780.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02957, over 973568.15 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:33:19,122 INFO [train.py:715] (7/8) Epoch 17, batch 26050, loss[loss=0.1503, simple_loss=0.2266, pruned_loss=0.03697, over 4881.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02957, over 972737.99 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:33:57,730 INFO [train.py:715] (7/8) Epoch 17, batch 26100, loss[loss=0.1039, simple_loss=0.1722, pruned_loss=0.01781, over 4794.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2057, pruned_loss=0.02924, over 971788.56 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 03:34:37,127 INFO [train.py:715] (7/8) Epoch 17, batch 26150, loss[loss=0.1546, simple_loss=0.2324, pruned_loss=0.03838, over 4900.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2058, pruned_loss=0.0292, over 972014.36 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:35:16,510 INFO [train.py:715] (7/8) Epoch 17, batch 26200, loss[loss=0.1405, simple_loss=0.2144, pruned_loss=0.03331, over 4942.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2056, pruned_loss=0.02931, over 972293.01 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:35:56,480 INFO [train.py:715] (7/8) Epoch 17, batch 26250, loss[loss=0.1431, simple_loss=0.2137, pruned_loss=0.03631, over 4980.00 frames.], tot_loss[loss=0.132, simple_loss=0.2057, pruned_loss=0.02915, over 972600.78 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:36:35,145 INFO [train.py:715] (7/8) Epoch 17, batch 26300, loss[loss=0.1227, simple_loss=0.193, pruned_loss=0.02623, over 4831.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2064, pruned_loss=0.02953, over 971577.84 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:37:13,922 INFO [train.py:715] (7/8) Epoch 17, batch 26350, loss[loss=0.1155, simple_loss=0.1883, pruned_loss=0.02142, over 4913.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02881, over 972346.89 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 03:37:53,867 INFO [train.py:715] (7/8) Epoch 17, batch 26400, loss[loss=0.141, simple_loss=0.2126, pruned_loss=0.03472, over 4776.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02912, over 971725.01 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:38:32,579 INFO [train.py:715] (7/8) Epoch 17, batch 26450, loss[loss=0.1455, simple_loss=0.2238, pruned_loss=0.03354, over 4850.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02915, over 972180.62 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 03:39:11,789 INFO [train.py:715] (7/8) Epoch 17, batch 26500, loss[loss=0.1279, simple_loss=0.1948, pruned_loss=0.03046, over 4897.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02915, over 972316.70 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:39:51,011 INFO [train.py:715] (7/8) Epoch 17, batch 26550, loss[loss=0.1263, simple_loss=0.2097, pruned_loss=0.02152, over 4797.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02892, over 971615.27 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:40:29,944 INFO [train.py:715] (7/8) Epoch 17, batch 26600, loss[loss=0.1258, simple_loss=0.2081, pruned_loss=0.02181, over 4767.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02876, over 971170.03 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 03:41:08,354 INFO 
[train.py:715] (7/8) Epoch 17, batch 26650, loss[loss=0.1358, simple_loss=0.2093, pruned_loss=0.03122, over 4832.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02919, over 971368.58 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:41:47,383 INFO [train.py:715] (7/8) Epoch 17, batch 26700, loss[loss=0.1433, simple_loss=0.2263, pruned_loss=0.03015, over 4989.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2066, pruned_loss=0.02951, over 971691.89 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 03:42:26,787 INFO [train.py:715] (7/8) Epoch 17, batch 26750, loss[loss=0.1416, simple_loss=0.2187, pruned_loss=0.03229, over 4767.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02943, over 971148.83 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 03:43:05,135 INFO [train.py:715] (7/8) Epoch 17, batch 26800, loss[loss=0.108, simple_loss=0.1832, pruned_loss=0.01645, over 4854.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.0295, over 972218.01 frames.], batch size: 34, lr: 1.29e-04 +2022-05-09 03:43:43,938 INFO [train.py:715] (7/8) Epoch 17, batch 26850, loss[loss=0.1181, simple_loss=0.1882, pruned_loss=0.024, over 4935.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2062, pruned_loss=0.02932, over 972758.89 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:44:23,815 INFO [train.py:715] (7/8) Epoch 17, batch 26900, loss[loss=0.1261, simple_loss=0.1986, pruned_loss=0.02675, over 4698.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02879, over 972637.37 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 03:45:02,978 INFO [train.py:715] (7/8) Epoch 17, batch 26950, loss[loss=0.1486, simple_loss=0.2302, pruned_loss=0.03355, over 4818.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.02864, over 972881.73 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 03:45:41,693 INFO [train.py:715] (7/8) Epoch 17, batch 27000, loss[loss=0.146, simple_loss=0.2297, pruned_loss=0.03116, over 4971.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02889, over 972833.02 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 03:45:41,694 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 03:45:51,480 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.0107, over 914524.00 frames. 
+2022-05-09 03:46:30,444 INFO [train.py:715] (7/8) Epoch 17, batch 27050, loss[loss=0.1608, simple_loss=0.234, pruned_loss=0.0438, over 4938.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2058, pruned_loss=0.02919, over 971751.06 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:47:09,963 INFO [train.py:715] (7/8) Epoch 17, batch 27100, loss[loss=0.1098, simple_loss=0.1926, pruned_loss=0.01351, over 4971.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2051, pruned_loss=0.02889, over 971651.73 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:47:49,463 INFO [train.py:715] (7/8) Epoch 17, batch 27150, loss[loss=0.1258, simple_loss=0.2006, pruned_loss=0.02547, over 4777.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02892, over 972143.78 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:48:27,669 INFO [train.py:715] (7/8) Epoch 17, batch 27200, loss[loss=0.1585, simple_loss=0.2197, pruned_loss=0.04861, over 4959.00 frames.], tot_loss[loss=0.131, simple_loss=0.205, pruned_loss=0.02849, over 971782.34 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 03:49:06,449 INFO [train.py:715] (7/8) Epoch 17, batch 27250, loss[loss=0.167, simple_loss=0.2485, pruned_loss=0.04272, over 4814.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2048, pruned_loss=0.02813, over 972040.47 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 03:49:46,074 INFO [train.py:715] (7/8) Epoch 17, batch 27300, loss[loss=0.1259, simple_loss=0.1975, pruned_loss=0.02715, over 4902.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02834, over 971872.46 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:50:25,161 INFO [train.py:715] (7/8) Epoch 17, batch 27350, loss[loss=0.1189, simple_loss=0.1958, pruned_loss=0.02098, over 4905.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02848, over 971953.70 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:51:04,596 INFO [train.py:715] (7/8) Epoch 17, batch 27400, loss[loss=0.1292, simple_loss=0.2129, pruned_loss=0.02281, over 4818.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02894, over 972010.91 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 03:51:43,499 INFO [train.py:715] (7/8) Epoch 17, batch 27450, loss[loss=0.1285, simple_loss=0.2138, pruned_loss=0.02161, over 4923.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02898, over 971922.86 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:52:23,145 INFO [train.py:715] (7/8) Epoch 17, batch 27500, loss[loss=0.1316, simple_loss=0.2013, pruned_loss=0.03093, over 4921.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02927, over 972493.66 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 03:53:01,814 INFO [train.py:715] (7/8) Epoch 17, batch 27550, loss[loss=0.1456, simple_loss=0.2083, pruned_loss=0.04146, over 4899.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.02941, over 971976.19 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 03:53:40,304 INFO [train.py:715] (7/8) Epoch 17, batch 27600, loss[loss=0.142, simple_loss=0.2207, pruned_loss=0.03162, over 4836.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02937, over 972050.00 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 03:54:19,258 INFO [train.py:715] (7/8) Epoch 17, batch 27650, loss[loss=0.1213, simple_loss=0.1943, pruned_loss=0.02416, over 4972.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02934, over 972246.88 frames.], batch size: 28, lr: 1.29e-04 +2022-05-09 
03:54:57,850 INFO [train.py:715] (7/8) Epoch 17, batch 27700, loss[loss=0.1397, simple_loss=0.2154, pruned_loss=0.03199, over 4803.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02955, over 972437.31 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:55:37,180 INFO [train.py:715] (7/8) Epoch 17, batch 27750, loss[loss=0.1176, simple_loss=0.1865, pruned_loss=0.02433, over 4965.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.0295, over 973334.34 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:56:16,914 INFO [train.py:715] (7/8) Epoch 17, batch 27800, loss[loss=0.1475, simple_loss=0.2282, pruned_loss=0.0334, over 4964.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.0292, over 973404.88 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 03:56:57,479 INFO [train.py:715] (7/8) Epoch 17, batch 27850, loss[loss=0.1509, simple_loss=0.2269, pruned_loss=0.0374, over 4986.00 frames.], tot_loss[loss=0.1341, simple_loss=0.208, pruned_loss=0.03013, over 973637.04 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:57:37,277 INFO [train.py:715] (7/8) Epoch 17, batch 27900, loss[loss=0.1337, simple_loss=0.2107, pruned_loss=0.02834, over 4982.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.03, over 974017.15 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 03:58:16,550 INFO [train.py:715] (7/8) Epoch 17, batch 27950, loss[loss=0.1386, simple_loss=0.2105, pruned_loss=0.03335, over 4962.00 frames.], tot_loss[loss=0.1345, simple_loss=0.2084, pruned_loss=0.0303, over 973383.28 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 03:58:56,517 INFO [train.py:715] (7/8) Epoch 17, batch 28000, loss[loss=0.115, simple_loss=0.1913, pruned_loss=0.01938, over 4837.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2084, pruned_loss=0.03011, over 973225.10 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 03:59:36,521 INFO [train.py:715] (7/8) Epoch 17, batch 28050, loss[loss=0.13, simple_loss=0.2117, pruned_loss=0.02414, over 4983.00 frames.], tot_loss[loss=0.1337, simple_loss=0.208, pruned_loss=0.02972, over 973322.23 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 04:00:15,249 INFO [train.py:715] (7/8) Epoch 17, batch 28100, loss[loss=0.1226, simple_loss=0.1994, pruned_loss=0.0229, over 4910.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02939, over 972984.22 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:00:54,613 INFO [train.py:715] (7/8) Epoch 17, batch 28150, loss[loss=0.1252, simple_loss=0.1891, pruned_loss=0.03065, over 4790.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02996, over 972624.67 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:01:33,615 INFO [train.py:715] (7/8) Epoch 17, batch 28200, loss[loss=0.1062, simple_loss=0.1915, pruned_loss=0.01046, over 4921.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2065, pruned_loss=0.02989, over 972703.28 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:02:12,004 INFO [train.py:715] (7/8) Epoch 17, batch 28250, loss[loss=0.14, simple_loss=0.2095, pruned_loss=0.03528, over 4845.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2065, pruned_loss=0.0299, over 972799.08 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:02:50,449 INFO [train.py:715] (7/8) Epoch 17, batch 28300, loss[loss=0.1336, simple_loss=0.2057, pruned_loss=0.03079, over 4908.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03, over 972077.69 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:03:29,619 INFO 
[train.py:715] (7/8) Epoch 17, batch 28350, loss[loss=0.1207, simple_loss=0.2016, pruned_loss=0.01984, over 4975.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2072, pruned_loss=0.03, over 972394.91 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:04:09,196 INFO [train.py:715] (7/8) Epoch 17, batch 28400, loss[loss=0.1267, simple_loss=0.2075, pruned_loss=0.02291, over 4930.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2071, pruned_loss=0.02964, over 972533.46 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 04:04:48,218 INFO [train.py:715] (7/8) Epoch 17, batch 28450, loss[loss=0.1029, simple_loss=0.1825, pruned_loss=0.01167, over 4815.00 frames.], tot_loss[loss=0.133, simple_loss=0.2068, pruned_loss=0.02961, over 973417.43 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 04:05:26,444 INFO [train.py:715] (7/8) Epoch 17, batch 28500, loss[loss=0.1481, simple_loss=0.2143, pruned_loss=0.04101, over 4752.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02943, over 972822.39 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:06:06,461 INFO [train.py:715] (7/8) Epoch 17, batch 28550, loss[loss=0.139, simple_loss=0.218, pruned_loss=0.03002, over 4943.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02922, over 973255.33 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:06:45,102 INFO [train.py:715] (7/8) Epoch 17, batch 28600, loss[loss=0.1293, simple_loss=0.2106, pruned_loss=0.02406, over 4815.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02932, over 972430.95 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:07:23,873 INFO [train.py:715] (7/8) Epoch 17, batch 28650, loss[loss=0.1195, simple_loss=0.1898, pruned_loss=0.02458, over 4941.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02887, over 973066.43 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:08:02,258 INFO [train.py:715] (7/8) Epoch 17, batch 28700, loss[loss=0.1312, simple_loss=0.2094, pruned_loss=0.02649, over 4829.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02877, over 973479.58 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:08:41,579 INFO [train.py:715] (7/8) Epoch 17, batch 28750, loss[loss=0.1396, simple_loss=0.2053, pruned_loss=0.03694, over 4750.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02902, over 972517.90 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:09:20,211 INFO [train.py:715] (7/8) Epoch 17, batch 28800, loss[loss=0.1213, simple_loss=0.1942, pruned_loss=0.0242, over 4790.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02872, over 973174.65 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:09:58,908 INFO [train.py:715] (7/8) Epoch 17, batch 28850, loss[loss=0.1293, simple_loss=0.201, pruned_loss=0.02881, over 4918.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02877, over 973368.21 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:10:37,992 INFO [train.py:715] (7/8) Epoch 17, batch 28900, loss[loss=0.1593, simple_loss=0.2385, pruned_loss=0.04003, over 4951.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02934, over 972256.67 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:11:16,520 INFO [train.py:715] (7/8) Epoch 17, batch 28950, loss[loss=0.1233, simple_loss=0.1945, pruned_loss=0.02604, over 4770.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2083, pruned_loss=0.02951, over 972404.00 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:11:54,925 INFO [train.py:715] 
(7/8) Epoch 17, batch 29000, loss[loss=0.1249, simple_loss=0.1902, pruned_loss=0.02982, over 4814.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02935, over 972537.57 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:12:33,662 INFO [train.py:715] (7/8) Epoch 17, batch 29050, loss[loss=0.1206, simple_loss=0.1876, pruned_loss=0.02681, over 4800.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02898, over 972225.35 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:13:13,014 INFO [train.py:715] (7/8) Epoch 17, batch 29100, loss[loss=0.1161, simple_loss=0.1834, pruned_loss=0.02444, over 4744.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02902, over 971648.79 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:13:51,912 INFO [train.py:715] (7/8) Epoch 17, batch 29150, loss[loss=0.1246, simple_loss=0.1928, pruned_loss=0.02818, over 4839.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02908, over 971961.43 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:14:30,019 INFO [train.py:715] (7/8) Epoch 17, batch 29200, loss[loss=0.1331, simple_loss=0.2086, pruned_loss=0.02877, over 4842.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02891, over 971403.99 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:15:09,519 INFO [train.py:715] (7/8) Epoch 17, batch 29250, loss[loss=0.1187, simple_loss=0.1937, pruned_loss=0.02185, over 4661.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.0289, over 972043.51 frames.], batch size: 13, lr: 1.29e-04 +2022-05-09 04:15:49,148 INFO [train.py:715] (7/8) Epoch 17, batch 29300, loss[loss=0.127, simple_loss=0.2041, pruned_loss=0.02493, over 4801.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02885, over 972274.09 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:16:27,572 INFO [train.py:715] (7/8) Epoch 17, batch 29350, loss[loss=0.1682, simple_loss=0.2405, pruned_loss=0.04799, over 4793.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02858, over 971455.67 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:17:06,157 INFO [train.py:715] (7/8) Epoch 17, batch 29400, loss[loss=0.1473, simple_loss=0.2191, pruned_loss=0.03776, over 4788.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.02872, over 971107.22 frames.], batch size: 14, lr: 1.29e-04 +2022-05-09 04:17:45,840 INFO [train.py:715] (7/8) Epoch 17, batch 29450, loss[loss=0.1284, simple_loss=0.2111, pruned_loss=0.02288, over 4990.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.0287, over 971731.37 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:18:24,964 INFO [train.py:715] (7/8) Epoch 17, batch 29500, loss[loss=0.1534, simple_loss=0.2154, pruned_loss=0.04576, over 4683.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02906, over 971725.58 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:19:03,883 INFO [train.py:715] (7/8) Epoch 17, batch 29550, loss[loss=0.1409, simple_loss=0.2147, pruned_loss=0.03355, over 4784.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02935, over 971470.11 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:19:43,167 INFO [train.py:715] (7/8) Epoch 17, batch 29600, loss[loss=0.1091, simple_loss=0.1808, pruned_loss=0.01875, over 4757.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02929, over 970894.31 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:20:22,746 INFO [train.py:715] (7/8) Epoch 
17, batch 29650, loss[loss=0.1323, simple_loss=0.204, pruned_loss=0.03031, over 4873.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02943, over 971600.00 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 04:21:01,517 INFO [train.py:715] (7/8) Epoch 17, batch 29700, loss[loss=0.1394, simple_loss=0.2168, pruned_loss=0.03103, over 4906.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.0296, over 971783.32 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 04:21:40,464 INFO [train.py:715] (7/8) Epoch 17, batch 29750, loss[loss=0.1193, simple_loss=0.1858, pruned_loss=0.0264, over 4746.00 frames.], tot_loss[loss=0.1332, simple_loss=0.207, pruned_loss=0.02971, over 971304.01 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:22:20,623 INFO [train.py:715] (7/8) Epoch 17, batch 29800, loss[loss=0.1251, simple_loss=0.2001, pruned_loss=0.02509, over 4900.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02917, over 972153.86 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:22:59,616 INFO [train.py:715] (7/8) Epoch 17, batch 29850, loss[loss=0.1411, simple_loss=0.2209, pruned_loss=0.03065, over 4904.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.029, over 972783.33 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:23:38,913 INFO [train.py:715] (7/8) Epoch 17, batch 29900, loss[loss=0.1124, simple_loss=0.1868, pruned_loss=0.01901, over 4915.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.0292, over 972959.08 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:24:18,622 INFO [train.py:715] (7/8) Epoch 17, batch 29950, loss[loss=0.1335, simple_loss=0.203, pruned_loss=0.03206, over 4838.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02906, over 972305.11 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:24:58,025 INFO [train.py:715] (7/8) Epoch 17, batch 30000, loss[loss=0.1215, simple_loss=0.1859, pruned_loss=0.02851, over 4804.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02909, over 972797.08 frames.], batch size: 26, lr: 1.29e-04 +2022-05-09 04:24:58,026 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 04:25:08,261 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.01065, over 914524.00 frames. 
+2022-05-09 04:25:48,088 INFO [train.py:715] (7/8) Epoch 17, batch 30050, loss[loss=0.1311, simple_loss=0.207, pruned_loss=0.02762, over 4869.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02935, over 972647.87 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:26:27,728 INFO [train.py:715] (7/8) Epoch 17, batch 30100, loss[loss=0.1371, simple_loss=0.2124, pruned_loss=0.03094, over 4886.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02924, over 972243.19 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:27:06,813 INFO [train.py:715] (7/8) Epoch 17, batch 30150, loss[loss=0.1252, simple_loss=0.206, pruned_loss=0.0222, over 4895.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.02894, over 971828.46 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:27:46,311 INFO [train.py:715] (7/8) Epoch 17, batch 30200, loss[loss=0.1485, simple_loss=0.2189, pruned_loss=0.03905, over 4930.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02926, over 971987.94 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:28:25,427 INFO [train.py:715] (7/8) Epoch 17, batch 30250, loss[loss=0.1234, simple_loss=0.2024, pruned_loss=0.02224, over 4691.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02941, over 971825.70 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:29:04,418 INFO [train.py:715] (7/8) Epoch 17, batch 30300, loss[loss=0.1244, simple_loss=0.1995, pruned_loss=0.02464, over 4934.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02947, over 972033.25 frames.], batch size: 23, lr: 1.29e-04 +2022-05-09 04:29:44,188 INFO [train.py:715] (7/8) Epoch 17, batch 30350, loss[loss=0.1293, simple_loss=0.1962, pruned_loss=0.03118, over 4757.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.0293, over 971332.78 frames.], batch size: 19, lr: 1.29e-04 +2022-05-09 04:30:23,370 INFO [train.py:715] (7/8) Epoch 17, batch 30400, loss[loss=0.1879, simple_loss=0.2646, pruned_loss=0.05559, over 4910.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02942, over 971220.68 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:31:02,096 INFO [train.py:715] (7/8) Epoch 17, batch 30450, loss[loss=0.144, simple_loss=0.2204, pruned_loss=0.03385, over 4862.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02937, over 971880.00 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 04:31:41,826 INFO [train.py:715] (7/8) Epoch 17, batch 30500, loss[loss=0.1353, simple_loss=0.2122, pruned_loss=0.02925, over 4975.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2078, pruned_loss=0.02942, over 972716.48 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 04:32:21,638 INFO [train.py:715] (7/8) Epoch 17, batch 30550, loss[loss=0.1272, simple_loss=0.1966, pruned_loss=0.0289, over 4951.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02937, over 972826.66 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 04:33:01,424 INFO [train.py:715] (7/8) Epoch 17, batch 30600, loss[loss=0.1269, simple_loss=0.1955, pruned_loss=0.02913, over 4860.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02929, over 972788.46 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:33:40,318 INFO [train.py:715] (7/8) Epoch 17, batch 30650, loss[loss=0.1358, simple_loss=0.2073, pruned_loss=0.03216, over 4913.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2072, pruned_loss=0.02884, over 972374.10 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 
04:34:20,060 INFO [train.py:715] (7/8) Epoch 17, batch 30700, loss[loss=0.1194, simple_loss=0.1952, pruned_loss=0.02185, over 4803.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02823, over 972500.78 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:34:59,087 INFO [train.py:715] (7/8) Epoch 17, batch 30750, loss[loss=0.1343, simple_loss=0.2121, pruned_loss=0.02827, over 4775.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.02813, over 973034.18 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:35:38,917 INFO [train.py:715] (7/8) Epoch 17, batch 30800, loss[loss=0.1294, simple_loss=0.2103, pruned_loss=0.02424, over 4926.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2059, pruned_loss=0.02812, over 972820.79 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:36:18,142 INFO [train.py:715] (7/8) Epoch 17, batch 30850, loss[loss=0.1405, simple_loss=0.2217, pruned_loss=0.02967, over 4806.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.02819, over 973422.69 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 04:36:58,361 INFO [train.py:715] (7/8) Epoch 17, batch 30900, loss[loss=0.1356, simple_loss=0.2088, pruned_loss=0.0312, over 4780.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02842, over 973111.11 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:37:38,030 INFO [train.py:715] (7/8) Epoch 17, batch 30950, loss[loss=0.1568, simple_loss=0.2301, pruned_loss=0.04174, over 4863.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02864, over 973255.95 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:38:17,302 INFO [train.py:715] (7/8) Epoch 17, batch 31000, loss[loss=0.1507, simple_loss=0.2219, pruned_loss=0.03973, over 4809.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02864, over 971829.61 frames.], batch size: 24, lr: 1.29e-04 +2022-05-09 04:38:57,011 INFO [train.py:715] (7/8) Epoch 17, batch 31050, loss[loss=0.1245, simple_loss=0.2017, pruned_loss=0.02365, over 4907.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2053, pruned_loss=0.02864, over 972097.65 frames.], batch size: 17, lr: 1.29e-04 +2022-05-09 04:39:36,079 INFO [train.py:715] (7/8) Epoch 17, batch 31100, loss[loss=0.1386, simple_loss=0.2021, pruned_loss=0.03752, over 4886.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02901, over 972189.17 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:40:15,211 INFO [train.py:715] (7/8) Epoch 17, batch 31150, loss[loss=0.1534, simple_loss=0.2278, pruned_loss=0.03953, over 4917.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02962, over 972497.21 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:40:54,496 INFO [train.py:715] (7/8) Epoch 17, batch 31200, loss[loss=0.1393, simple_loss=0.2153, pruned_loss=0.0317, over 4931.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.0295, over 972803.79 frames.], batch size: 21, lr: 1.29e-04 +2022-05-09 04:41:34,597 INFO [train.py:715] (7/8) Epoch 17, batch 31250, loss[loss=0.1644, simple_loss=0.2439, pruned_loss=0.04248, over 4973.00 frames.], tot_loss[loss=0.134, simple_loss=0.208, pruned_loss=0.02996, over 972446.94 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:42:13,895 INFO [train.py:715] (7/8) Epoch 17, batch 31300, loss[loss=0.1603, simple_loss=0.2317, pruned_loss=0.04441, over 4858.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2074, pruned_loss=0.03003, over 972605.89 frames.], batch size: 34, lr: 1.29e-04 +2022-05-09 
04:42:53,281 INFO [train.py:715] (7/8) Epoch 17, batch 31350, loss[loss=0.1171, simple_loss=0.1962, pruned_loss=0.01902, over 4944.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2081, pruned_loss=0.03012, over 972625.54 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:43:32,647 INFO [train.py:715] (7/8) Epoch 17, batch 31400, loss[loss=0.1267, simple_loss=0.1977, pruned_loss=0.02778, over 4777.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02945, over 972546.74 frames.], batch size: 12, lr: 1.29e-04 +2022-05-09 04:44:11,255 INFO [train.py:715] (7/8) Epoch 17, batch 31450, loss[loss=0.122, simple_loss=0.1881, pruned_loss=0.02797, over 4861.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02872, over 972695.17 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:44:51,214 INFO [train.py:715] (7/8) Epoch 17, batch 31500, loss[loss=0.1301, simple_loss=0.1982, pruned_loss=0.031, over 4990.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02881, over 973599.47 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:45:29,938 INFO [train.py:715] (7/8) Epoch 17, batch 31550, loss[loss=0.1308, simple_loss=0.1994, pruned_loss=0.03111, over 4765.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02915, over 972953.51 frames.], batch size: 18, lr: 1.29e-04 +2022-05-09 04:46:09,496 INFO [train.py:715] (7/8) Epoch 17, batch 31600, loss[loss=0.1512, simple_loss=0.2209, pruned_loss=0.04079, over 4692.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02925, over 971810.09 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:46:48,900 INFO [train.py:715] (7/8) Epoch 17, batch 31650, loss[loss=0.1205, simple_loss=0.1916, pruned_loss=0.02471, over 4837.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02933, over 971941.36 frames.], batch size: 30, lr: 1.29e-04 +2022-05-09 04:47:28,182 INFO [train.py:715] (7/8) Epoch 17, batch 31700, loss[loss=0.1693, simple_loss=0.2351, pruned_loss=0.05176, over 4865.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.0291, over 971887.91 frames.], batch size: 32, lr: 1.29e-04 +2022-05-09 04:48:07,939 INFO [train.py:715] (7/8) Epoch 17, batch 31750, loss[loss=0.1698, simple_loss=0.243, pruned_loss=0.0483, over 4879.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02893, over 972746.42 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:48:47,179 INFO [train.py:715] (7/8) Epoch 17, batch 31800, loss[loss=0.1329, simple_loss=0.2061, pruned_loss=0.02989, over 4884.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02905, over 972507.44 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 04:49:27,381 INFO [train.py:715] (7/8) Epoch 17, batch 31850, loss[loss=0.1439, simple_loss=0.2111, pruned_loss=0.03834, over 4974.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02881, over 972049.14 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:50:06,509 INFO [train.py:715] (7/8) Epoch 17, batch 31900, loss[loss=0.1522, simple_loss=0.2164, pruned_loss=0.04403, over 4990.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02873, over 972406.91 frames.], batch size: 20, lr: 1.29e-04 +2022-05-09 04:50:45,989 INFO [train.py:715] (7/8) Epoch 17, batch 31950, loss[loss=0.1449, simple_loss=0.2094, pruned_loss=0.0402, over 4903.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02853, over 971830.63 frames.], batch size: 39, lr: 1.29e-04 +2022-05-09 04:51:25,762 
INFO [train.py:715] (7/8) Epoch 17, batch 32000, loss[loss=0.1528, simple_loss=0.2203, pruned_loss=0.04259, over 4810.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02866, over 971209.77 frames.], batch size: 27, lr: 1.29e-04 +2022-05-09 04:52:04,647 INFO [train.py:715] (7/8) Epoch 17, batch 32050, loss[loss=0.1353, simple_loss=0.2135, pruned_loss=0.0285, over 4886.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02826, over 970926.13 frames.], batch size: 22, lr: 1.29e-04 +2022-05-09 04:52:44,370 INFO [train.py:715] (7/8) Epoch 17, batch 32100, loss[loss=0.1304, simple_loss=0.2132, pruned_loss=0.02379, over 4803.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02859, over 970938.85 frames.], batch size: 25, lr: 1.29e-04 +2022-05-09 04:53:23,406 INFO [train.py:715] (7/8) Epoch 17, batch 32150, loss[loss=0.155, simple_loss=0.2265, pruned_loss=0.04178, over 4836.00 frames.], tot_loss[loss=0.132, simple_loss=0.2071, pruned_loss=0.02846, over 971326.68 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:54:02,759 INFO [train.py:715] (7/8) Epoch 17, batch 32200, loss[loss=0.1626, simple_loss=0.2318, pruned_loss=0.04673, over 4974.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02849, over 971959.71 frames.], batch size: 35, lr: 1.29e-04 +2022-05-09 04:54:45,064 INFO [train.py:715] (7/8) Epoch 17, batch 32250, loss[loss=0.1371, simple_loss=0.2005, pruned_loss=0.03683, over 4745.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02857, over 972681.70 frames.], batch size: 16, lr: 1.29e-04 +2022-05-09 04:55:24,424 INFO [train.py:715] (7/8) Epoch 17, batch 32300, loss[loss=0.1146, simple_loss=0.197, pruned_loss=0.01609, over 4978.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02851, over 973676.48 frames.], batch size: 28, lr: 1.29e-04 +2022-05-09 04:56:04,325 INFO [train.py:715] (7/8) Epoch 17, batch 32350, loss[loss=0.149, simple_loss=0.2157, pruned_loss=0.04111, over 4971.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02828, over 973553.48 frames.], batch size: 15, lr: 1.29e-04 +2022-05-09 04:56:43,386 INFO [train.py:715] (7/8) Epoch 17, batch 32400, loss[loss=0.1316, simple_loss=0.2033, pruned_loss=0.02996, over 4929.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02839, over 973146.97 frames.], batch size: 29, lr: 1.29e-04 +2022-05-09 04:57:22,533 INFO [train.py:715] (7/8) Epoch 17, batch 32450, loss[loss=0.1302, simple_loss=0.205, pruned_loss=0.02765, over 4696.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02853, over 972334.25 frames.], batch size: 15, lr: 1.28e-04 +2022-05-09 04:58:02,558 INFO [train.py:715] (7/8) Epoch 17, batch 32500, loss[loss=0.1155, simple_loss=0.1941, pruned_loss=0.01847, over 4795.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.0288, over 972485.81 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 04:58:41,969 INFO [train.py:715] (7/8) Epoch 17, batch 32550, loss[loss=0.1186, simple_loss=0.1928, pruned_loss=0.02219, over 4782.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02834, over 972316.98 frames.], batch size: 18, lr: 1.28e-04 +2022-05-09 04:59:21,561 INFO [train.py:715] (7/8) Epoch 17, batch 32600, loss[loss=0.1276, simple_loss=0.2052, pruned_loss=0.02499, over 4916.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02869, over 971952.28 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:00:01,072 INFO 
[train.py:715] (7/8) Epoch 17, batch 32650, loss[loss=0.1273, simple_loss=0.2023, pruned_loss=0.02616, over 4660.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02894, over 971783.94 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:00:39,808 INFO [train.py:715] (7/8) Epoch 17, batch 32700, loss[loss=0.1176, simple_loss=0.187, pruned_loss=0.02411, over 4816.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.0287, over 972198.76 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 05:01:19,990 INFO [train.py:715] (7/8) Epoch 17, batch 32750, loss[loss=0.142, simple_loss=0.2149, pruned_loss=0.03456, over 4883.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02858, over 973489.40 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:01:59,339 INFO [train.py:715] (7/8) Epoch 17, batch 32800, loss[loss=0.1504, simple_loss=0.2229, pruned_loss=0.03893, over 4738.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02868, over 973690.92 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:02:38,978 INFO [train.py:715] (7/8) Epoch 17, batch 32850, loss[loss=0.1144, simple_loss=0.1841, pruned_loss=0.02235, over 4941.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02894, over 972837.18 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 05:03:18,521 INFO [train.py:715] (7/8) Epoch 17, batch 32900, loss[loss=0.1397, simple_loss=0.2049, pruned_loss=0.03724, over 4768.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02867, over 973343.25 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:03:58,026 INFO [train.py:715] (7/8) Epoch 17, batch 32950, loss[loss=0.1367, simple_loss=0.2047, pruned_loss=0.03435, over 4941.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02898, over 972792.38 frames.], batch size: 35, lr: 1.28e-04 +2022-05-09 05:04:36,967 INFO [train.py:715] (7/8) Epoch 17, batch 33000, loss[loss=0.143, simple_loss=0.221, pruned_loss=0.0325, over 4954.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02905, over 973149.72 frames.], batch size: 15, lr: 1.28e-04 +2022-05-09 05:04:36,968 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 05:04:49,646 INFO [train.py:742] (7/8) Epoch 17, validation: loss=0.1049, simple_loss=0.1881, pruned_loss=0.0108, over 914524.00 frames. 
+2022-05-09 05:05:28,989 INFO [train.py:715] (7/8) Epoch 17, batch 33050, loss[loss=0.113, simple_loss=0.1904, pruned_loss=0.0178, over 4935.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02876, over 973288.44 frames.], batch size: 35, lr: 1.28e-04 +2022-05-09 05:06:08,150 INFO [train.py:715] (7/8) Epoch 17, batch 33100, loss[loss=0.1437, simple_loss=0.2148, pruned_loss=0.0363, over 4826.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02913, over 972605.10 frames.], batch size: 15, lr: 1.28e-04 +2022-05-09 05:06:47,451 INFO [train.py:715] (7/8) Epoch 17, batch 33150, loss[loss=0.1453, simple_loss=0.2177, pruned_loss=0.03648, over 4767.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02917, over 971817.64 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:07:27,183 INFO [train.py:715] (7/8) Epoch 17, batch 33200, loss[loss=0.1372, simple_loss=0.2131, pruned_loss=0.03068, over 4930.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02922, over 971813.13 frames.], batch size: 35, lr: 1.28e-04 +2022-05-09 05:08:06,795 INFO [train.py:715] (7/8) Epoch 17, batch 33250, loss[loss=0.1642, simple_loss=0.239, pruned_loss=0.04473, over 4995.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2078, pruned_loss=0.02926, over 972202.24 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:08:46,108 INFO [train.py:715] (7/8) Epoch 17, batch 33300, loss[loss=0.1095, simple_loss=0.1818, pruned_loss=0.01856, over 4856.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2092, pruned_loss=0.03014, over 972109.38 frames.], batch size: 20, lr: 1.28e-04 +2022-05-09 05:09:25,685 INFO [train.py:715] (7/8) Epoch 17, batch 33350, loss[loss=0.1135, simple_loss=0.1921, pruned_loss=0.01746, over 4819.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.02988, over 972551.80 frames.], batch size: 12, lr: 1.28e-04 +2022-05-09 05:10:05,485 INFO [train.py:715] (7/8) Epoch 17, batch 33400, loss[loss=0.1076, simple_loss=0.1795, pruned_loss=0.01781, over 4845.00 frames.], tot_loss[loss=0.1346, simple_loss=0.2091, pruned_loss=0.03009, over 972877.05 frames.], batch size: 32, lr: 1.28e-04 +2022-05-09 05:10:44,827 INFO [train.py:715] (7/8) Epoch 17, batch 33450, loss[loss=0.1406, simple_loss=0.2097, pruned_loss=0.03574, over 4971.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2082, pruned_loss=0.02952, over 972969.94 frames.], batch size: 31, lr: 1.28e-04 +2022-05-09 05:11:24,376 INFO [train.py:715] (7/8) Epoch 17, batch 33500, loss[loss=0.1457, simple_loss=0.2289, pruned_loss=0.03124, over 4899.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.0299, over 973001.23 frames.], batch size: 17, lr: 1.28e-04 +2022-05-09 05:12:04,586 INFO [train.py:715] (7/8) Epoch 17, batch 33550, loss[loss=0.1211, simple_loss=0.1917, pruned_loss=0.02522, over 4803.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02967, over 973094.19 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 05:12:44,746 INFO [train.py:715] (7/8) Epoch 17, batch 33600, loss[loss=0.1474, simple_loss=0.209, pruned_loss=0.04293, over 4828.00 frames.], tot_loss[loss=0.133, simple_loss=0.207, pruned_loss=0.02951, over 972046.43 frames.], batch size: 30, lr: 1.28e-04 +2022-05-09 05:13:23,724 INFO [train.py:715] (7/8) Epoch 17, batch 33650, loss[loss=0.1414, simple_loss=0.2031, pruned_loss=0.03982, over 4849.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02932, over 972122.74 frames.], batch size: 30, lr: 1.28e-04 +2022-05-09 
05:14:03,361 INFO [train.py:715] (7/8) Epoch 17, batch 33700, loss[loss=0.1279, simple_loss=0.2055, pruned_loss=0.02512, over 4820.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02938, over 971570.14 frames.], batch size: 27, lr: 1.28e-04 +2022-05-09 05:14:42,579 INFO [train.py:715] (7/8) Epoch 17, batch 33750, loss[loss=0.1119, simple_loss=0.1862, pruned_loss=0.0188, over 4757.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.02879, over 971612.70 frames.], batch size: 12, lr: 1.28e-04 +2022-05-09 05:15:21,394 INFO [train.py:715] (7/8) Epoch 17, batch 33800, loss[loss=0.1244, simple_loss=0.1894, pruned_loss=0.02968, over 4824.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02888, over 972357.75 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:16:01,531 INFO [train.py:715] (7/8) Epoch 17, batch 33850, loss[loss=0.1253, simple_loss=0.1965, pruned_loss=0.02711, over 4816.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02895, over 972321.13 frames.], batch size: 27, lr: 1.28e-04 +2022-05-09 05:16:41,837 INFO [train.py:715] (7/8) Epoch 17, batch 33900, loss[loss=0.1103, simple_loss=0.1802, pruned_loss=0.02016, over 4923.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02927, over 972023.21 frames.], batch size: 29, lr: 1.28e-04 +2022-05-09 05:17:21,089 INFO [train.py:715] (7/8) Epoch 17, batch 33950, loss[loss=0.1255, simple_loss=0.2047, pruned_loss=0.02316, over 4929.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02893, over 973151.02 frames.], batch size: 18, lr: 1.28e-04 +2022-05-09 05:18:00,094 INFO [train.py:715] (7/8) Epoch 17, batch 34000, loss[loss=0.1349, simple_loss=0.1988, pruned_loss=0.03555, over 4924.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02901, over 973245.44 frames.], batch size: 23, lr: 1.28e-04 +2022-05-09 05:18:39,513 INFO [train.py:715] (7/8) Epoch 17, batch 34050, loss[loss=0.1391, simple_loss=0.2194, pruned_loss=0.02942, over 4813.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02893, over 973641.82 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 05:19:19,505 INFO [train.py:715] (7/8) Epoch 17, batch 34100, loss[loss=0.1329, simple_loss=0.2092, pruned_loss=0.02829, over 4738.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02864, over 972539.34 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:19:58,308 INFO [train.py:715] (7/8) Epoch 17, batch 34150, loss[loss=0.1173, simple_loss=0.188, pruned_loss=0.0233, over 4941.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02923, over 972132.63 frames.], batch size: 23, lr: 1.28e-04 +2022-05-09 05:20:37,452 INFO [train.py:715] (7/8) Epoch 17, batch 34200, loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02853, over 4796.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02931, over 971597.01 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:21:16,562 INFO [train.py:715] (7/8) Epoch 17, batch 34250, loss[loss=0.149, simple_loss=0.2164, pruned_loss=0.0408, over 4952.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02911, over 971108.37 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:21:55,282 INFO [train.py:715] (7/8) Epoch 17, batch 34300, loss[loss=0.1227, simple_loss=0.1952, pruned_loss=0.02511, over 4828.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02946, over 971171.07 frames.], batch size: 30, lr: 1.28e-04 +2022-05-09 05:22:34,167 
INFO [train.py:715] (7/8) Epoch 17, batch 34350, loss[loss=0.162, simple_loss=0.2299, pruned_loss=0.0471, over 4966.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02913, over 971923.66 frames.], batch size: 39, lr: 1.28e-04 +2022-05-09 05:23:13,526 INFO [train.py:715] (7/8) Epoch 17, batch 34400, loss[loss=0.1108, simple_loss=0.1795, pruned_loss=0.0211, over 4958.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02926, over 971818.81 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:23:52,516 INFO [train.py:715] (7/8) Epoch 17, batch 34450, loss[loss=0.1423, simple_loss=0.2106, pruned_loss=0.03698, over 4884.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02941, over 971929.66 frames.], batch size: 16, lr: 1.28e-04 +2022-05-09 05:24:30,971 INFO [train.py:715] (7/8) Epoch 17, batch 34500, loss[loss=0.1146, simple_loss=0.1843, pruned_loss=0.02246, over 4771.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02935, over 972208.31 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:25:09,844 INFO [train.py:715] (7/8) Epoch 17, batch 34550, loss[loss=0.1588, simple_loss=0.2293, pruned_loss=0.04416, over 4934.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.02908, over 971708.13 frames.], batch size: 21, lr: 1.28e-04 +2022-05-09 05:25:48,997 INFO [train.py:715] (7/8) Epoch 17, batch 34600, loss[loss=0.1252, simple_loss=0.1914, pruned_loss=0.02955, over 4835.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2073, pruned_loss=0.0291, over 971216.44 frames.], batch size: 13, lr: 1.28e-04 +2022-05-09 05:26:27,697 INFO [train.py:715] (7/8) Epoch 17, batch 34650, loss[loss=0.1464, simple_loss=0.2148, pruned_loss=0.03905, over 4976.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02918, over 972602.02 frames.], batch size: 31, lr: 1.28e-04 +2022-05-09 05:27:06,964 INFO [train.py:715] (7/8) Epoch 17, batch 34700, loss[loss=0.1072, simple_loss=0.1887, pruned_loss=0.01282, over 4770.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02919, over 972522.07 frames.], batch size: 19, lr: 1.28e-04 +2022-05-09 05:27:45,509 INFO [train.py:715] (7/8) Epoch 17, batch 34750, loss[loss=0.1292, simple_loss=0.2034, pruned_loss=0.02751, over 4765.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02881, over 971917.58 frames.], batch size: 14, lr: 1.28e-04 +2022-05-09 05:28:22,200 INFO [train.py:715] (7/8) Epoch 17, batch 34800, loss[loss=0.1129, simple_loss=0.1797, pruned_loss=0.02309, over 4823.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2059, pruned_loss=0.02889, over 971453.49 frames.], batch size: 12, lr: 1.28e-04 +2022-05-09 05:29:12,359 INFO [train.py:715] (7/8) Epoch 18, batch 0, loss[loss=0.1343, simple_loss=0.2169, pruned_loss=0.02588, over 4761.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2169, pruned_loss=0.02588, over 4761.00 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 05:29:51,057 INFO [train.py:715] (7/8) Epoch 18, batch 50, loss[loss=0.1146, simple_loss=0.1933, pruned_loss=0.01792, over 4966.00 frames.], tot_loss[loss=0.1326, simple_loss=0.208, pruned_loss=0.02855, over 218666.21 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 05:30:31,047 INFO [train.py:715] (7/8) Epoch 18, batch 100, loss[loss=0.1416, simple_loss=0.2113, pruned_loss=0.03598, over 4762.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02877, over 385174.66 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:31:10,960 INFO [train.py:715] 
(7/8) Epoch 18, batch 150, loss[loss=0.1443, simple_loss=0.2063, pruned_loss=0.04109, over 4788.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2069, pruned_loss=0.02994, over 515376.03 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:31:50,259 INFO [train.py:715] (7/8) Epoch 18, batch 200, loss[loss=0.1288, simple_loss=0.2032, pruned_loss=0.02725, over 4962.00 frames.], tot_loss[loss=0.1342, simple_loss=0.2077, pruned_loss=0.0304, over 617247.62 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:32:29,111 INFO [train.py:715] (7/8) Epoch 18, batch 250, loss[loss=0.1528, simple_loss=0.2152, pruned_loss=0.04514, over 4876.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2072, pruned_loss=0.03009, over 695971.35 frames.], batch size: 38, lr: 1.25e-04 +2022-05-09 05:33:08,565 INFO [train.py:715] (7/8) Epoch 18, batch 300, loss[loss=0.1019, simple_loss=0.18, pruned_loss=0.01189, over 4861.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03006, over 757422.81 frames.], batch size: 20, lr: 1.25e-04 +2022-05-09 05:33:48,415 INFO [train.py:715] (7/8) Epoch 18, batch 350, loss[loss=0.1993, simple_loss=0.2848, pruned_loss=0.05687, over 4942.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2074, pruned_loss=0.02961, over 804818.64 frames.], batch size: 21, lr: 1.25e-04 +2022-05-09 05:34:27,358 INFO [train.py:715] (7/8) Epoch 18, batch 400, loss[loss=0.1428, simple_loss=0.2205, pruned_loss=0.03249, over 4790.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02939, over 842238.99 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:35:07,147 INFO [train.py:715] (7/8) Epoch 18, batch 450, loss[loss=0.1234, simple_loss=0.1935, pruned_loss=0.02662, over 4904.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2076, pruned_loss=0.02934, over 870703.24 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:35:47,328 INFO [train.py:715] (7/8) Epoch 18, batch 500, loss[loss=0.1464, simple_loss=0.2176, pruned_loss=0.03762, over 4911.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.0291, over 893602.37 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:36:27,092 INFO [train.py:715] (7/8) Epoch 18, batch 550, loss[loss=0.1275, simple_loss=0.2073, pruned_loss=0.02379, over 4959.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02883, over 911441.80 frames.], batch size: 39, lr: 1.25e-04 +2022-05-09 05:37:06,106 INFO [train.py:715] (7/8) Epoch 18, batch 600, loss[loss=0.1364, simple_loss=0.2103, pruned_loss=0.03124, over 4902.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02873, over 925370.33 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 05:37:45,640 INFO [train.py:715] (7/8) Epoch 18, batch 650, loss[loss=0.1358, simple_loss=0.209, pruned_loss=0.03131, over 4827.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02897, over 936072.93 frames.], batch size: 30, lr: 1.25e-04 +2022-05-09 05:38:25,477 INFO [train.py:715] (7/8) Epoch 18, batch 700, loss[loss=0.1116, simple_loss=0.1883, pruned_loss=0.01742, over 4863.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02895, over 943384.17 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 05:39:04,430 INFO [train.py:715] (7/8) Epoch 18, batch 750, loss[loss=0.1315, simple_loss=0.2023, pruned_loss=0.03032, over 4789.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02943, over 949446.88 frames.], batch size: 13, lr: 1.25e-04 +2022-05-09 05:39:43,256 INFO [train.py:715] (7/8) Epoch 18, batch 800, 
loss[loss=0.1065, simple_loss=0.1805, pruned_loss=0.01629, over 4797.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.02958, over 955087.27 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 05:40:22,749 INFO [train.py:715] (7/8) Epoch 18, batch 850, loss[loss=0.1173, simple_loss=0.1991, pruned_loss=0.01772, over 4913.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02947, over 958831.69 frames.], batch size: 29, lr: 1.25e-04 +2022-05-09 05:41:02,300 INFO [train.py:715] (7/8) Epoch 18, batch 900, loss[loss=0.1324, simple_loss=0.1927, pruned_loss=0.03605, over 4965.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02972, over 962137.52 frames.], batch size: 35, lr: 1.25e-04 +2022-05-09 05:41:41,279 INFO [train.py:715] (7/8) Epoch 18, batch 950, loss[loss=0.1575, simple_loss=0.2331, pruned_loss=0.04095, over 4937.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.0293, over 964926.49 frames.], batch size: 21, lr: 1.25e-04 +2022-05-09 05:42:20,889 INFO [train.py:715] (7/8) Epoch 18, batch 1000, loss[loss=0.1542, simple_loss=0.2287, pruned_loss=0.0399, over 4755.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2076, pruned_loss=0.02998, over 966337.58 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 05:43:00,528 INFO [train.py:715] (7/8) Epoch 18, batch 1050, loss[loss=0.1362, simple_loss=0.2157, pruned_loss=0.02838, over 4821.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02979, over 968059.33 frames.], batch size: 27, lr: 1.25e-04 +2022-05-09 05:43:39,932 INFO [train.py:715] (7/8) Epoch 18, batch 1100, loss[loss=0.1241, simple_loss=0.2041, pruned_loss=0.02207, over 4856.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2069, pruned_loss=0.02984, over 968286.99 frames.], batch size: 20, lr: 1.25e-04 +2022-05-09 05:44:18,726 INFO [train.py:715] (7/8) Epoch 18, batch 1150, loss[loss=0.1379, simple_loss=0.2109, pruned_loss=0.0325, over 4972.00 frames.], tot_loss[loss=0.133, simple_loss=0.2067, pruned_loss=0.02969, over 969244.31 frames.], batch size: 35, lr: 1.25e-04 +2022-05-09 05:44:58,551 INFO [train.py:715] (7/8) Epoch 18, batch 1200, loss[loss=0.1308, simple_loss=0.1934, pruned_loss=0.03407, over 4777.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2073, pruned_loss=0.03, over 970157.45 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 05:45:38,522 INFO [train.py:715] (7/8) Epoch 18, batch 1250, loss[loss=0.1463, simple_loss=0.217, pruned_loss=0.0378, over 4839.00 frames.], tot_loss[loss=0.134, simple_loss=0.2075, pruned_loss=0.03022, over 969915.02 frames.], batch size: 27, lr: 1.25e-04 +2022-05-09 05:46:17,552 INFO [train.py:715] (7/8) Epoch 18, batch 1300, loss[loss=0.1407, simple_loss=0.2151, pruned_loss=0.03312, over 4773.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2071, pruned_loss=0.03003, over 970733.66 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:46:56,372 INFO [train.py:715] (7/8) Epoch 18, batch 1350, loss[loss=0.1436, simple_loss=0.2123, pruned_loss=0.0374, over 4931.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2066, pruned_loss=0.03, over 971217.86 frames.], batch size: 23, lr: 1.25e-04 +2022-05-09 05:47:35,781 INFO [train.py:715] (7/8) Epoch 18, batch 1400, loss[loss=0.1419, simple_loss=0.2155, pruned_loss=0.03412, over 4697.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2062, pruned_loss=0.02956, over 970838.21 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:48:15,007 INFO [train.py:715] (7/8) Epoch 18, batch 1450, loss[loss=0.1605, 
simple_loss=0.2388, pruned_loss=0.04113, over 4866.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2069, pruned_loss=0.02972, over 971503.81 frames.], batch size: 20, lr: 1.25e-04 +2022-05-09 05:48:53,403 INFO [train.py:715] (7/8) Epoch 18, batch 1500, loss[loss=0.1585, simple_loss=0.2274, pruned_loss=0.04481, over 4880.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02928, over 970975.85 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 05:49:32,906 INFO [train.py:715] (7/8) Epoch 18, batch 1550, loss[loss=0.1029, simple_loss=0.1715, pruned_loss=0.01717, over 4985.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.0295, over 971141.08 frames.], batch size: 28, lr: 1.25e-04 +2022-05-09 05:50:12,322 INFO [train.py:715] (7/8) Epoch 18, batch 1600, loss[loss=0.1213, simple_loss=0.1942, pruned_loss=0.02421, over 4907.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02939, over 972077.93 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 05:50:51,520 INFO [train.py:715] (7/8) Epoch 18, batch 1650, loss[loss=0.1124, simple_loss=0.1969, pruned_loss=0.01397, over 4897.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02896, over 971491.39 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 05:51:30,467 INFO [train.py:715] (7/8) Epoch 18, batch 1700, loss[loss=0.1595, simple_loss=0.2389, pruned_loss=0.04006, over 4874.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.029, over 972811.81 frames.], batch size: 22, lr: 1.25e-04 +2022-05-09 05:52:09,884 INFO [train.py:715] (7/8) Epoch 18, batch 1750, loss[loss=0.1305, simple_loss=0.1975, pruned_loss=0.03172, over 4793.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02928, over 972559.85 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:52:49,172 INFO [train.py:715] (7/8) Epoch 18, batch 1800, loss[loss=0.1339, simple_loss=0.2016, pruned_loss=0.03312, over 4831.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02919, over 971650.18 frames.], batch size: 12, lr: 1.25e-04 +2022-05-09 05:53:27,456 INFO [train.py:715] (7/8) Epoch 18, batch 1850, loss[loss=0.1296, simple_loss=0.2147, pruned_loss=0.02224, over 4877.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02929, over 971646.66 frames.], batch size: 38, lr: 1.25e-04 +2022-05-09 05:54:06,245 INFO [train.py:715] (7/8) Epoch 18, batch 1900, loss[loss=0.1449, simple_loss=0.2099, pruned_loss=0.03998, over 4988.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02974, over 972119.74 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 05:54:45,623 INFO [train.py:715] (7/8) Epoch 18, batch 1950, loss[loss=0.1203, simple_loss=0.2007, pruned_loss=0.01993, over 4975.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2069, pruned_loss=0.02967, over 972799.09 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:55:24,355 INFO [train.py:715] (7/8) Epoch 18, batch 2000, loss[loss=0.1497, simple_loss=0.2248, pruned_loss=0.03733, over 4884.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02983, over 972821.88 frames.], batch size: 22, lr: 1.25e-04 +2022-05-09 05:56:02,843 INFO [train.py:715] (7/8) Epoch 18, batch 2050, loss[loss=0.1149, simple_loss=0.1919, pruned_loss=0.01897, over 4805.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.0298, over 973043.58 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 05:56:42,081 INFO [train.py:715] (7/8) Epoch 18, batch 2100, loss[loss=0.1246, simple_loss=0.1995, 
pruned_loss=0.02488, over 4779.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2075, pruned_loss=0.02973, over 972020.81 frames.], batch size: 18, lr: 1.25e-04 +2022-05-09 05:57:21,529 INFO [train.py:715] (7/8) Epoch 18, batch 2150, loss[loss=0.1308, simple_loss=0.1903, pruned_loss=0.03559, over 4797.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02934, over 972288.76 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 05:57:59,833 INFO [train.py:715] (7/8) Epoch 18, batch 2200, loss[loss=0.1562, simple_loss=0.2344, pruned_loss=0.03899, over 4873.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02959, over 971919.49 frames.], batch size: 22, lr: 1.25e-04 +2022-05-09 05:58:39,481 INFO [train.py:715] (7/8) Epoch 18, batch 2250, loss[loss=0.1417, simple_loss=0.2172, pruned_loss=0.03306, over 4735.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2084, pruned_loss=0.02986, over 971451.25 frames.], batch size: 16, lr: 1.25e-04 +2022-05-09 05:59:18,831 INFO [train.py:715] (7/8) Epoch 18, batch 2300, loss[loss=0.1441, simple_loss=0.2211, pruned_loss=0.03352, over 4705.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2081, pruned_loss=0.03022, over 971900.12 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 05:59:57,620 INFO [train.py:715] (7/8) Epoch 18, batch 2350, loss[loss=0.1227, simple_loss=0.1949, pruned_loss=0.0252, over 4793.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02944, over 972067.01 frames.], batch size: 21, lr: 1.25e-04 +2022-05-09 06:00:36,231 INFO [train.py:715] (7/8) Epoch 18, batch 2400, loss[loss=0.1617, simple_loss=0.2213, pruned_loss=0.05099, over 4964.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02936, over 972774.37 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 06:01:15,694 INFO [train.py:715] (7/8) Epoch 18, batch 2450, loss[loss=0.1132, simple_loss=0.1875, pruned_loss=0.0194, over 4819.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.02974, over 971942.47 frames.], batch size: 25, lr: 1.25e-04 +2022-05-09 06:01:55,089 INFO [train.py:715] (7/8) Epoch 18, batch 2500, loss[loss=0.1496, simple_loss=0.2273, pruned_loss=0.03602, over 4881.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2075, pruned_loss=0.02952, over 971779.50 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 06:02:33,096 INFO [train.py:715] (7/8) Epoch 18, batch 2550, loss[loss=0.111, simple_loss=0.1861, pruned_loss=0.01792, over 4828.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02898, over 972006.16 frames.], batch size: 26, lr: 1.25e-04 +2022-05-09 06:03:11,868 INFO [train.py:715] (7/8) Epoch 18, batch 2600, loss[loss=0.1043, simple_loss=0.1801, pruned_loss=0.01421, over 4930.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02872, over 972126.25 frames.], batch size: 23, lr: 1.25e-04 +2022-05-09 06:03:51,788 INFO [train.py:715] (7/8) Epoch 18, batch 2650, loss[loss=0.149, simple_loss=0.2132, pruned_loss=0.04236, over 4903.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02875, over 972161.91 frames.], batch size: 19, lr: 1.25e-04 +2022-05-09 06:04:30,528 INFO [train.py:715] (7/8) Epoch 18, batch 2700, loss[loss=0.1447, simple_loss=0.2156, pruned_loss=0.0369, over 4963.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.02855, over 972236.26 frames.], batch size: 35, lr: 1.25e-04 +2022-05-09 06:05:08,888 INFO [train.py:715] (7/8) Epoch 18, batch 2750, loss[loss=0.1357, simple_loss=0.2179, pruned_loss=0.0268, over 
4876.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02828, over 971969.50 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 06:05:47,974 INFO [train.py:715] (7/8) Epoch 18, batch 2800, loss[loss=0.1304, simple_loss=0.196, pruned_loss=0.03241, over 4785.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.0286, over 972323.44 frames.], batch size: 17, lr: 1.25e-04 +2022-05-09 06:06:27,520 INFO [train.py:715] (7/8) Epoch 18, batch 2850, loss[loss=0.1359, simple_loss=0.214, pruned_loss=0.02889, over 4829.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.0285, over 972598.33 frames.], batch size: 26, lr: 1.25e-04 +2022-05-09 06:07:06,095 INFO [train.py:715] (7/8) Epoch 18, batch 2900, loss[loss=0.1428, simple_loss=0.2221, pruned_loss=0.0318, over 4977.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.0287, over 972934.38 frames.], batch size: 14, lr: 1.25e-04 +2022-05-09 06:07:44,920 INFO [train.py:715] (7/8) Epoch 18, batch 2950, loss[loss=0.1208, simple_loss=0.1989, pruned_loss=0.02129, over 4793.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02883, over 972918.21 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 06:08:24,285 INFO [train.py:715] (7/8) Epoch 18, batch 3000, loss[loss=0.1549, simple_loss=0.2342, pruned_loss=0.03776, over 4811.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.0285, over 973102.26 frames.], batch size: 26, lr: 1.25e-04 +2022-05-09 06:08:24,286 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 06:08:34,097 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1047, simple_loss=0.1881, pruned_loss=0.01065, over 914524.00 frames. +2022-05-09 06:09:14,109 INFO [train.py:715] (7/8) Epoch 18, batch 3050, loss[loss=0.1461, simple_loss=0.2221, pruned_loss=0.03511, over 4815.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2063, pruned_loss=0.02819, over 972691.19 frames.], batch size: 25, lr: 1.25e-04 +2022-05-09 06:09:52,624 INFO [train.py:715] (7/8) Epoch 18, batch 3100, loss[loss=0.119, simple_loss=0.2019, pruned_loss=0.018, over 4963.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2061, pruned_loss=0.02809, over 972553.03 frames.], batch size: 24, lr: 1.25e-04 +2022-05-09 06:10:31,514 INFO [train.py:715] (7/8) Epoch 18, batch 3150, loss[loss=0.1214, simple_loss=0.1909, pruned_loss=0.02593, over 4836.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02783, over 972259.18 frames.], batch size: 15, lr: 1.25e-04 +2022-05-09 06:11:10,548 INFO [train.py:715] (7/8) Epoch 18, batch 3200, loss[loss=0.1425, simple_loss=0.2139, pruned_loss=0.03549, over 4970.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02829, over 971771.87 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 06:11:50,036 INFO [train.py:715] (7/8) Epoch 18, batch 3250, loss[loss=0.115, simple_loss=0.1852, pruned_loss=0.02243, over 4756.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02862, over 972728.10 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 06:12:28,196 INFO [train.py:715] (7/8) Epoch 18, batch 3300, loss[loss=0.1073, simple_loss=0.1836, pruned_loss=0.01547, over 4965.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02833, over 972572.62 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:13:07,653 INFO [train.py:715] (7/8) Epoch 18, batch 3350, loss[loss=0.1641, simple_loss=0.2347, pruned_loss=0.04679, over 4941.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2075, 
pruned_loss=0.02885, over 971766.16 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 06:13:47,790 INFO [train.py:715] (7/8) Epoch 18, batch 3400, loss[loss=0.1153, simple_loss=0.1981, pruned_loss=0.01628, over 4773.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2068, pruned_loss=0.02844, over 971943.25 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:14:26,395 INFO [train.py:715] (7/8) Epoch 18, batch 3450, loss[loss=0.142, simple_loss=0.2272, pruned_loss=0.02836, over 4840.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02843, over 972222.74 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:15:05,251 INFO [train.py:715] (7/8) Epoch 18, batch 3500, loss[loss=0.1284, simple_loss=0.2006, pruned_loss=0.02816, over 4984.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.02877, over 972940.12 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:15:45,334 INFO [train.py:715] (7/8) Epoch 18, batch 3550, loss[loss=0.1075, simple_loss=0.1836, pruned_loss=0.0157, over 4764.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02822, over 972459.34 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:16:24,512 INFO [train.py:715] (7/8) Epoch 18, batch 3600, loss[loss=0.1693, simple_loss=0.2502, pruned_loss=0.04416, over 4939.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02827, over 973475.62 frames.], batch size: 29, lr: 1.24e-04 +2022-05-09 06:17:03,260 INFO [train.py:715] (7/8) Epoch 18, batch 3650, loss[loss=0.156, simple_loss=0.2229, pruned_loss=0.04452, over 4856.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02858, over 973101.45 frames.], batch size: 20, lr: 1.24e-04 +2022-05-09 06:17:42,727 INFO [train.py:715] (7/8) Epoch 18, batch 3700, loss[loss=0.1211, simple_loss=0.2095, pruned_loss=0.01633, over 4906.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02868, over 973333.23 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:18:22,000 INFO [train.py:715] (7/8) Epoch 18, batch 3750, loss[loss=0.1181, simple_loss=0.1915, pruned_loss=0.02238, over 4838.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02843, over 972857.10 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:18:59,953 INFO [train.py:715] (7/8) Epoch 18, batch 3800, loss[loss=0.1341, simple_loss=0.2074, pruned_loss=0.03039, over 4828.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02833, over 972923.81 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 06:19:39,331 INFO [train.py:715] (7/8) Epoch 18, batch 3850, loss[loss=0.1214, simple_loss=0.1869, pruned_loss=0.02795, over 4846.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2067, pruned_loss=0.02903, over 972825.81 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:20:19,352 INFO [train.py:715] (7/8) Epoch 18, batch 3900, loss[loss=0.1106, simple_loss=0.1789, pruned_loss=0.02116, over 4983.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02892, over 973122.93 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:20:57,827 INFO [train.py:715] (7/8) Epoch 18, batch 3950, loss[loss=0.114, simple_loss=0.1876, pruned_loss=0.02019, over 4942.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02868, over 973185.17 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:21:37,236 INFO [train.py:715] (7/8) Epoch 18, batch 4000, loss[loss=0.1312, simple_loss=0.2142, pruned_loss=0.02412, over 4688.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02887, over 
972788.73 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:22:16,733 INFO [train.py:715] (7/8) Epoch 18, batch 4050, loss[loss=0.1143, simple_loss=0.1776, pruned_loss=0.02547, over 4768.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02841, over 972259.29 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:22:56,010 INFO [train.py:715] (7/8) Epoch 18, batch 4100, loss[loss=0.1354, simple_loss=0.1998, pruned_loss=0.03547, over 4787.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02845, over 971625.56 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:23:34,762 INFO [train.py:715] (7/8) Epoch 18, batch 4150, loss[loss=0.123, simple_loss=0.1964, pruned_loss=0.02486, over 4942.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2077, pruned_loss=0.02902, over 972352.97 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:24:14,200 INFO [train.py:715] (7/8) Epoch 18, batch 4200, loss[loss=0.1423, simple_loss=0.2105, pruned_loss=0.03704, over 4834.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02844, over 972782.16 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 06:24:53,580 INFO [train.py:715] (7/8) Epoch 18, batch 4250, loss[loss=0.1486, simple_loss=0.2232, pruned_loss=0.03702, over 4842.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2073, pruned_loss=0.0288, over 970963.05 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:25:32,489 INFO [train.py:715] (7/8) Epoch 18, batch 4300, loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.0286, over 4772.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02834, over 970841.32 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:26:12,601 INFO [train.py:715] (7/8) Epoch 18, batch 4350, loss[loss=0.1356, simple_loss=0.2102, pruned_loss=0.03047, over 4821.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02855, over 970602.26 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:26:52,062 INFO [train.py:715] (7/8) Epoch 18, batch 4400, loss[loss=0.1222, simple_loss=0.2006, pruned_loss=0.02195, over 4903.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02888, over 971067.60 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:27:31,552 INFO [train.py:715] (7/8) Epoch 18, batch 4450, loss[loss=0.1155, simple_loss=0.2057, pruned_loss=0.0127, over 4815.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02859, over 972245.58 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:28:09,900 INFO [train.py:715] (7/8) Epoch 18, batch 4500, loss[loss=0.1259, simple_loss=0.2034, pruned_loss=0.02419, over 4848.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02831, over 971433.09 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:28:49,169 INFO [train.py:715] (7/8) Epoch 18, batch 4550, loss[loss=0.1666, simple_loss=0.2267, pruned_loss=0.05326, over 4825.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02862, over 971474.06 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:29:29,013 INFO [train.py:715] (7/8) Epoch 18, batch 4600, loss[loss=0.1799, simple_loss=0.2482, pruned_loss=0.05579, over 4749.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02882, over 971665.27 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:30:07,896 INFO [train.py:715] (7/8) Epoch 18, batch 4650, loss[loss=0.1422, simple_loss=0.2229, pruned_loss=0.03072, over 4933.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.0286, over 971766.47 frames.], 
batch size: 23, lr: 1.24e-04 +2022-05-09 06:30:47,013 INFO [train.py:715] (7/8) Epoch 18, batch 4700, loss[loss=0.1096, simple_loss=0.1845, pruned_loss=0.01732, over 4774.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.02812, over 971401.21 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:31:26,066 INFO [train.py:715] (7/8) Epoch 18, batch 4750, loss[loss=0.1145, simple_loss=0.1981, pruned_loss=0.01545, over 4806.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02845, over 972134.45 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 06:32:06,183 INFO [train.py:715] (7/8) Epoch 18, batch 4800, loss[loss=0.1419, simple_loss=0.217, pruned_loss=0.0334, over 4965.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02845, over 972805.13 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 06:32:44,918 INFO [train.py:715] (7/8) Epoch 18, batch 4850, loss[loss=0.143, simple_loss=0.2159, pruned_loss=0.03503, over 4933.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02845, over 972353.94 frames.], batch size: 29, lr: 1.24e-04 +2022-05-09 06:33:24,376 INFO [train.py:715] (7/8) Epoch 18, batch 4900, loss[loss=0.1132, simple_loss=0.1856, pruned_loss=0.02042, over 4922.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2073, pruned_loss=0.02926, over 972335.29 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:34:04,560 INFO [train.py:715] (7/8) Epoch 18, batch 4950, loss[loss=0.1267, simple_loss=0.2074, pruned_loss=0.02299, over 4643.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02904, over 971838.96 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 06:34:43,675 INFO [train.py:715] (7/8) Epoch 18, batch 5000, loss[loss=0.1686, simple_loss=0.2398, pruned_loss=0.04867, over 4951.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02844, over 972139.16 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 06:35:22,355 INFO [train.py:715] (7/8) Epoch 18, batch 5050, loss[loss=0.1102, simple_loss=0.177, pruned_loss=0.0217, over 4856.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02818, over 972611.82 frames.], batch size: 20, lr: 1.24e-04 +2022-05-09 06:36:01,527 INFO [train.py:715] (7/8) Epoch 18, batch 5100, loss[loss=0.1046, simple_loss=0.176, pruned_loss=0.01663, over 4936.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02799, over 972586.23 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:36:41,088 INFO [train.py:715] (7/8) Epoch 18, batch 5150, loss[loss=0.1257, simple_loss=0.1995, pruned_loss=0.0259, over 4961.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02797, over 972408.70 frames.], batch size: 29, lr: 1.24e-04 +2022-05-09 06:37:19,652 INFO [train.py:715] (7/8) Epoch 18, batch 5200, loss[loss=0.1557, simple_loss=0.2289, pruned_loss=0.04127, over 4772.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 971980.39 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:37:59,021 INFO [train.py:715] (7/8) Epoch 18, batch 5250, loss[loss=0.1703, simple_loss=0.2452, pruned_loss=0.04771, over 4905.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02861, over 972074.18 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:38:38,941 INFO [train.py:715] (7/8) Epoch 18, batch 5300, loss[loss=0.1522, simple_loss=0.2214, pruned_loss=0.04147, over 4833.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02838, over 972569.35 frames.], batch size: 15, lr: 1.24e-04 
+2022-05-09 06:39:18,964 INFO [train.py:715] (7/8) Epoch 18, batch 5350, loss[loss=0.1091, simple_loss=0.178, pruned_loss=0.0201, over 4747.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02883, over 972624.57 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:39:57,060 INFO [train.py:715] (7/8) Epoch 18, batch 5400, loss[loss=0.1209, simple_loss=0.2019, pruned_loss=0.01996, over 4987.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02913, over 973166.00 frames.], batch size: 28, lr: 1.24e-04 +2022-05-09 06:40:38,716 INFO [train.py:715] (7/8) Epoch 18, batch 5450, loss[loss=0.1517, simple_loss=0.2337, pruned_loss=0.03488, over 4784.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02905, over 973584.92 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:41:19,095 INFO [train.py:715] (7/8) Epoch 18, batch 5500, loss[loss=0.111, simple_loss=0.1911, pruned_loss=0.01543, over 4784.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02893, over 974440.30 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:41:58,080 INFO [train.py:715] (7/8) Epoch 18, batch 5550, loss[loss=0.1188, simple_loss=0.1955, pruned_loss=0.02107, over 4814.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02866, over 974807.28 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:42:36,883 INFO [train.py:715] (7/8) Epoch 18, batch 5600, loss[loss=0.1196, simple_loss=0.1977, pruned_loss=0.02074, over 4962.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02849, over 974496.27 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:43:15,925 INFO [train.py:715] (7/8) Epoch 18, batch 5650, loss[loss=0.1433, simple_loss=0.223, pruned_loss=0.0318, over 4859.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02903, over 973855.64 frames.], batch size: 20, lr: 1.24e-04 +2022-05-09 06:43:55,548 INFO [train.py:715] (7/8) Epoch 18, batch 5700, loss[loss=0.1247, simple_loss=0.2028, pruned_loss=0.02326, over 4829.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02892, over 974354.72 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:44:33,668 INFO [train.py:715] (7/8) Epoch 18, batch 5750, loss[loss=0.1223, simple_loss=0.2029, pruned_loss=0.02086, over 4910.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02904, over 974668.23 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:45:12,600 INFO [train.py:715] (7/8) Epoch 18, batch 5800, loss[loss=0.1963, simple_loss=0.2499, pruned_loss=0.07136, over 4897.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02901, over 973795.49 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 06:45:52,340 INFO [train.py:715] (7/8) Epoch 18, batch 5850, loss[loss=0.1359, simple_loss=0.214, pruned_loss=0.02889, over 4921.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2076, pruned_loss=0.02961, over 972609.89 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 06:46:31,449 INFO [train.py:715] (7/8) Epoch 18, batch 5900, loss[loss=0.131, simple_loss=0.1951, pruned_loss=0.03343, over 4865.00 frames.], tot_loss[loss=0.1343, simple_loss=0.2089, pruned_loss=0.02987, over 972597.27 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 06:47:10,143 INFO [train.py:715] (7/8) Epoch 18, batch 5950, loss[loss=0.135, simple_loss=0.2123, pruned_loss=0.02883, over 4702.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2088, pruned_loss=0.02972, over 972242.19 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 06:47:49,549 INFO 
[train.py:715] (7/8) Epoch 18, batch 6000, loss[loss=0.1594, simple_loss=0.2314, pruned_loss=0.04374, over 4787.00 frames.], tot_loss[loss=0.1341, simple_loss=0.2087, pruned_loss=0.02974, over 972116.16 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:47:49,549 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 06:47:59,475 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.01075, over 914524.00 frames. +2022-05-09 06:48:39,112 INFO [train.py:715] (7/8) Epoch 18, batch 6050, loss[loss=0.1395, simple_loss=0.2184, pruned_loss=0.03025, over 4920.00 frames.], tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02933, over 973018.51 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:49:18,286 INFO [train.py:715] (7/8) Epoch 18, batch 6100, loss[loss=0.1419, simple_loss=0.2083, pruned_loss=0.03768, over 4785.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2082, pruned_loss=0.02955, over 972369.26 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 06:49:56,628 INFO [train.py:715] (7/8) Epoch 18, batch 6150, loss[loss=0.111, simple_loss=0.1856, pruned_loss=0.01821, over 4989.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02949, over 972148.65 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:50:35,921 INFO [train.py:715] (7/8) Epoch 18, batch 6200, loss[loss=0.113, simple_loss=0.1882, pruned_loss=0.01888, over 4916.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2082, pruned_loss=0.0298, over 972025.78 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:51:15,500 INFO [train.py:715] (7/8) Epoch 18, batch 6250, loss[loss=0.1121, simple_loss=0.1884, pruned_loss=0.01791, over 4758.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2082, pruned_loss=0.02957, over 971595.58 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:51:54,531 INFO [train.py:715] (7/8) Epoch 18, batch 6300, loss[loss=0.1157, simple_loss=0.1933, pruned_loss=0.01901, over 4872.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02935, over 972614.00 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 06:52:33,703 INFO [train.py:715] (7/8) Epoch 18, batch 6350, loss[loss=0.1222, simple_loss=0.1974, pruned_loss=0.02348, over 4882.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02901, over 972334.42 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 06:53:12,893 INFO [train.py:715] (7/8) Epoch 18, batch 6400, loss[loss=0.1642, simple_loss=0.2306, pruned_loss=0.04891, over 4815.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02941, over 972934.47 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 06:53:52,076 INFO [train.py:715] (7/8) Epoch 18, batch 6450, loss[loss=0.1295, simple_loss=0.1947, pruned_loss=0.03216, over 4803.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02923, over 973285.05 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 06:54:30,358 INFO [train.py:715] (7/8) Epoch 18, batch 6500, loss[loss=0.1486, simple_loss=0.2225, pruned_loss=0.03741, over 4785.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2067, pruned_loss=0.02921, over 972843.70 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:55:08,638 INFO [train.py:715] (7/8) Epoch 18, batch 6550, loss[loss=0.129, simple_loss=0.2091, pruned_loss=0.02441, over 4810.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.0288, over 972552.35 frames.], batch size: 26, lr: 1.24e-04 +2022-05-09 06:55:48,102 INFO [train.py:715] (7/8) Epoch 18, batch 6600, loss[loss=0.1207, 
simple_loss=0.1931, pruned_loss=0.02418, over 4962.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02899, over 972223.53 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 06:56:27,459 INFO [train.py:715] (7/8) Epoch 18, batch 6650, loss[loss=0.1402, simple_loss=0.2222, pruned_loss=0.02911, over 4761.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02894, over 972796.27 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:57:05,503 INFO [train.py:715] (7/8) Epoch 18, batch 6700, loss[loss=0.1486, simple_loss=0.2311, pruned_loss=0.03307, over 4922.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02919, over 973228.71 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 06:57:44,487 INFO [train.py:715] (7/8) Epoch 18, batch 6750, loss[loss=0.1315, simple_loss=0.2104, pruned_loss=0.02631, over 4989.00 frames.], tot_loss[loss=0.133, simple_loss=0.2074, pruned_loss=0.02928, over 972739.72 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 06:58:23,848 INFO [train.py:715] (7/8) Epoch 18, batch 6800, loss[loss=0.1429, simple_loss=0.2107, pruned_loss=0.03758, over 4858.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02932, over 972778.77 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 06:59:02,554 INFO [train.py:715] (7/8) Epoch 18, batch 6850, loss[loss=0.1154, simple_loss=0.1908, pruned_loss=0.02001, over 4949.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02919, over 972299.89 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 06:59:40,722 INFO [train.py:715] (7/8) Epoch 18, batch 6900, loss[loss=0.1232, simple_loss=0.2053, pruned_loss=0.02054, over 4980.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02934, over 972643.67 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:00:20,318 INFO [train.py:715] (7/8) Epoch 18, batch 6950, loss[loss=0.124, simple_loss=0.2054, pruned_loss=0.02125, over 4978.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02913, over 973150.17 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:00:59,041 INFO [train.py:715] (7/8) Epoch 18, batch 7000, loss[loss=0.1377, simple_loss=0.2144, pruned_loss=0.03047, over 4918.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02904, over 973143.94 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:01:37,422 INFO [train.py:715] (7/8) Epoch 18, batch 7050, loss[loss=0.1462, simple_loss=0.2152, pruned_loss=0.03866, over 4691.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02874, over 973392.31 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:02:16,626 INFO [train.py:715] (7/8) Epoch 18, batch 7100, loss[loss=0.1242, simple_loss=0.1973, pruned_loss=0.02555, over 4951.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02843, over 973958.29 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:02:56,203 INFO [train.py:715] (7/8) Epoch 18, batch 7150, loss[loss=0.1446, simple_loss=0.2172, pruned_loss=0.03593, over 4848.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.02807, over 973531.67 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:03:34,831 INFO [train.py:715] (7/8) Epoch 18, batch 7200, loss[loss=0.1316, simple_loss=0.2032, pruned_loss=0.03001, over 4902.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02822, over 973370.93 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:04:13,060 INFO [train.py:715] (7/8) Epoch 18, batch 7250, loss[loss=0.1452, simple_loss=0.2135, 
pruned_loss=0.03844, over 4874.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02822, over 973275.52 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:04:52,162 INFO [train.py:715] (7/8) Epoch 18, batch 7300, loss[loss=0.1484, simple_loss=0.2191, pruned_loss=0.03888, over 4860.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02863, over 972731.01 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:05:31,286 INFO [train.py:715] (7/8) Epoch 18, batch 7350, loss[loss=0.1103, simple_loss=0.1782, pruned_loss=0.0212, over 4841.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02858, over 972869.35 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:06:09,357 INFO [train.py:715] (7/8) Epoch 18, batch 7400, loss[loss=0.1551, simple_loss=0.2509, pruned_loss=0.02965, over 4978.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02857, over 972789.46 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:06:48,513 INFO [train.py:715] (7/8) Epoch 18, batch 7450, loss[loss=0.1467, simple_loss=0.2268, pruned_loss=0.03328, over 4942.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02893, over 972144.04 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:07:27,761 INFO [train.py:715] (7/8) Epoch 18, batch 7500, loss[loss=0.1056, simple_loss=0.1834, pruned_loss=0.01392, over 4810.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02918, over 972715.79 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:08:05,373 INFO [train.py:715] (7/8) Epoch 18, batch 7550, loss[loss=0.1244, simple_loss=0.2024, pruned_loss=0.02321, over 4755.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02888, over 971690.46 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:08:43,910 INFO [train.py:715] (7/8) Epoch 18, batch 7600, loss[loss=0.1348, simple_loss=0.2111, pruned_loss=0.02925, over 4804.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02905, over 971557.20 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:09:23,640 INFO [train.py:715] (7/8) Epoch 18, batch 7650, loss[loss=0.1192, simple_loss=0.1888, pruned_loss=0.02474, over 4758.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02861, over 972118.85 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:10:02,905 INFO [train.py:715] (7/8) Epoch 18, batch 7700, loss[loss=0.139, simple_loss=0.2156, pruned_loss=0.03122, over 4781.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02893, over 972286.71 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:10:41,610 INFO [train.py:715] (7/8) Epoch 18, batch 7750, loss[loss=0.1363, simple_loss=0.2121, pruned_loss=0.03022, over 4912.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02886, over 972528.38 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:11:21,216 INFO [train.py:715] (7/8) Epoch 18, batch 7800, loss[loss=0.1341, simple_loss=0.2052, pruned_loss=0.03149, over 4978.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02908, over 972102.23 frames.], batch size: 28, lr: 1.24e-04 +2022-05-09 07:12:01,097 INFO [train.py:715] (7/8) Epoch 18, batch 7850, loss[loss=0.1102, simple_loss=0.1877, pruned_loss=0.01638, over 4875.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02888, over 972378.63 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 07:12:40,477 INFO [train.py:715] (7/8) Epoch 18, batch 7900, loss[loss=0.1239, simple_loss=0.1867, pruned_loss=0.03054, 
over 4798.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02891, over 972541.41 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:13:19,677 INFO [train.py:715] (7/8) Epoch 18, batch 7950, loss[loss=0.1354, simple_loss=0.2146, pruned_loss=0.02814, over 4817.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02901, over 972614.48 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:13:59,116 INFO [train.py:715] (7/8) Epoch 18, batch 8000, loss[loss=0.148, simple_loss=0.2221, pruned_loss=0.03701, over 4824.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2066, pruned_loss=0.0294, over 972340.41 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:14:38,134 INFO [train.py:715] (7/8) Epoch 18, batch 8050, loss[loss=0.1179, simple_loss=0.1847, pruned_loss=0.02554, over 4833.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02953, over 971602.06 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:15:16,609 INFO [train.py:715] (7/8) Epoch 18, batch 8100, loss[loss=0.1254, simple_loss=0.2047, pruned_loss=0.02305, over 4821.00 frames.], tot_loss[loss=0.133, simple_loss=0.2069, pruned_loss=0.02957, over 971458.99 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:15:55,250 INFO [train.py:715] (7/8) Epoch 18, batch 8150, loss[loss=0.1424, simple_loss=0.2279, pruned_loss=0.02848, over 4816.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02966, over 971286.84 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:16:34,310 INFO [train.py:715] (7/8) Epoch 18, batch 8200, loss[loss=0.129, simple_loss=0.199, pruned_loss=0.02955, over 4858.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02949, over 971209.22 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:17:12,929 INFO [train.py:715] (7/8) Epoch 18, batch 8250, loss[loss=0.1372, simple_loss=0.2067, pruned_loss=0.03386, over 4963.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02913, over 971968.60 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 07:17:51,221 INFO [train.py:715] (7/8) Epoch 18, batch 8300, loss[loss=0.1546, simple_loss=0.2081, pruned_loss=0.05056, over 4970.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02877, over 971778.80 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:18:31,286 INFO [train.py:715] (7/8) Epoch 18, batch 8350, loss[loss=0.1418, simple_loss=0.208, pruned_loss=0.03781, over 4921.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2071, pruned_loss=0.0289, over 972239.12 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:19:10,482 INFO [train.py:715] (7/8) Epoch 18, batch 8400, loss[loss=0.1326, simple_loss=0.1997, pruned_loss=0.03273, over 4806.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.0291, over 971873.41 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:19:48,920 INFO [train.py:715] (7/8) Epoch 18, batch 8450, loss[loss=0.1977, simple_loss=0.254, pruned_loss=0.07067, over 4745.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2074, pruned_loss=0.0292, over 972271.52 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:20:28,159 INFO [train.py:715] (7/8) Epoch 18, batch 8500, loss[loss=0.1191, simple_loss=0.1999, pruned_loss=0.01918, over 4975.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02909, over 972522.08 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:21:07,336 INFO [train.py:715] (7/8) Epoch 18, batch 8550, loss[loss=0.1294, simple_loss=0.2085, pruned_loss=0.02515, over 4889.00 frames.], 
tot_loss[loss=0.1334, simple_loss=0.208, pruned_loss=0.02941, over 972965.77 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:21:46,034 INFO [train.py:715] (7/8) Epoch 18, batch 8600, loss[loss=0.1217, simple_loss=0.1954, pruned_loss=0.024, over 4852.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02918, over 972501.86 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:22:24,238 INFO [train.py:715] (7/8) Epoch 18, batch 8650, loss[loss=0.1153, simple_loss=0.1891, pruned_loss=0.0208, over 4798.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02934, over 973131.19 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:23:03,810 INFO [train.py:715] (7/8) Epoch 18, batch 8700, loss[loss=0.1383, simple_loss=0.2084, pruned_loss=0.03413, over 4885.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02914, over 972668.34 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:23:43,635 INFO [train.py:715] (7/8) Epoch 18, batch 8750, loss[loss=0.1296, simple_loss=0.206, pruned_loss=0.02658, over 4763.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.0293, over 972982.62 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:24:23,136 INFO [train.py:715] (7/8) Epoch 18, batch 8800, loss[loss=0.1239, simple_loss=0.1935, pruned_loss=0.0271, over 4842.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.02905, over 972707.36 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:25:01,508 INFO [train.py:715] (7/8) Epoch 18, batch 8850, loss[loss=0.1775, simple_loss=0.2395, pruned_loss=0.05774, over 4844.00 frames.], tot_loss[loss=0.132, simple_loss=0.2058, pruned_loss=0.0291, over 972298.35 frames.], batch size: 34, lr: 1.24e-04 +2022-05-09 07:25:41,123 INFO [train.py:715] (7/8) Epoch 18, batch 8900, loss[loss=0.1273, simple_loss=0.1993, pruned_loss=0.02759, over 4835.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.02882, over 971961.34 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:26:19,642 INFO [train.py:715] (7/8) Epoch 18, batch 8950, loss[loss=0.1663, simple_loss=0.2272, pruned_loss=0.05275, over 4723.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02896, over 971507.60 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:26:58,103 INFO [train.py:715] (7/8) Epoch 18, batch 9000, loss[loss=0.1231, simple_loss=0.2026, pruned_loss=0.02178, over 4977.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02918, over 972921.20 frames.], batch size: 28, lr: 1.24e-04 +2022-05-09 07:26:58,103 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 07:27:08,040 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1045, simple_loss=0.1879, pruned_loss=0.01057, over 914524.00 frames. 
+2022-05-09 07:27:46,933 INFO [train.py:715] (7/8) Epoch 18, batch 9050, loss[loss=0.1193, simple_loss=0.1928, pruned_loss=0.02296, over 4772.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.0286, over 972124.54 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:28:26,541 INFO [train.py:715] (7/8) Epoch 18, batch 9100, loss[loss=0.1172, simple_loss=0.1986, pruned_loss=0.01792, over 4752.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02881, over 972333.40 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:29:05,674 INFO [train.py:715] (7/8) Epoch 18, batch 9150, loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03461, over 4925.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02892, over 972245.54 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:29:43,363 INFO [train.py:715] (7/8) Epoch 18, batch 9200, loss[loss=0.131, simple_loss=0.2029, pruned_loss=0.02952, over 4872.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2068, pruned_loss=0.02869, over 972114.37 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:30:22,561 INFO [train.py:715] (7/8) Epoch 18, batch 9250, loss[loss=0.1338, simple_loss=0.2113, pruned_loss=0.02814, over 4752.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02863, over 972231.30 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 07:31:01,721 INFO [train.py:715] (7/8) Epoch 18, batch 9300, loss[loss=0.1331, simple_loss=0.2017, pruned_loss=0.03229, over 4974.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02819, over 972201.55 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:31:39,927 INFO [train.py:715] (7/8) Epoch 18, batch 9350, loss[loss=0.1309, simple_loss=0.2045, pruned_loss=0.0287, over 4808.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02842, over 971585.62 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:32:18,514 INFO [train.py:715] (7/8) Epoch 18, batch 9400, loss[loss=0.1088, simple_loss=0.184, pruned_loss=0.01677, over 4966.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02869, over 971705.52 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:32:58,077 INFO [train.py:715] (7/8) Epoch 18, batch 9450, loss[loss=0.1318, simple_loss=0.2039, pruned_loss=0.02981, over 4937.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02891, over 971855.22 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:33:36,481 INFO [train.py:715] (7/8) Epoch 18, batch 9500, loss[loss=0.1243, simple_loss=0.1969, pruned_loss=0.02584, over 4919.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02922, over 971482.94 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 07:34:14,740 INFO [train.py:715] (7/8) Epoch 18, batch 9550, loss[loss=0.1397, simple_loss=0.2135, pruned_loss=0.03296, over 4793.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2057, pruned_loss=0.02909, over 971149.15 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:34:53,875 INFO [train.py:715] (7/8) Epoch 18, batch 9600, loss[loss=0.1396, simple_loss=0.2074, pruned_loss=0.03593, over 4700.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2064, pruned_loss=0.02973, over 971055.93 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:35:33,430 INFO [train.py:715] (7/8) Epoch 18, batch 9650, loss[loss=0.1252, simple_loss=0.2047, pruned_loss=0.02281, over 4884.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2067, pruned_loss=0.02979, over 971143.81 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 
07:36:12,262 INFO [train.py:715] (7/8) Epoch 18, batch 9700, loss[loss=0.1351, simple_loss=0.2067, pruned_loss=0.03177, over 4898.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2068, pruned_loss=0.02987, over 971432.84 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:36:50,931 INFO [train.py:715] (7/8) Epoch 18, batch 9750, loss[loss=0.144, simple_loss=0.2207, pruned_loss=0.03367, over 4889.00 frames.], tot_loss[loss=0.1339, simple_loss=0.2077, pruned_loss=0.03011, over 971489.86 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:37:31,029 INFO [train.py:715] (7/8) Epoch 18, batch 9800, loss[loss=0.1383, simple_loss=0.208, pruned_loss=0.03436, over 4956.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02964, over 972202.33 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:38:09,633 INFO [train.py:715] (7/8) Epoch 18, batch 9850, loss[loss=0.1505, simple_loss=0.2253, pruned_loss=0.03787, over 4801.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02932, over 972931.93 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:38:47,997 INFO [train.py:715] (7/8) Epoch 18, batch 9900, loss[loss=0.1268, simple_loss=0.2025, pruned_loss=0.02553, over 4819.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02944, over 973093.03 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:39:27,319 INFO [train.py:715] (7/8) Epoch 18, batch 9950, loss[loss=0.1254, simple_loss=0.1962, pruned_loss=0.02733, over 4835.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02936, over 973057.82 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:40:06,408 INFO [train.py:715] (7/8) Epoch 18, batch 10000, loss[loss=0.1236, simple_loss=0.199, pruned_loss=0.02411, over 4767.00 frames.], tot_loss[loss=0.1322, simple_loss=0.206, pruned_loss=0.02915, over 973039.52 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:40:45,258 INFO [train.py:715] (7/8) Epoch 18, batch 10050, loss[loss=0.1267, simple_loss=0.2034, pruned_loss=0.02497, over 4924.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2061, pruned_loss=0.02922, over 972861.29 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 07:41:23,498 INFO [train.py:715] (7/8) Epoch 18, batch 10100, loss[loss=0.1058, simple_loss=0.1847, pruned_loss=0.01342, over 4958.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02916, over 973092.09 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:42:02,487 INFO [train.py:715] (7/8) Epoch 18, batch 10150, loss[loss=0.1274, simple_loss=0.2059, pruned_loss=0.0244, over 4944.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02915, over 974358.80 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 07:42:41,662 INFO [train.py:715] (7/8) Epoch 18, batch 10200, loss[loss=0.139, simple_loss=0.2101, pruned_loss=0.03391, over 4797.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02882, over 973788.85 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:43:20,199 INFO [train.py:715] (7/8) Epoch 18, batch 10250, loss[loss=0.1297, simple_loss=0.2115, pruned_loss=0.02391, over 4885.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02885, over 972703.44 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:43:59,315 INFO [train.py:715] (7/8) Epoch 18, batch 10300, loss[loss=0.1341, simple_loss=0.2115, pruned_loss=0.02833, over 4800.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02908, over 972749.15 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:44:39,639 INFO 
[train.py:715] (7/8) Epoch 18, batch 10350, loss[loss=0.1285, simple_loss=0.2075, pruned_loss=0.02473, over 4958.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02903, over 973504.67 frames.], batch size: 35, lr: 1.24e-04 +2022-05-09 07:45:18,122 INFO [train.py:715] (7/8) Epoch 18, batch 10400, loss[loss=0.1264, simple_loss=0.2092, pruned_loss=0.02176, over 4983.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02882, over 973766.80 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:45:56,570 INFO [train.py:715] (7/8) Epoch 18, batch 10450, loss[loss=0.1204, simple_loss=0.1941, pruned_loss=0.02339, over 4897.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02857, over 973313.72 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 07:46:36,304 INFO [train.py:715] (7/8) Epoch 18, batch 10500, loss[loss=0.1305, simple_loss=0.2078, pruned_loss=0.02662, over 4782.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02835, over 972909.54 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:47:15,166 INFO [train.py:715] (7/8) Epoch 18, batch 10550, loss[loss=0.1249, simple_loss=0.1998, pruned_loss=0.02497, over 4642.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02843, over 971745.09 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 07:47:53,900 INFO [train.py:715] (7/8) Epoch 18, batch 10600, loss[loss=0.1237, simple_loss=0.2061, pruned_loss=0.02063, over 4821.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.02871, over 972696.10 frames.], batch size: 27, lr: 1.24e-04 +2022-05-09 07:48:33,503 INFO [train.py:715] (7/8) Epoch 18, batch 10650, loss[loss=0.1715, simple_loss=0.2456, pruned_loss=0.04868, over 4778.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02878, over 972081.96 frames.], batch size: 17, lr: 1.24e-04 +2022-05-09 07:49:13,195 INFO [train.py:715] (7/8) Epoch 18, batch 10700, loss[loss=0.1393, simple_loss=0.2137, pruned_loss=0.03242, over 4820.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02879, over 972442.16 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:49:52,113 INFO [train.py:715] (7/8) Epoch 18, batch 10750, loss[loss=0.1398, simple_loss=0.2082, pruned_loss=0.03573, over 4848.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02898, over 972223.05 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 07:50:31,126 INFO [train.py:715] (7/8) Epoch 18, batch 10800, loss[loss=0.1293, simple_loss=0.1886, pruned_loss=0.035, over 4976.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.02868, over 971806.70 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:51:10,557 INFO [train.py:715] (7/8) Epoch 18, batch 10850, loss[loss=0.1193, simple_loss=0.1842, pruned_loss=0.02718, over 4786.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02835, over 971518.67 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:51:49,058 INFO [train.py:715] (7/8) Epoch 18, batch 10900, loss[loss=0.1482, simple_loss=0.2269, pruned_loss=0.03477, over 4775.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02849, over 971880.60 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:52:27,638 INFO [train.py:715] (7/8) Epoch 18, batch 10950, loss[loss=0.1346, simple_loss=0.207, pruned_loss=0.03105, over 4821.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02843, over 972740.91 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 07:53:07,690 INFO 
[train.py:715] (7/8) Epoch 18, batch 11000, loss[loss=0.1103, simple_loss=0.1849, pruned_loss=0.01787, over 4892.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02819, over 972282.02 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:53:46,747 INFO [train.py:715] (7/8) Epoch 18, batch 11050, loss[loss=0.1173, simple_loss=0.1968, pruned_loss=0.01894, over 4765.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02844, over 971756.69 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:54:26,301 INFO [train.py:715] (7/8) Epoch 18, batch 11100, loss[loss=0.1247, simple_loss=0.1922, pruned_loss=0.02865, over 4823.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02818, over 972638.99 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:55:05,197 INFO [train.py:715] (7/8) Epoch 18, batch 11150, loss[loss=0.1259, simple_loss=0.1969, pruned_loss=0.02745, over 4811.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.02799, over 972444.75 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 07:55:44,749 INFO [train.py:715] (7/8) Epoch 18, batch 11200, loss[loss=0.1298, simple_loss=0.2072, pruned_loss=0.02624, over 4992.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02851, over 972030.95 frames.], batch size: 14, lr: 1.24e-04 +2022-05-09 07:56:23,191 INFO [train.py:715] (7/8) Epoch 18, batch 11250, loss[loss=0.1284, simple_loss=0.2012, pruned_loss=0.02774, over 4909.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.02858, over 972515.51 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 07:57:01,932 INFO [train.py:715] (7/8) Epoch 18, batch 11300, loss[loss=0.1529, simple_loss=0.2223, pruned_loss=0.04178, over 4694.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02844, over 972006.50 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:57:41,021 INFO [train.py:715] (7/8) Epoch 18, batch 11350, loss[loss=0.1239, simple_loss=0.1992, pruned_loss=0.02429, over 4982.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02899, over 972472.47 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 07:58:20,190 INFO [train.py:715] (7/8) Epoch 18, batch 11400, loss[loss=0.1426, simple_loss=0.2263, pruned_loss=0.0295, over 4954.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02907, over 971968.67 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 07:58:59,557 INFO [train.py:715] (7/8) Epoch 18, batch 11450, loss[loss=0.1714, simple_loss=0.2391, pruned_loss=0.0518, over 4881.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02858, over 972064.95 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 07:59:38,060 INFO [train.py:715] (7/8) Epoch 18, batch 11500, loss[loss=0.1587, simple_loss=0.2371, pruned_loss=0.04016, over 4977.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02855, over 971896.98 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:00:17,727 INFO [train.py:715] (7/8) Epoch 18, batch 11550, loss[loss=0.1542, simple_loss=0.2197, pruned_loss=0.04438, over 4870.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02853, over 971986.39 frames.], batch size: 38, lr: 1.24e-04 +2022-05-09 08:00:57,126 INFO [train.py:715] (7/8) Epoch 18, batch 11600, loss[loss=0.1186, simple_loss=0.1945, pruned_loss=0.02129, over 4767.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02851, over 972187.78 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 08:01:35,949 INFO 
[train.py:715] (7/8) Epoch 18, batch 11650, loss[loss=0.1394, simple_loss=0.2082, pruned_loss=0.0353, over 4977.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02836, over 971924.60 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:02:15,655 INFO [train.py:715] (7/8) Epoch 18, batch 11700, loss[loss=0.1135, simple_loss=0.1929, pruned_loss=0.01706, over 4826.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02794, over 971895.88 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 08:02:54,932 INFO [train.py:715] (7/8) Epoch 18, batch 11750, loss[loss=0.1248, simple_loss=0.2078, pruned_loss=0.02091, over 4877.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02816, over 972227.03 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 08:03:34,976 INFO [train.py:715] (7/8) Epoch 18, batch 11800, loss[loss=0.1446, simple_loss=0.2177, pruned_loss=0.03577, over 4808.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02879, over 971964.02 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 08:04:13,541 INFO [train.py:715] (7/8) Epoch 18, batch 11850, loss[loss=0.1015, simple_loss=0.1764, pruned_loss=0.01336, over 4771.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02858, over 972121.87 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 08:04:53,379 INFO [train.py:715] (7/8) Epoch 18, batch 11900, loss[loss=0.1254, simple_loss=0.1961, pruned_loss=0.02734, over 4825.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02846, over 972003.54 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:05:32,230 INFO [train.py:715] (7/8) Epoch 18, batch 11950, loss[loss=0.1177, simple_loss=0.1924, pruned_loss=0.02154, over 4979.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02844, over 971370.09 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:06:10,827 INFO [train.py:715] (7/8) Epoch 18, batch 12000, loss[loss=0.1234, simple_loss=0.1971, pruned_loss=0.02478, over 4831.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02852, over 972552.66 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 08:06:10,828 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 08:06:20,737 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1046, simple_loss=0.188, pruned_loss=0.01063, over 914524.00 frames. 
+2022-05-09 08:07:00,013 INFO [train.py:715] (7/8) Epoch 18, batch 12050, loss[loss=0.1268, simple_loss=0.2084, pruned_loss=0.02255, over 4880.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02825, over 972165.97 frames.], batch size: 16, lr: 1.24e-04 +2022-05-09 08:07:39,520 INFO [train.py:715] (7/8) Epoch 18, batch 12100, loss[loss=0.1413, simple_loss=0.2189, pruned_loss=0.03188, over 4840.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02846, over 972244.63 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 08:08:19,052 INFO [train.py:715] (7/8) Epoch 18, batch 12150, loss[loss=0.1194, simple_loss=0.185, pruned_loss=0.02693, over 4852.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02875, over 972068.54 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:08:59,344 INFO [train.py:715] (7/8) Epoch 18, batch 12200, loss[loss=0.1344, simple_loss=0.2064, pruned_loss=0.03124, over 4841.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02863, over 972191.24 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 08:09:38,277 INFO [train.py:715] (7/8) Epoch 18, batch 12250, loss[loss=0.1226, simple_loss=0.1865, pruned_loss=0.02942, over 4849.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2071, pruned_loss=0.02883, over 971720.32 frames.], batch size: 12, lr: 1.24e-04 +2022-05-09 08:10:18,806 INFO [train.py:715] (7/8) Epoch 18, batch 12300, loss[loss=0.131, simple_loss=0.2038, pruned_loss=0.0291, over 4903.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.02819, over 972248.31 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 08:10:58,227 INFO [train.py:715] (7/8) Epoch 18, batch 12350, loss[loss=0.1654, simple_loss=0.2436, pruned_loss=0.04362, over 4817.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.0284, over 971832.70 frames.], batch size: 25, lr: 1.24e-04 +2022-05-09 08:11:37,142 INFO [train.py:715] (7/8) Epoch 18, batch 12400, loss[loss=0.1371, simple_loss=0.2102, pruned_loss=0.03205, over 4846.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02848, over 970965.37 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:12:16,683 INFO [train.py:715] (7/8) Epoch 18, batch 12450, loss[loss=0.1181, simple_loss=0.192, pruned_loss=0.02204, over 4814.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02826, over 971208.00 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 08:12:55,937 INFO [train.py:715] (7/8) Epoch 18, batch 12500, loss[loss=0.1828, simple_loss=0.2486, pruned_loss=0.05853, over 4940.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02842, over 971143.14 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 08:13:36,321 INFO [train.py:715] (7/8) Epoch 18, batch 12550, loss[loss=0.1442, simple_loss=0.2248, pruned_loss=0.03175, over 4915.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.0289, over 970842.74 frames.], batch size: 18, lr: 1.24e-04 +2022-05-09 08:14:14,820 INFO [train.py:715] (7/8) Epoch 18, batch 12600, loss[loss=0.1526, simple_loss=0.2227, pruned_loss=0.04125, over 4809.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02886, over 971629.43 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:14:54,514 INFO [train.py:715] (7/8) Epoch 18, batch 12650, loss[loss=0.12, simple_loss=0.1974, pruned_loss=0.02126, over 4927.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2075, pruned_loss=0.02894, over 971492.24 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 
08:15:33,313 INFO [train.py:715] (7/8) Epoch 18, batch 12700, loss[loss=0.1087, simple_loss=0.181, pruned_loss=0.0182, over 4927.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02847, over 971234.55 frames.], batch size: 23, lr: 1.24e-04 +2022-05-09 08:16:12,932 INFO [train.py:715] (7/8) Epoch 18, batch 12750, loss[loss=0.142, simple_loss=0.2212, pruned_loss=0.03134, over 4837.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02838, over 970510.19 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 08:16:52,485 INFO [train.py:715] (7/8) Epoch 18, batch 12800, loss[loss=0.1323, simple_loss=0.2011, pruned_loss=0.03175, over 4821.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02865, over 969942.39 frames.], batch size: 13, lr: 1.24e-04 +2022-05-09 08:17:31,838 INFO [train.py:715] (7/8) Epoch 18, batch 12850, loss[loss=0.1308, simple_loss=0.1985, pruned_loss=0.03161, over 4840.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02841, over 970678.93 frames.], batch size: 30, lr: 1.24e-04 +2022-05-09 08:18:11,710 INFO [train.py:715] (7/8) Epoch 18, batch 12900, loss[loss=0.1192, simple_loss=0.1922, pruned_loss=0.02304, over 4861.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02919, over 971452.96 frames.], batch size: 32, lr: 1.24e-04 +2022-05-09 08:18:50,202 INFO [train.py:715] (7/8) Epoch 18, batch 12950, loss[loss=0.1348, simple_loss=0.2123, pruned_loss=0.02866, over 4801.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02861, over 971858.70 frames.], batch size: 21, lr: 1.24e-04 +2022-05-09 08:19:30,196 INFO [train.py:715] (7/8) Epoch 18, batch 13000, loss[loss=0.1586, simple_loss=0.2322, pruned_loss=0.04253, over 4831.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02902, over 972100.89 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:20:09,531 INFO [train.py:715] (7/8) Epoch 18, batch 13050, loss[loss=0.1411, simple_loss=0.2261, pruned_loss=0.02804, over 4899.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02854, over 971699.11 frames.], batch size: 22, lr: 1.24e-04 +2022-05-09 08:20:48,613 INFO [train.py:715] (7/8) Epoch 18, batch 13100, loss[loss=0.1922, simple_loss=0.2586, pruned_loss=0.06295, over 4985.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02897, over 971431.43 frames.], batch size: 39, lr: 1.24e-04 +2022-05-09 08:21:28,140 INFO [train.py:715] (7/8) Epoch 18, batch 13150, loss[loss=0.1275, simple_loss=0.2032, pruned_loss=0.02593, over 4888.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02895, over 972285.33 frames.], batch size: 19, lr: 1.24e-04 +2022-05-09 08:22:07,410 INFO [train.py:715] (7/8) Epoch 18, batch 13200, loss[loss=0.1501, simple_loss=0.2148, pruned_loss=0.04271, over 4705.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02936, over 972082.34 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:22:47,221 INFO [train.py:715] (7/8) Epoch 18, batch 13250, loss[loss=0.1345, simple_loss=0.209, pruned_loss=0.03005, over 4848.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02927, over 971480.92 frames.], batch size: 20, lr: 1.24e-04 +2022-05-09 08:23:25,810 INFO [train.py:715] (7/8) Epoch 18, batch 13300, loss[loss=0.1312, simple_loss=0.2089, pruned_loss=0.02681, over 4870.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.0293, over 972195.21 frames.], batch size: 20, lr: 1.24e-04 +2022-05-09 08:24:05,549 
INFO [train.py:715] (7/8) Epoch 18, batch 13350, loss[loss=0.1293, simple_loss=0.1939, pruned_loss=0.03241, over 4803.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02899, over 972005.16 frames.], batch size: 24, lr: 1.24e-04 +2022-05-09 08:24:44,554 INFO [train.py:715] (7/8) Epoch 18, batch 13400, loss[loss=0.1425, simple_loss=0.2272, pruned_loss=0.02893, over 4964.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02916, over 971873.07 frames.], batch size: 15, lr: 1.24e-04 +2022-05-09 08:25:25,447 INFO [train.py:715] (7/8) Epoch 18, batch 13450, loss[loss=0.1274, simple_loss=0.2001, pruned_loss=0.02733, over 4881.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02919, over 972334.00 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 08:26:05,137 INFO [train.py:715] (7/8) Epoch 18, batch 13500, loss[loss=0.1232, simple_loss=0.1918, pruned_loss=0.02736, over 4955.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02956, over 972247.45 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:26:44,083 INFO [train.py:715] (7/8) Epoch 18, batch 13550, loss[loss=0.1332, simple_loss=0.2118, pruned_loss=0.02727, over 4912.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2069, pruned_loss=0.02927, over 972592.44 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:27:23,343 INFO [train.py:715] (7/8) Epoch 18, batch 13600, loss[loss=0.135, simple_loss=0.2099, pruned_loss=0.03009, over 4746.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.02889, over 972820.62 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:28:02,155 INFO [train.py:715] (7/8) Epoch 18, batch 13650, loss[loss=0.1266, simple_loss=0.1965, pruned_loss=0.02841, over 4859.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.0287, over 973663.03 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 08:28:41,564 INFO [train.py:715] (7/8) Epoch 18, batch 13700, loss[loss=0.1239, simple_loss=0.2072, pruned_loss=0.02027, over 4961.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02845, over 973966.13 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 08:29:20,648 INFO [train.py:715] (7/8) Epoch 18, batch 13750, loss[loss=0.1347, simple_loss=0.2135, pruned_loss=0.02793, over 4979.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02841, over 974509.00 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 08:29:59,713 INFO [train.py:715] (7/8) Epoch 18, batch 13800, loss[loss=0.1485, simple_loss=0.2144, pruned_loss=0.04128, over 4744.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02863, over 974284.70 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:30:39,481 INFO [train.py:715] (7/8) Epoch 18, batch 13850, loss[loss=0.1293, simple_loss=0.2186, pruned_loss=0.02003, over 4911.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02859, over 973680.48 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:31:18,293 INFO [train.py:715] (7/8) Epoch 18, batch 13900, loss[loss=0.1613, simple_loss=0.2279, pruned_loss=0.04734, over 4881.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02872, over 973770.13 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:31:57,752 INFO [train.py:715] (7/8) Epoch 18, batch 13950, loss[loss=0.1425, simple_loss=0.2172, pruned_loss=0.03394, over 4883.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02874, over 973361.47 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:32:37,350 INFO 
[train.py:715] (7/8) Epoch 18, batch 14000, loss[loss=0.1375, simple_loss=0.2142, pruned_loss=0.03035, over 4901.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02912, over 973998.39 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 08:33:17,112 INFO [train.py:715] (7/8) Epoch 18, batch 14050, loss[loss=0.1377, simple_loss=0.2152, pruned_loss=0.03007, over 4944.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2073, pruned_loss=0.02963, over 973680.86 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:33:56,296 INFO [train.py:715] (7/8) Epoch 18, batch 14100, loss[loss=0.1425, simple_loss=0.209, pruned_loss=0.03796, over 4774.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02945, over 973298.35 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:34:35,397 INFO [train.py:715] (7/8) Epoch 18, batch 14150, loss[loss=0.1202, simple_loss=0.2074, pruned_loss=0.01652, over 4747.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02931, over 973444.86 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:35:14,783 INFO [train.py:715] (7/8) Epoch 18, batch 14200, loss[loss=0.136, simple_loss=0.2032, pruned_loss=0.03439, over 4850.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02923, over 973076.31 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 08:35:54,059 INFO [train.py:715] (7/8) Epoch 18, batch 14250, loss[loss=0.1673, simple_loss=0.2373, pruned_loss=0.0487, over 4780.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.02934, over 972853.33 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 08:36:33,986 INFO [train.py:715] (7/8) Epoch 18, batch 14300, loss[loss=0.1286, simple_loss=0.2027, pruned_loss=0.02731, over 4875.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02938, over 972411.45 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:37:13,314 INFO [train.py:715] (7/8) Epoch 18, batch 14350, loss[loss=0.1441, simple_loss=0.2235, pruned_loss=0.03229, over 4921.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02941, over 972217.93 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 08:37:52,858 INFO [train.py:715] (7/8) Epoch 18, batch 14400, loss[loss=0.1554, simple_loss=0.2221, pruned_loss=0.04431, over 4793.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2077, pruned_loss=0.02962, over 972515.65 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 08:38:32,502 INFO [train.py:715] (7/8) Epoch 18, batch 14450, loss[loss=0.147, simple_loss=0.2235, pruned_loss=0.03524, over 4774.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02977, over 972597.43 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:39:11,250 INFO [train.py:715] (7/8) Epoch 18, batch 14500, loss[loss=0.1287, simple_loss=0.205, pruned_loss=0.02614, over 4857.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.02975, over 972891.26 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 08:39:50,389 INFO [train.py:715] (7/8) Epoch 18, batch 14550, loss[loss=0.1599, simple_loss=0.2516, pruned_loss=0.03415, over 4854.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02941, over 973015.16 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 08:40:29,522 INFO [train.py:715] (7/8) Epoch 18, batch 14600, loss[loss=0.1372, simple_loss=0.2116, pruned_loss=0.03138, over 4739.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02931, over 972747.64 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:41:09,224 INFO 
[train.py:715] (7/8) Epoch 18, batch 14650, loss[loss=0.1632, simple_loss=0.2316, pruned_loss=0.04744, over 4863.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.0291, over 972912.30 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:41:48,680 INFO [train.py:715] (7/8) Epoch 18, batch 14700, loss[loss=0.1259, simple_loss=0.2029, pruned_loss=0.02449, over 4830.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2075, pruned_loss=0.02875, over 971842.17 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 08:42:28,037 INFO [train.py:715] (7/8) Epoch 18, batch 14750, loss[loss=0.1355, simple_loss=0.2135, pruned_loss=0.02876, over 4881.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2074, pruned_loss=0.02894, over 972088.81 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:43:07,468 INFO [train.py:715] (7/8) Epoch 18, batch 14800, loss[loss=0.126, simple_loss=0.1998, pruned_loss=0.02606, over 4867.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02893, over 971884.71 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:43:46,221 INFO [train.py:715] (7/8) Epoch 18, batch 14850, loss[loss=0.1435, simple_loss=0.2111, pruned_loss=0.03791, over 4887.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02905, over 971473.81 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 08:44:25,880 INFO [train.py:715] (7/8) Epoch 18, batch 14900, loss[loss=0.1218, simple_loss=0.2034, pruned_loss=0.02009, over 4987.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02859, over 972317.84 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 08:45:05,547 INFO [train.py:715] (7/8) Epoch 18, batch 14950, loss[loss=0.1226, simple_loss=0.1977, pruned_loss=0.02379, over 4806.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02863, over 972878.32 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 08:45:44,813 INFO [train.py:715] (7/8) Epoch 18, batch 15000, loss[loss=0.1057, simple_loss=0.1766, pruned_loss=0.01741, over 4888.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02864, over 972905.44 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 08:45:44,814 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 08:45:54,765 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1048, simple_loss=0.1881, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-09 08:46:34,348 INFO [train.py:715] (7/8) Epoch 18, batch 15050, loss[loss=0.1312, simple_loss=0.2029, pruned_loss=0.02979, over 4879.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2075, pruned_loss=0.02869, over 972948.49 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:47:13,524 INFO [train.py:715] (7/8) Epoch 18, batch 15100, loss[loss=0.1143, simple_loss=0.1878, pruned_loss=0.02039, over 4859.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2078, pruned_loss=0.02886, over 972825.72 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 08:47:53,254 INFO [train.py:715] (7/8) Epoch 18, batch 15150, loss[loss=0.1416, simple_loss=0.2134, pruned_loss=0.03487, over 4903.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02869, over 972330.45 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:48:32,388 INFO [train.py:715] (7/8) Epoch 18, batch 15200, loss[loss=0.1272, simple_loss=0.2003, pruned_loss=0.02708, over 4744.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02897, over 972704.98 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 08:49:11,929 INFO [train.py:715] (7/8) Epoch 18, batch 15250, loss[loss=0.1467, simple_loss=0.2229, pruned_loss=0.03528, over 4886.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02877, over 973566.73 frames.], batch size: 38, lr: 1.23e-04 +2022-05-09 08:49:51,790 INFO [train.py:715] (7/8) Epoch 18, batch 15300, loss[loss=0.1289, simple_loss=0.2042, pruned_loss=0.02677, over 4970.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02862, over 973272.48 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:50:31,164 INFO [train.py:715] (7/8) Epoch 18, batch 15350, loss[loss=0.1288, simple_loss=0.207, pruned_loss=0.02535, over 4948.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.02884, over 973070.31 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 08:51:10,082 INFO [train.py:715] (7/8) Epoch 18, batch 15400, loss[loss=0.1535, simple_loss=0.2238, pruned_loss=0.04159, over 4884.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2078, pruned_loss=0.0297, over 971993.89 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 08:51:49,373 INFO [train.py:715] (7/8) Epoch 18, batch 15450, loss[loss=0.1192, simple_loss=0.1878, pruned_loss=0.0253, over 4829.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02944, over 971231.68 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 08:52:28,996 INFO [train.py:715] (7/8) Epoch 18, batch 15500, loss[loss=0.1209, simple_loss=0.1983, pruned_loss=0.02175, over 4942.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02913, over 971267.98 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 08:53:08,163 INFO [train.py:715] (7/8) Epoch 18, batch 15550, loss[loss=0.157, simple_loss=0.2211, pruned_loss=0.04647, over 4781.00 frames.], tot_loss[loss=0.132, simple_loss=0.2058, pruned_loss=0.02912, over 971422.39 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 08:53:47,890 INFO [train.py:715] (7/8) Epoch 18, batch 15600, loss[loss=0.1244, simple_loss=0.197, pruned_loss=0.02589, over 4964.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02874, over 972230.53 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 08:54:28,011 INFO [train.py:715] (7/8) Epoch 18, batch 15650, loss[loss=0.1598, simple_loss=0.2264, pruned_loss=0.04653, over 4913.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02921, over 972574.17 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 
08:55:07,612 INFO [train.py:715] (7/8) Epoch 18, batch 15700, loss[loss=0.1705, simple_loss=0.2625, pruned_loss=0.03921, over 4913.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02902, over 972304.15 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:55:46,517 INFO [train.py:715] (7/8) Epoch 18, batch 15750, loss[loss=0.09327, simple_loss=0.1641, pruned_loss=0.0112, over 4774.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02943, over 971759.00 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 08:56:25,958 INFO [train.py:715] (7/8) Epoch 18, batch 15800, loss[loss=0.1422, simple_loss=0.2187, pruned_loss=0.03288, over 4780.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2072, pruned_loss=0.0296, over 972098.57 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:57:05,868 INFO [train.py:715] (7/8) Epoch 18, batch 15850, loss[loss=0.135, simple_loss=0.212, pruned_loss=0.02894, over 4783.00 frames.], tot_loss[loss=0.133, simple_loss=0.2071, pruned_loss=0.02945, over 971768.11 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 08:57:45,099 INFO [train.py:715] (7/8) Epoch 18, batch 15900, loss[loss=0.1973, simple_loss=0.2776, pruned_loss=0.05854, over 4929.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02948, over 972394.25 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 08:58:24,413 INFO [train.py:715] (7/8) Epoch 18, batch 15950, loss[loss=0.1349, simple_loss=0.2101, pruned_loss=0.02982, over 4874.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02902, over 972351.33 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 08:59:04,890 INFO [train.py:715] (7/8) Epoch 18, batch 16000, loss[loss=0.1352, simple_loss=0.2073, pruned_loss=0.03155, over 4705.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02898, over 973229.49 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 08:59:45,383 INFO [train.py:715] (7/8) Epoch 18, batch 16050, loss[loss=0.1303, simple_loss=0.2059, pruned_loss=0.02738, over 4797.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02858, over 972648.81 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:00:24,420 INFO [train.py:715] (7/8) Epoch 18, batch 16100, loss[loss=0.1389, simple_loss=0.2168, pruned_loss=0.03054, over 4956.00 frames.], tot_loss[loss=0.132, simple_loss=0.2068, pruned_loss=0.02858, over 972517.35 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:01:03,603 INFO [train.py:715] (7/8) Epoch 18, batch 16150, loss[loss=0.1288, simple_loss=0.2096, pruned_loss=0.02394, over 4939.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02829, over 973064.89 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:01:43,695 INFO [train.py:715] (7/8) Epoch 18, batch 16200, loss[loss=0.1234, simple_loss=0.2043, pruned_loss=0.02126, over 4912.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02842, over 973487.37 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:02:22,643 INFO [train.py:715] (7/8) Epoch 18, batch 16250, loss[loss=0.1082, simple_loss=0.1831, pruned_loss=0.01664, over 4889.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02853, over 973017.87 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:03:01,671 INFO [train.py:715] (7/8) Epoch 18, batch 16300, loss[loss=0.1767, simple_loss=0.2548, pruned_loss=0.04929, over 4819.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2075, pruned_loss=0.02917, over 972816.91 frames.], batch size: 27, lr: 1.23e-04 +2022-05-09 
09:03:41,212 INFO [train.py:715] (7/8) Epoch 18, batch 16350, loss[loss=0.1553, simple_loss=0.2297, pruned_loss=0.04048, over 4807.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02906, over 972849.98 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:04:20,328 INFO [train.py:715] (7/8) Epoch 18, batch 16400, loss[loss=0.1148, simple_loss=0.1919, pruned_loss=0.01879, over 4977.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02935, over 972656.24 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:04:59,286 INFO [train.py:715] (7/8) Epoch 18, batch 16450, loss[loss=0.1141, simple_loss=0.1933, pruned_loss=0.01749, over 4801.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02939, over 971966.33 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:05:38,809 INFO [train.py:715] (7/8) Epoch 18, batch 16500, loss[loss=0.1232, simple_loss=0.2023, pruned_loss=0.02205, over 4985.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.02908, over 972336.46 frames.], batch size: 28, lr: 1.23e-04 +2022-05-09 09:06:18,649 INFO [train.py:715] (7/8) Epoch 18, batch 16550, loss[loss=0.1228, simple_loss=0.1977, pruned_loss=0.02401, over 4751.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02844, over 972330.84 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:06:57,079 INFO [train.py:715] (7/8) Epoch 18, batch 16600, loss[loss=0.1372, simple_loss=0.213, pruned_loss=0.03073, over 4950.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02816, over 971616.40 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:07:36,513 INFO [train.py:715] (7/8) Epoch 18, batch 16650, loss[loss=0.1523, simple_loss=0.2248, pruned_loss=0.03995, over 4921.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02879, over 972683.96 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:08:15,859 INFO [train.py:715] (7/8) Epoch 18, batch 16700, loss[loss=0.1285, simple_loss=0.2045, pruned_loss=0.02619, over 4813.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02912, over 972438.88 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:08:55,199 INFO [train.py:715] (7/8) Epoch 18, batch 16750, loss[loss=0.1599, simple_loss=0.2396, pruned_loss=0.0401, over 4875.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02935, over 972874.34 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:09:34,646 INFO [train.py:715] (7/8) Epoch 18, batch 16800, loss[loss=0.1351, simple_loss=0.2006, pruned_loss=0.03476, over 4914.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2058, pruned_loss=0.02904, over 973174.79 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:10:13,849 INFO [train.py:715] (7/8) Epoch 18, batch 16850, loss[loss=0.1432, simple_loss=0.2033, pruned_loss=0.04155, over 4969.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2053, pruned_loss=0.02903, over 973373.79 frames.], batch size: 35, lr: 1.23e-04 +2022-05-09 09:10:53,310 INFO [train.py:715] (7/8) Epoch 18, batch 16900, loss[loss=0.1416, simple_loss=0.2081, pruned_loss=0.03754, over 4735.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2053, pruned_loss=0.02885, over 973805.88 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:11:32,154 INFO [train.py:715] (7/8) Epoch 18, batch 16950, loss[loss=0.1209, simple_loss=0.1939, pruned_loss=0.02391, over 4840.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2051, pruned_loss=0.02883, over 973979.71 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 
09:12:11,613 INFO [train.py:715] (7/8) Epoch 18, batch 17000, loss[loss=0.1146, simple_loss=0.1782, pruned_loss=0.02545, over 4777.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2052, pruned_loss=0.02895, over 973774.15 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:12:51,063 INFO [train.py:715] (7/8) Epoch 18, batch 17050, loss[loss=0.1127, simple_loss=0.1857, pruned_loss=0.01985, over 4774.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2044, pruned_loss=0.02863, over 973642.75 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:13:30,538 INFO [train.py:715] (7/8) Epoch 18, batch 17100, loss[loss=0.108, simple_loss=0.1876, pruned_loss=0.01418, over 4942.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2043, pruned_loss=0.02823, over 973149.35 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:14:10,115 INFO [train.py:715] (7/8) Epoch 18, batch 17150, loss[loss=0.1443, simple_loss=0.2201, pruned_loss=0.03425, over 4941.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2045, pruned_loss=0.02813, over 972962.51 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:14:49,240 INFO [train.py:715] (7/8) Epoch 18, batch 17200, loss[loss=0.1265, simple_loss=0.1993, pruned_loss=0.02688, over 4820.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02873, over 972625.80 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 09:15:28,959 INFO [train.py:715] (7/8) Epoch 18, batch 17250, loss[loss=0.1326, simple_loss=0.2092, pruned_loss=0.02803, over 4905.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02878, over 973364.03 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:16:08,222 INFO [train.py:715] (7/8) Epoch 18, batch 17300, loss[loss=0.1245, simple_loss=0.1983, pruned_loss=0.02532, over 4853.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02862, over 973020.47 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 09:16:48,155 INFO [train.py:715] (7/8) Epoch 18, batch 17350, loss[loss=0.1157, simple_loss=0.1902, pruned_loss=0.02056, over 4875.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02904, over 972439.04 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 09:17:27,215 INFO [train.py:715] (7/8) Epoch 18, batch 17400, loss[loss=0.1234, simple_loss=0.2037, pruned_loss=0.0216, over 4978.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2073, pruned_loss=0.02948, over 972383.70 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:18:07,007 INFO [train.py:715] (7/8) Epoch 18, batch 17450, loss[loss=0.1287, simple_loss=0.1977, pruned_loss=0.02984, over 4858.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02909, over 972405.82 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 09:18:46,087 INFO [train.py:715] (7/8) Epoch 18, batch 17500, loss[loss=0.1487, simple_loss=0.2106, pruned_loss=0.04347, over 4878.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02858, over 971838.63 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:19:24,712 INFO [train.py:715] (7/8) Epoch 18, batch 17550, loss[loss=0.1177, simple_loss=0.1871, pruned_loss=0.02418, over 4803.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02868, over 971707.87 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:20:04,280 INFO [train.py:715] (7/8) Epoch 18, batch 17600, loss[loss=0.1298, simple_loss=0.208, pruned_loss=0.02579, over 4870.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02921, over 972403.32 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 
09:20:43,548 INFO [train.py:715] (7/8) Epoch 18, batch 17650, loss[loss=0.1114, simple_loss=0.1813, pruned_loss=0.02071, over 4993.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.02906, over 973138.96 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:21:22,852 INFO [train.py:715] (7/8) Epoch 18, batch 17700, loss[loss=0.1589, simple_loss=0.234, pruned_loss=0.04193, over 4987.00 frames.], tot_loss[loss=0.132, simple_loss=0.2056, pruned_loss=0.02914, over 972775.56 frames.], batch size: 31, lr: 1.23e-04 +2022-05-09 09:22:01,950 INFO [train.py:715] (7/8) Epoch 18, batch 17750, loss[loss=0.1013, simple_loss=0.1728, pruned_loss=0.01491, over 4924.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2061, pruned_loss=0.02912, over 972641.43 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:22:41,549 INFO [train.py:715] (7/8) Epoch 18, batch 17800, loss[loss=0.1227, simple_loss=0.1856, pruned_loss=0.02986, over 4838.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2053, pruned_loss=0.02865, over 973249.20 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 09:23:20,835 INFO [train.py:715] (7/8) Epoch 18, batch 17850, loss[loss=0.1225, simple_loss=0.19, pruned_loss=0.02748, over 4847.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02924, over 972780.30 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 09:23:59,346 INFO [train.py:715] (7/8) Epoch 18, batch 17900, loss[loss=0.1168, simple_loss=0.1969, pruned_loss=0.01838, over 4781.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02903, over 972522.05 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:24:39,459 INFO [train.py:715] (7/8) Epoch 18, batch 17950, loss[loss=0.1409, simple_loss=0.2199, pruned_loss=0.03095, over 4940.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02873, over 973625.12 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 09:25:18,521 INFO [train.py:715] (7/8) Epoch 18, batch 18000, loss[loss=0.1273, simple_loss=0.2013, pruned_loss=0.02668, over 4924.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02895, over 973153.14 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:25:18,522 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 09:25:28,383 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01063, over 914524.00 frames. 
+2022-05-09 09:26:07,772 INFO [train.py:715] (7/8) Epoch 18, batch 18050, loss[loss=0.1245, simple_loss=0.1968, pruned_loss=0.02614, over 4802.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02884, over 973038.96 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:26:47,165 INFO [train.py:715] (7/8) Epoch 18, batch 18100, loss[loss=0.1297, simple_loss=0.2062, pruned_loss=0.0266, over 4958.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2049, pruned_loss=0.02829, over 972792.66 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:27:26,270 INFO [train.py:715] (7/8) Epoch 18, batch 18150, loss[loss=0.1237, simple_loss=0.2047, pruned_loss=0.02133, over 4966.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.02923, over 973245.51 frames.], batch size: 28, lr: 1.23e-04 +2022-05-09 09:28:06,061 INFO [train.py:715] (7/8) Epoch 18, batch 18200, loss[loss=0.1261, simple_loss=0.2045, pruned_loss=0.02385, over 4745.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02936, over 973473.36 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:28:45,779 INFO [train.py:715] (7/8) Epoch 18, batch 18250, loss[loss=0.127, simple_loss=0.2051, pruned_loss=0.02449, over 4911.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02944, over 973866.85 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:29:24,152 INFO [train.py:715] (7/8) Epoch 18, batch 18300, loss[loss=0.1093, simple_loss=0.1746, pruned_loss=0.02204, over 4814.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2075, pruned_loss=0.02953, over 973527.12 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:30:03,825 INFO [train.py:715] (7/8) Epoch 18, batch 18350, loss[loss=0.146, simple_loss=0.2238, pruned_loss=0.03407, over 4920.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2079, pruned_loss=0.02985, over 973236.35 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 09:30:43,382 INFO [train.py:715] (7/8) Epoch 18, batch 18400, loss[loss=0.1375, simple_loss=0.2056, pruned_loss=0.03472, over 4920.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02952, over 972738.99 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 09:31:22,381 INFO [train.py:715] (7/8) Epoch 18, batch 18450, loss[loss=0.1579, simple_loss=0.2204, pruned_loss=0.04773, over 4951.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02913, over 972311.84 frames.], batch size: 40, lr: 1.23e-04 +2022-05-09 09:32:01,512 INFO [train.py:715] (7/8) Epoch 18, batch 18500, loss[loss=0.1407, simple_loss=0.2116, pruned_loss=0.03489, over 4773.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02856, over 972034.28 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:32:40,865 INFO [train.py:715] (7/8) Epoch 18, batch 18550, loss[loss=0.1322, simple_loss=0.2087, pruned_loss=0.02781, over 4967.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02892, over 971563.89 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 09:33:20,074 INFO [train.py:715] (7/8) Epoch 18, batch 18600, loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.0295, over 4972.00 frames.], tot_loss[loss=0.131, simple_loss=0.2061, pruned_loss=0.02796, over 972819.13 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:33:58,713 INFO [train.py:715] (7/8) Epoch 18, batch 18650, loss[loss=0.1359, simple_loss=0.2111, pruned_loss=0.03037, over 4799.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.0285, over 973876.82 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 
09:34:38,208 INFO [train.py:715] (7/8) Epoch 18, batch 18700, loss[loss=0.1239, simple_loss=0.1992, pruned_loss=0.02426, over 4859.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.02865, over 973884.83 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 09:35:17,421 INFO [train.py:715] (7/8) Epoch 18, batch 18750, loss[loss=0.1438, simple_loss=0.21, pruned_loss=0.03882, over 4947.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2074, pruned_loss=0.02867, over 972723.16 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:35:56,637 INFO [train.py:715] (7/8) Epoch 18, batch 18800, loss[loss=0.1305, simple_loss=0.204, pruned_loss=0.02847, over 4988.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.029, over 973108.66 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:36:35,997 INFO [train.py:715] (7/8) Epoch 18, batch 18850, loss[loss=0.1364, simple_loss=0.2096, pruned_loss=0.03155, over 4903.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02845, over 972862.00 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:37:15,846 INFO [train.py:715] (7/8) Epoch 18, batch 18900, loss[loss=0.1087, simple_loss=0.178, pruned_loss=0.01967, over 4850.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02819, over 972468.74 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 09:37:54,911 INFO [train.py:715] (7/8) Epoch 18, batch 18950, loss[loss=0.1242, simple_loss=0.1976, pruned_loss=0.02542, over 4837.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02825, over 972627.66 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 09:38:33,356 INFO [train.py:715] (7/8) Epoch 18, batch 19000, loss[loss=0.1278, simple_loss=0.1965, pruned_loss=0.02958, over 4856.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02823, over 972676.32 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 09:39:12,868 INFO [train.py:715] (7/8) Epoch 18, batch 19050, loss[loss=0.1318, simple_loss=0.1996, pruned_loss=0.03197, over 4975.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02881, over 972619.63 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:39:51,873 INFO [train.py:715] (7/8) Epoch 18, batch 19100, loss[loss=0.1046, simple_loss=0.1778, pruned_loss=0.01568, over 4822.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02882, over 972789.12 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 09:40:31,189 INFO [train.py:715] (7/8) Epoch 18, batch 19150, loss[loss=0.1316, simple_loss=0.195, pruned_loss=0.03412, over 4929.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02872, over 973260.37 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:41:11,056 INFO [train.py:715] (7/8) Epoch 18, batch 19200, loss[loss=0.1347, simple_loss=0.2073, pruned_loss=0.03104, over 4853.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02828, over 973009.80 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 09:41:50,581 INFO [train.py:715] (7/8) Epoch 18, batch 19250, loss[loss=0.1608, simple_loss=0.2326, pruned_loss=0.04454, over 4809.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02827, over 973440.85 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:42:29,656 INFO [train.py:715] (7/8) Epoch 18, batch 19300, loss[loss=0.1408, simple_loss=0.22, pruned_loss=0.03078, over 4942.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2059, pruned_loss=0.02799, over 973117.07 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 09:43:08,119 INFO 
[train.py:715] (7/8) Epoch 18, batch 19350, loss[loss=0.1174, simple_loss=0.195, pruned_loss=0.01985, over 4974.00 frames.], tot_loss[loss=0.131, simple_loss=0.2059, pruned_loss=0.02806, over 973220.94 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:43:47,525 INFO [train.py:715] (7/8) Epoch 18, batch 19400, loss[loss=0.125, simple_loss=0.2071, pruned_loss=0.02148, over 4913.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.02818, over 973128.41 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:44:26,733 INFO [train.py:715] (7/8) Epoch 18, batch 19450, loss[loss=0.1191, simple_loss=0.1872, pruned_loss=0.02547, over 4857.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02809, over 973381.62 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:45:05,484 INFO [train.py:715] (7/8) Epoch 18, batch 19500, loss[loss=0.1308, simple_loss=0.2164, pruned_loss=0.02265, over 4932.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02819, over 973541.46 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 09:45:44,654 INFO [train.py:715] (7/8) Epoch 18, batch 19550, loss[loss=0.1497, simple_loss=0.2187, pruned_loss=0.04037, over 4754.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02842, over 973905.06 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:46:24,063 INFO [train.py:715] (7/8) Epoch 18, batch 19600, loss[loss=0.1563, simple_loss=0.2314, pruned_loss=0.04061, over 4894.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02877, over 973900.07 frames.], batch size: 38, lr: 1.23e-04 +2022-05-09 09:47:02,886 INFO [train.py:715] (7/8) Epoch 18, batch 19650, loss[loss=0.1698, simple_loss=0.2505, pruned_loss=0.04454, over 4762.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02892, over 974270.90 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:47:41,713 INFO [train.py:715] (7/8) Epoch 18, batch 19700, loss[loss=0.1999, simple_loss=0.2902, pruned_loss=0.05485, over 4963.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02937, over 973172.82 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:48:21,731 INFO [train.py:715] (7/8) Epoch 18, batch 19750, loss[loss=0.1311, simple_loss=0.2041, pruned_loss=0.02907, over 4912.00 frames.], tot_loss[loss=0.133, simple_loss=0.2077, pruned_loss=0.02911, over 973341.76 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:49:01,598 INFO [train.py:715] (7/8) Epoch 18, batch 19800, loss[loss=0.1249, simple_loss=0.2057, pruned_loss=0.02205, over 4986.00 frames.], tot_loss[loss=0.133, simple_loss=0.2076, pruned_loss=0.02918, over 973084.96 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:49:40,676 INFO [train.py:715] (7/8) Epoch 18, batch 19850, loss[loss=0.1329, simple_loss=0.1995, pruned_loss=0.03313, over 4780.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02916, over 973177.41 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:50:20,124 INFO [train.py:715] (7/8) Epoch 18, batch 19900, loss[loss=0.1351, simple_loss=0.2106, pruned_loss=0.0298, over 4905.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02902, over 972581.56 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 09:50:59,803 INFO [train.py:715] (7/8) Epoch 18, batch 19950, loss[loss=0.1293, simple_loss=0.2074, pruned_loss=0.02554, over 4945.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02896, over 973389.66 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:51:39,048 INFO 
[train.py:715] (7/8) Epoch 18, batch 20000, loss[loss=0.1601, simple_loss=0.233, pruned_loss=0.04356, over 4801.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02874, over 973845.73 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 09:52:18,799 INFO [train.py:715] (7/8) Epoch 18, batch 20050, loss[loss=0.1124, simple_loss=0.1798, pruned_loss=0.0225, over 4779.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02826, over 973261.18 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 09:52:59,019 INFO [train.py:715] (7/8) Epoch 18, batch 20100, loss[loss=0.169, simple_loss=0.243, pruned_loss=0.04748, over 4915.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02859, over 972367.63 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 09:53:39,147 INFO [train.py:715] (7/8) Epoch 18, batch 20150, loss[loss=0.1485, simple_loss=0.2234, pruned_loss=0.03682, over 4769.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.0287, over 972078.18 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 09:54:18,210 INFO [train.py:715] (7/8) Epoch 18, batch 20200, loss[loss=0.1461, simple_loss=0.2176, pruned_loss=0.03726, over 4844.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02898, over 972396.68 frames.], batch size: 20, lr: 1.23e-04 +2022-05-09 09:54:57,193 INFO [train.py:715] (7/8) Epoch 18, batch 20250, loss[loss=0.1178, simple_loss=0.2019, pruned_loss=0.01687, over 4813.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.02865, over 971637.86 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 09:55:36,874 INFO [train.py:715] (7/8) Epoch 18, batch 20300, loss[loss=0.168, simple_loss=0.2297, pruned_loss=0.05312, over 4854.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.0288, over 972023.29 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 09:56:16,005 INFO [train.py:715] (7/8) Epoch 18, batch 20350, loss[loss=0.1145, simple_loss=0.1886, pruned_loss=0.02018, over 4949.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.0287, over 972477.45 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 09:56:55,261 INFO [train.py:715] (7/8) Epoch 18, batch 20400, loss[loss=0.1361, simple_loss=0.2161, pruned_loss=0.02801, over 4760.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02855, over 972141.02 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:57:34,100 INFO [train.py:715] (7/8) Epoch 18, batch 20450, loss[loss=0.1494, simple_loss=0.2229, pruned_loss=0.03794, over 4855.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02879, over 973128.30 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 09:58:14,210 INFO [train.py:715] (7/8) Epoch 18, batch 20500, loss[loss=0.1427, simple_loss=0.2161, pruned_loss=0.03463, over 4736.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02884, over 973003.31 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 09:58:52,924 INFO [train.py:715] (7/8) Epoch 18, batch 20550, loss[loss=0.1202, simple_loss=0.1884, pruned_loss=0.02603, over 4956.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2074, pruned_loss=0.02941, over 972996.18 frames.], batch size: 31, lr: 1.23e-04 +2022-05-09 09:59:31,856 INFO [train.py:715] (7/8) Epoch 18, batch 20600, loss[loss=0.1301, simple_loss=0.2051, pruned_loss=0.02756, over 4895.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02913, over 973406.43 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:00:10,870 INFO [train.py:715] 
(7/8) Epoch 18, batch 20650, loss[loss=0.1314, simple_loss=0.1922, pruned_loss=0.0353, over 4802.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02898, over 972783.39 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 10:00:50,422 INFO [train.py:715] (7/8) Epoch 18, batch 20700, loss[loss=0.1491, simple_loss=0.2269, pruned_loss=0.03562, over 4826.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02898, over 972923.28 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:01:28,861 INFO [train.py:715] (7/8) Epoch 18, batch 20750, loss[loss=0.1603, simple_loss=0.2309, pruned_loss=0.04489, over 4839.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02889, over 972297.64 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 10:02:08,331 INFO [train.py:715] (7/8) Epoch 18, batch 20800, loss[loss=0.1367, simple_loss=0.2087, pruned_loss=0.03239, over 4818.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02854, over 971917.05 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 10:02:47,766 INFO [train.py:715] (7/8) Epoch 18, batch 20850, loss[loss=0.1026, simple_loss=0.1822, pruned_loss=0.01148, over 4775.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02839, over 972024.17 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:03:26,623 INFO [train.py:715] (7/8) Epoch 18, batch 20900, loss[loss=0.1426, simple_loss=0.2104, pruned_loss=0.03741, over 4787.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.02809, over 972036.75 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 10:04:05,321 INFO [train.py:715] (7/8) Epoch 18, batch 20950, loss[loss=0.1191, simple_loss=0.1968, pruned_loss=0.02072, over 4842.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2048, pruned_loss=0.02783, over 972402.86 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:04:44,843 INFO [train.py:715] (7/8) Epoch 18, batch 21000, loss[loss=0.1287, simple_loss=0.2096, pruned_loss=0.02391, over 4806.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2049, pruned_loss=0.02784, over 972331.88 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 10:04:44,844 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 10:04:54,817 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1046, simple_loss=0.1879, pruned_loss=0.01059, over 914524.00 frames. 
+2022-05-09 10:05:34,565 INFO [train.py:715] (7/8) Epoch 18, batch 21050, loss[loss=0.1389, simple_loss=0.2145, pruned_loss=0.03171, over 4967.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02832, over 973152.11 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:06:14,354 INFO [train.py:715] (7/8) Epoch 18, batch 21100, loss[loss=0.1076, simple_loss=0.1778, pruned_loss=0.01874, over 4781.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02825, over 972731.78 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:06:53,521 INFO [train.py:715] (7/8) Epoch 18, batch 21150, loss[loss=0.1355, simple_loss=0.1981, pruned_loss=0.03646, over 4749.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02832, over 971957.55 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:07:33,002 INFO [train.py:715] (7/8) Epoch 18, batch 21200, loss[loss=0.1461, simple_loss=0.2154, pruned_loss=0.03841, over 4779.00 frames.], tot_loss[loss=0.1306, simple_loss=0.205, pruned_loss=0.02808, over 971009.03 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 10:08:12,708 INFO [train.py:715] (7/8) Epoch 18, batch 21250, loss[loss=0.1151, simple_loss=0.1968, pruned_loss=0.01674, over 4919.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02813, over 971755.57 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 10:08:51,645 INFO [train.py:715] (7/8) Epoch 18, batch 21300, loss[loss=0.1213, simple_loss=0.204, pruned_loss=0.01926, over 4866.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02827, over 970913.99 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:09:30,192 INFO [train.py:715] (7/8) Epoch 18, batch 21350, loss[loss=0.1571, simple_loss=0.2329, pruned_loss=0.04064, over 4754.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02841, over 971399.76 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:10:09,582 INFO [train.py:715] (7/8) Epoch 18, batch 21400, loss[loss=0.1223, simple_loss=0.2059, pruned_loss=0.01941, over 4788.00 frames.], tot_loss[loss=0.132, simple_loss=0.206, pruned_loss=0.02901, over 972347.80 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 10:10:51,762 INFO [train.py:715] (7/8) Epoch 18, batch 21450, loss[loss=0.1359, simple_loss=0.2124, pruned_loss=0.02965, over 4897.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02834, over 971784.46 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:11:30,942 INFO [train.py:715] (7/8) Epoch 18, batch 21500, loss[loss=0.1303, simple_loss=0.2099, pruned_loss=0.02538, over 4833.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02889, over 971846.55 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:12:09,694 INFO [train.py:715] (7/8) Epoch 18, batch 21550, loss[loss=0.1376, simple_loss=0.2023, pruned_loss=0.03642, over 4984.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02898, over 971956.48 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:12:49,088 INFO [train.py:715] (7/8) Epoch 18, batch 21600, loss[loss=0.1089, simple_loss=0.1882, pruned_loss=0.01478, over 4968.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02947, over 972609.87 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:13:28,304 INFO [train.py:715] (7/8) Epoch 18, batch 21650, loss[loss=0.1573, simple_loss=0.2251, pruned_loss=0.04475, over 4896.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2072, pruned_loss=0.02933, over 972317.76 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 
10:14:06,695 INFO [train.py:715] (7/8) Epoch 18, batch 21700, loss[loss=0.134, simple_loss=0.2161, pruned_loss=0.02593, over 4962.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02931, over 972623.85 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:14:45,681 INFO [train.py:715] (7/8) Epoch 18, batch 21750, loss[loss=0.1324, simple_loss=0.2135, pruned_loss=0.02559, over 4763.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02914, over 973221.15 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:15:24,823 INFO [train.py:715] (7/8) Epoch 18, batch 21800, loss[loss=0.1126, simple_loss=0.1869, pruned_loss=0.01913, over 4817.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02884, over 972987.62 frames.], batch size: 26, lr: 1.23e-04 +2022-05-09 10:16:04,136 INFO [train.py:715] (7/8) Epoch 18, batch 21850, loss[loss=0.106, simple_loss=0.1698, pruned_loss=0.02104, over 4781.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02852, over 972800.94 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 10:16:43,562 INFO [train.py:715] (7/8) Epoch 18, batch 21900, loss[loss=0.1272, simple_loss=0.2102, pruned_loss=0.02208, over 4777.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02847, over 972821.08 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:17:23,082 INFO [train.py:715] (7/8) Epoch 18, batch 21950, loss[loss=0.1379, simple_loss=0.2084, pruned_loss=0.03366, over 4917.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02828, over 973202.42 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:18:02,136 INFO [train.py:715] (7/8) Epoch 18, batch 22000, loss[loss=0.1201, simple_loss=0.1972, pruned_loss=0.02152, over 4821.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2052, pruned_loss=0.02819, over 973142.60 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:18:41,241 INFO [train.py:715] (7/8) Epoch 18, batch 22050, loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02927, over 4801.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02829, over 973890.79 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:19:20,731 INFO [train.py:715] (7/8) Epoch 18, batch 22100, loss[loss=0.1692, simple_loss=0.2383, pruned_loss=0.05004, over 4864.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02862, over 973611.92 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:19:59,604 INFO [train.py:715] (7/8) Epoch 18, batch 22150, loss[loss=0.1391, simple_loss=0.2113, pruned_loss=0.03349, over 4979.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02843, over 972647.55 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 10:20:39,096 INFO [train.py:715] (7/8) Epoch 18, batch 22200, loss[loss=0.1347, simple_loss=0.2141, pruned_loss=0.02764, over 4784.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.02849, over 973193.17 frames.], batch size: 18, lr: 1.23e-04 +2022-05-09 10:21:17,773 INFO [train.py:715] (7/8) Epoch 18, batch 22250, loss[loss=0.1499, simple_loss=0.2231, pruned_loss=0.03831, over 4808.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02835, over 973007.55 frames.], batch size: 12, lr: 1.23e-04 +2022-05-09 10:21:57,019 INFO [train.py:715] (7/8) Epoch 18, batch 22300, loss[loss=0.1339, simple_loss=0.2135, pruned_loss=0.02716, over 4807.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02848, over 973272.73 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 
10:22:35,720 INFO [train.py:715] (7/8) Epoch 18, batch 22350, loss[loss=0.1516, simple_loss=0.2168, pruned_loss=0.04323, over 4779.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02822, over 972857.44 frames.], batch size: 14, lr: 1.23e-04 +2022-05-09 10:23:14,497 INFO [train.py:715] (7/8) Epoch 18, batch 22400, loss[loss=0.1087, simple_loss=0.1907, pruned_loss=0.01334, over 4814.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02805, over 972070.08 frames.], batch size: 27, lr: 1.23e-04 +2022-05-09 10:23:53,399 INFO [train.py:715] (7/8) Epoch 18, batch 22450, loss[loss=0.1336, simple_loss=0.2034, pruned_loss=0.03186, over 4832.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02812, over 972212.81 frames.], batch size: 30, lr: 1.23e-04 +2022-05-09 10:24:32,485 INFO [train.py:715] (7/8) Epoch 18, batch 22500, loss[loss=0.1343, simple_loss=0.209, pruned_loss=0.02975, over 4873.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02822, over 972926.44 frames.], batch size: 39, lr: 1.23e-04 +2022-05-09 10:25:11,265 INFO [train.py:715] (7/8) Epoch 18, batch 22550, loss[loss=0.1029, simple_loss=0.1761, pruned_loss=0.01486, over 4956.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2054, pruned_loss=0.02843, over 973705.59 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 10:25:50,058 INFO [train.py:715] (7/8) Epoch 18, batch 22600, loss[loss=0.1131, simple_loss=0.189, pruned_loss=0.01858, over 4890.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02827, over 973318.81 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 10:26:29,080 INFO [train.py:715] (7/8) Epoch 18, batch 22650, loss[loss=0.1373, simple_loss=0.2099, pruned_loss=0.03238, over 4911.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02815, over 973382.35 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 10:27:07,865 INFO [train.py:715] (7/8) Epoch 18, batch 22700, loss[loss=0.1161, simple_loss=0.1854, pruned_loss=0.02341, over 4922.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02813, over 972463.36 frames.], batch size: 29, lr: 1.23e-04 +2022-05-09 10:27:46,837 INFO [train.py:715] (7/8) Epoch 18, batch 22750, loss[loss=0.1311, simple_loss=0.2067, pruned_loss=0.0277, over 4927.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.0284, over 972490.86 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 10:28:26,218 INFO [train.py:715] (7/8) Epoch 18, batch 22800, loss[loss=0.129, simple_loss=0.2118, pruned_loss=0.02313, over 4897.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02877, over 971886.10 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:29:04,924 INFO [train.py:715] (7/8) Epoch 18, batch 22850, loss[loss=0.1292, simple_loss=0.2026, pruned_loss=0.02789, over 4768.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.029, over 971532.51 frames.], batch size: 19, lr: 1.23e-04 +2022-05-09 10:29:43,881 INFO [train.py:715] (7/8) Epoch 18, batch 22900, loss[loss=0.1334, simple_loss=0.2107, pruned_loss=0.02808, over 4803.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2065, pruned_loss=0.02936, over 972027.27 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 10:30:22,780 INFO [train.py:715] (7/8) Epoch 18, batch 22950, loss[loss=0.1401, simple_loss=0.2107, pruned_loss=0.03479, over 4987.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02875, over 971538.89 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:31:02,204 
INFO [train.py:715] (7/8) Epoch 18, batch 23000, loss[loss=0.1827, simple_loss=0.2578, pruned_loss=0.05384, over 4866.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2054, pruned_loss=0.02874, over 972096.59 frames.], batch size: 32, lr: 1.23e-04 +2022-05-09 10:31:40,971 INFO [train.py:715] (7/8) Epoch 18, batch 23050, loss[loss=0.1181, simple_loss=0.1952, pruned_loss=0.0205, over 4962.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02881, over 972606.26 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:32:20,095 INFO [train.py:715] (7/8) Epoch 18, batch 23100, loss[loss=0.123, simple_loss=0.1979, pruned_loss=0.02407, over 4984.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02909, over 972523.01 frames.], batch size: 28, lr: 1.23e-04 +2022-05-09 10:32:59,655 INFO [train.py:715] (7/8) Epoch 18, batch 23150, loss[loss=0.1311, simple_loss=0.2043, pruned_loss=0.02894, over 4979.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2063, pruned_loss=0.02924, over 972667.44 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:33:38,766 INFO [train.py:715] (7/8) Epoch 18, batch 23200, loss[loss=0.1308, simple_loss=0.2072, pruned_loss=0.02718, over 4812.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2064, pruned_loss=0.02937, over 972149.77 frames.], batch size: 27, lr: 1.23e-04 +2022-05-09 10:34:17,631 INFO [train.py:715] (7/8) Epoch 18, batch 23250, loss[loss=0.1541, simple_loss=0.226, pruned_loss=0.04113, over 4750.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02892, over 972252.48 frames.], batch size: 16, lr: 1.23e-04 +2022-05-09 10:34:56,936 INFO [train.py:715] (7/8) Epoch 18, batch 23300, loss[loss=0.1316, simple_loss=0.2195, pruned_loss=0.02189, over 4913.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02871, over 972550.53 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 10:35:36,583 INFO [train.py:715] (7/8) Epoch 18, batch 23350, loss[loss=0.1602, simple_loss=0.2356, pruned_loss=0.04245, over 4936.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02868, over 972475.72 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 10:36:15,528 INFO [train.py:715] (7/8) Epoch 18, batch 23400, loss[loss=0.1133, simple_loss=0.1896, pruned_loss=0.0185, over 4804.00 frames.], tot_loss[loss=0.1319, simple_loss=0.207, pruned_loss=0.02843, over 972678.81 frames.], batch size: 25, lr: 1.23e-04 +2022-05-09 10:36:54,047 INFO [train.py:715] (7/8) Epoch 18, batch 23450, loss[loss=0.1184, simple_loss=0.1905, pruned_loss=0.02312, over 4841.00 frames.], tot_loss[loss=0.1319, simple_loss=0.207, pruned_loss=0.02838, over 972978.90 frames.], batch size: 13, lr: 1.23e-04 +2022-05-09 10:37:33,551 INFO [train.py:715] (7/8) Epoch 18, batch 23500, loss[loss=0.1303, simple_loss=0.2008, pruned_loss=0.02988, over 4936.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02844, over 973833.68 frames.], batch size: 21, lr: 1.23e-04 +2022-05-09 10:38:12,436 INFO [train.py:715] (7/8) Epoch 18, batch 23550, loss[loss=0.1338, simple_loss=0.2138, pruned_loss=0.02691, over 4694.00 frames.], tot_loss[loss=0.1318, simple_loss=0.207, pruned_loss=0.02826, over 973292.67 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:38:51,087 INFO [train.py:715] (7/8) Epoch 18, batch 23600, loss[loss=0.1333, simple_loss=0.2148, pruned_loss=0.02587, over 4807.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2073, pruned_loss=0.02853, over 973197.92 frames.], batch size: 24, lr: 1.23e-04 +2022-05-09 10:39:30,022 INFO 
[train.py:715] (7/8) Epoch 18, batch 23650, loss[loss=0.1137, simple_loss=0.1928, pruned_loss=0.01733, over 4869.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.028, over 972914.99 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 10:40:08,662 INFO [train.py:715] (7/8) Epoch 18, batch 23700, loss[loss=0.143, simple_loss=0.2186, pruned_loss=0.03369, over 4686.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02847, over 972839.31 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:40:47,464 INFO [train.py:715] (7/8) Epoch 18, batch 23750, loss[loss=0.1118, simple_loss=0.1861, pruned_loss=0.01879, over 4883.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02839, over 972301.40 frames.], batch size: 22, lr: 1.23e-04 +2022-05-09 10:41:26,881 INFO [train.py:715] (7/8) Epoch 18, batch 23800, loss[loss=0.1521, simple_loss=0.211, pruned_loss=0.04662, over 4712.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02851, over 972666.17 frames.], batch size: 15, lr: 1.23e-04 +2022-05-09 10:42:06,534 INFO [train.py:715] (7/8) Epoch 18, batch 23850, loss[loss=0.1214, simple_loss=0.1978, pruned_loss=0.0225, over 4946.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02802, over 972271.25 frames.], batch size: 23, lr: 1.23e-04 +2022-05-09 10:42:45,348 INFO [train.py:715] (7/8) Epoch 18, batch 23900, loss[loss=0.1367, simple_loss=0.2167, pruned_loss=0.02831, over 4781.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02813, over 971859.37 frames.], batch size: 17, lr: 1.23e-04 +2022-05-09 10:43:24,103 INFO [train.py:715] (7/8) Epoch 18, batch 23950, loss[loss=0.1394, simple_loss=0.2159, pruned_loss=0.03148, over 4764.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02828, over 971766.09 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 10:44:03,430 INFO [train.py:715] (7/8) Epoch 18, batch 24000, loss[loss=0.1368, simple_loss=0.2225, pruned_loss=0.02552, over 4849.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2062, pruned_loss=0.0281, over 971310.91 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 10:44:03,431 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 10:44:13,351 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1045, simple_loss=0.1878, pruned_loss=0.01057, over 914524.00 frames. 
+2022-05-09 10:44:52,990 INFO [train.py:715] (7/8) Epoch 18, batch 24050, loss[loss=0.1471, simple_loss=0.2226, pruned_loss=0.0358, over 4903.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2072, pruned_loss=0.02859, over 971639.78 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 10:45:31,816 INFO [train.py:715] (7/8) Epoch 18, batch 24100, loss[loss=0.1094, simple_loss=0.1935, pruned_loss=0.0126, over 4958.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2072, pruned_loss=0.02848, over 971793.34 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 10:46:10,747 INFO [train.py:715] (7/8) Epoch 18, batch 24150, loss[loss=0.138, simple_loss=0.2088, pruned_loss=0.03362, over 4858.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2071, pruned_loss=0.02823, over 972240.53 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 10:46:50,172 INFO [train.py:715] (7/8) Epoch 18, batch 24200, loss[loss=0.126, simple_loss=0.1964, pruned_loss=0.02779, over 4854.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2061, pruned_loss=0.02782, over 972913.42 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 10:47:29,226 INFO [train.py:715] (7/8) Epoch 18, batch 24250, loss[loss=0.1142, simple_loss=0.1822, pruned_loss=0.02306, over 4869.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2053, pruned_loss=0.02766, over 971746.06 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 10:48:08,103 INFO [train.py:715] (7/8) Epoch 18, batch 24300, loss[loss=0.1497, simple_loss=0.2234, pruned_loss=0.03802, over 4806.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02821, over 971797.96 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 10:48:46,577 INFO [train.py:715] (7/8) Epoch 18, batch 24350, loss[loss=0.1272, simple_loss=0.1977, pruned_loss=0.02836, over 4936.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02877, over 972428.90 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 10:49:25,639 INFO [train.py:715] (7/8) Epoch 18, batch 24400, loss[loss=0.1303, simple_loss=0.2084, pruned_loss=0.02612, over 4955.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02872, over 972690.60 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 10:50:04,248 INFO [train.py:715] (7/8) Epoch 18, batch 24450, loss[loss=0.1354, simple_loss=0.2169, pruned_loss=0.02697, over 4809.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2073, pruned_loss=0.02869, over 972361.79 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 10:50:42,846 INFO [train.py:715] (7/8) Epoch 18, batch 24500, loss[loss=0.1418, simple_loss=0.2193, pruned_loss=0.03217, over 4749.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02847, over 972792.61 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 10:51:22,303 INFO [train.py:715] (7/8) Epoch 18, batch 24550, loss[loss=0.135, simple_loss=0.2108, pruned_loss=0.02964, over 4811.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2077, pruned_loss=0.02946, over 972513.56 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 10:52:01,508 INFO [train.py:715] (7/8) Epoch 18, batch 24600, loss[loss=0.165, simple_loss=0.226, pruned_loss=0.05197, over 4830.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02922, over 972582.82 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:52:40,235 INFO [train.py:715] (7/8) Epoch 18, batch 24650, loss[loss=0.1128, simple_loss=0.1886, pruned_loss=0.01855, over 4928.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2076, pruned_loss=0.02936, over 972482.64 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 
10:53:18,840 INFO [train.py:715] (7/8) Epoch 18, batch 24700, loss[loss=0.1262, simple_loss=0.194, pruned_loss=0.02917, over 4810.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02916, over 972336.68 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 10:53:58,063 INFO [train.py:715] (7/8) Epoch 18, batch 24750, loss[loss=0.144, simple_loss=0.2207, pruned_loss=0.03365, over 4759.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2071, pruned_loss=0.02908, over 972476.60 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 10:54:37,028 INFO [train.py:715] (7/8) Epoch 18, batch 24800, loss[loss=0.1032, simple_loss=0.1733, pruned_loss=0.01657, over 4857.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.0287, over 972785.73 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 10:55:16,443 INFO [train.py:715] (7/8) Epoch 18, batch 24850, loss[loss=0.1292, simple_loss=0.2093, pruned_loss=0.02454, over 4767.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02833, over 973046.97 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 10:55:55,500 INFO [train.py:715] (7/8) Epoch 18, batch 24900, loss[loss=0.1353, simple_loss=0.2095, pruned_loss=0.03056, over 4734.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02875, over 973074.92 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 10:56:35,065 INFO [train.py:715] (7/8) Epoch 18, batch 24950, loss[loss=0.1566, simple_loss=0.2325, pruned_loss=0.04038, over 4775.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02935, over 972313.94 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 10:57:14,189 INFO [train.py:715] (7/8) Epoch 18, batch 25000, loss[loss=0.1185, simple_loss=0.2, pruned_loss=0.0185, over 4962.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2076, pruned_loss=0.02946, over 973008.23 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:57:52,843 INFO [train.py:715] (7/8) Epoch 18, batch 25050, loss[loss=0.138, simple_loss=0.2148, pruned_loss=0.03058, over 4950.00 frames.], tot_loss[loss=0.1329, simple_loss=0.207, pruned_loss=0.02936, over 973130.02 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 10:58:32,136 INFO [train.py:715] (7/8) Epoch 18, batch 25100, loss[loss=0.1129, simple_loss=0.1847, pruned_loss=0.02058, over 4931.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02904, over 973057.29 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 10:59:11,693 INFO [train.py:715] (7/8) Epoch 18, batch 25150, loss[loss=0.1421, simple_loss=0.2248, pruned_loss=0.02977, over 4982.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02885, over 972630.30 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 10:59:50,262 INFO [train.py:715] (7/8) Epoch 18, batch 25200, loss[loss=0.1147, simple_loss=0.1979, pruned_loss=0.01574, over 4930.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.02937, over 973271.92 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:00:29,821 INFO [train.py:715] (7/8) Epoch 18, batch 25250, loss[loss=0.1963, simple_loss=0.2652, pruned_loss=0.06369, over 4964.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02951, over 973735.71 frames.], batch size: 40, lr: 1.22e-04 +2022-05-09 11:01:09,546 INFO [train.py:715] (7/8) Epoch 18, batch 25300, loss[loss=0.1425, simple_loss=0.2055, pruned_loss=0.03979, over 4792.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.02962, over 972645.44 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:01:48,678 INFO 
[train.py:715] (7/8) Epoch 18, batch 25350, loss[loss=0.1344, simple_loss=0.209, pruned_loss=0.02989, over 4982.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02917, over 971820.62 frames.], batch size: 33, lr: 1.22e-04 +2022-05-09 11:02:27,382 INFO [train.py:715] (7/8) Epoch 18, batch 25400, loss[loss=0.1308, simple_loss=0.2072, pruned_loss=0.02716, over 4700.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02879, over 972082.82 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:03:06,962 INFO [train.py:715] (7/8) Epoch 18, batch 25450, loss[loss=0.1205, simple_loss=0.1939, pruned_loss=0.02355, over 4810.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.029, over 971913.70 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 11:03:45,938 INFO [train.py:715] (7/8) Epoch 18, batch 25500, loss[loss=0.1314, simple_loss=0.2097, pruned_loss=0.0265, over 4772.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2069, pruned_loss=0.02931, over 972459.09 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:04:24,922 INFO [train.py:715] (7/8) Epoch 18, batch 25550, loss[loss=0.1439, simple_loss=0.2209, pruned_loss=0.03347, over 4899.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02899, over 972901.43 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:05:04,559 INFO [train.py:715] (7/8) Epoch 18, batch 25600, loss[loss=0.1492, simple_loss=0.2209, pruned_loss=0.03875, over 4954.00 frames.], tot_loss[loss=0.133, simple_loss=0.2075, pruned_loss=0.02921, over 972850.50 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:05:44,105 INFO [train.py:715] (7/8) Epoch 18, batch 25650, loss[loss=0.1167, simple_loss=0.2005, pruned_loss=0.01642, over 4880.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2077, pruned_loss=0.02934, over 972929.25 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 11:06:23,311 INFO [train.py:715] (7/8) Epoch 18, batch 25700, loss[loss=0.1145, simple_loss=0.1934, pruned_loss=0.01783, over 4815.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02913, over 972501.95 frames.], batch size: 27, lr: 1.22e-04 +2022-05-09 11:07:02,569 INFO [train.py:715] (7/8) Epoch 18, batch 25750, loss[loss=0.116, simple_loss=0.1962, pruned_loss=0.0179, over 4773.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2075, pruned_loss=0.02935, over 972308.97 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:07:41,974 INFO [train.py:715] (7/8) Epoch 18, batch 25800, loss[loss=0.1457, simple_loss=0.2301, pruned_loss=0.0307, over 4753.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2079, pruned_loss=0.02914, over 972421.80 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:08:20,796 INFO [train.py:715] (7/8) Epoch 18, batch 25850, loss[loss=0.1216, simple_loss=0.1949, pruned_loss=0.02415, over 4800.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.0282, over 972319.28 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:08:59,118 INFO [train.py:715] (7/8) Epoch 18, batch 25900, loss[loss=0.1239, simple_loss=0.2002, pruned_loss=0.02377, over 4949.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02823, over 972502.95 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:09:38,437 INFO [train.py:715] (7/8) Epoch 18, batch 25950, loss[loss=0.1101, simple_loss=0.183, pruned_loss=0.01861, over 4871.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2074, pruned_loss=0.02865, over 971580.63 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:10:17,513 INFO [train.py:715] 
(7/8) Epoch 18, batch 26000, loss[loss=0.1153, simple_loss=0.1959, pruned_loss=0.01736, over 4838.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2073, pruned_loss=0.02864, over 971752.12 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:10:56,983 INFO [train.py:715] (7/8) Epoch 18, batch 26050, loss[loss=0.131, simple_loss=0.2068, pruned_loss=0.02762, over 4985.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02854, over 971480.15 frames.], batch size: 31, lr: 1.22e-04 +2022-05-09 11:11:36,112 INFO [train.py:715] (7/8) Epoch 18, batch 26100, loss[loss=0.116, simple_loss=0.1927, pruned_loss=0.0197, over 4799.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02837, over 971670.67 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:12:15,691 INFO [train.py:715] (7/8) Epoch 18, batch 26150, loss[loss=0.1097, simple_loss=0.179, pruned_loss=0.02018, over 4962.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02848, over 972633.78 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:12:54,905 INFO [train.py:715] (7/8) Epoch 18, batch 26200, loss[loss=0.1134, simple_loss=0.1847, pruned_loss=0.02104, over 4932.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.0284, over 972533.03 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 11:13:33,235 INFO [train.py:715] (7/8) Epoch 18, batch 26250, loss[loss=0.1365, simple_loss=0.2268, pruned_loss=0.02311, over 4778.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02861, over 972057.01 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:14:12,858 INFO [train.py:715] (7/8) Epoch 18, batch 26300, loss[loss=0.1391, simple_loss=0.2147, pruned_loss=0.0318, over 4771.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02879, over 972290.39 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:14:51,544 INFO [train.py:715] (7/8) Epoch 18, batch 26350, loss[loss=0.1387, simple_loss=0.2177, pruned_loss=0.02987, over 4972.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02865, over 972139.88 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 11:15:30,594 INFO [train.py:715] (7/8) Epoch 18, batch 26400, loss[loss=0.1246, simple_loss=0.2017, pruned_loss=0.02371, over 4988.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02844, over 971422.92 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:16:09,486 INFO [train.py:715] (7/8) Epoch 18, batch 26450, loss[loss=0.1131, simple_loss=0.1885, pruned_loss=0.01882, over 4899.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02826, over 971195.73 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:16:49,039 INFO [train.py:715] (7/8) Epoch 18, batch 26500, loss[loss=0.17, simple_loss=0.2229, pruned_loss=0.05855, over 4780.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02802, over 970653.64 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:17:28,072 INFO [train.py:715] (7/8) Epoch 18, batch 26550, loss[loss=0.1446, simple_loss=0.2152, pruned_loss=0.037, over 4953.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02872, over 970336.99 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 11:18:06,865 INFO [train.py:715] (7/8) Epoch 18, batch 26600, loss[loss=0.127, simple_loss=0.2067, pruned_loss=0.02365, over 4807.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02904, over 970512.07 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:18:46,131 INFO [train.py:715] (7/8) Epoch 18, 
batch 26650, loss[loss=0.1213, simple_loss=0.1922, pruned_loss=0.02513, over 4758.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02866, over 971116.31 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:19:25,272 INFO [train.py:715] (7/8) Epoch 18, batch 26700, loss[loss=0.1522, simple_loss=0.2275, pruned_loss=0.03848, over 4921.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02848, over 971648.99 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:20:05,265 INFO [train.py:715] (7/8) Epoch 18, batch 26750, loss[loss=0.1445, simple_loss=0.2253, pruned_loss=0.03192, over 4964.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02887, over 971871.40 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:20:43,662 INFO [train.py:715] (7/8) Epoch 18, batch 26800, loss[loss=0.1441, simple_loss=0.2285, pruned_loss=0.02983, over 4794.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02864, over 972265.98 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:21:23,698 INFO [train.py:715] (7/8) Epoch 18, batch 26850, loss[loss=0.1249, simple_loss=0.1995, pruned_loss=0.02513, over 4895.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02881, over 972377.31 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:22:03,380 INFO [train.py:715] (7/8) Epoch 18, batch 26900, loss[loss=0.1138, simple_loss=0.1792, pruned_loss=0.02426, over 4786.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.0287, over 972350.48 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 11:22:41,422 INFO [train.py:715] (7/8) Epoch 18, batch 26950, loss[loss=0.108, simple_loss=0.1798, pruned_loss=0.01813, over 4780.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.0288, over 971994.15 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:23:20,806 INFO [train.py:715] (7/8) Epoch 18, batch 27000, loss[loss=0.1207, simple_loss=0.192, pruned_loss=0.02465, over 4777.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02875, over 971816.52 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:23:20,807 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 11:23:30,796 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1044, simple_loss=0.1877, pruned_loss=0.01055, over 914524.00 frames. 
+2022-05-09 11:24:11,109 INFO [train.py:715] (7/8) Epoch 18, batch 27050, loss[loss=0.1183, simple_loss=0.1838, pruned_loss=0.02639, over 4863.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02835, over 972072.55 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:24:50,013 INFO [train.py:715] (7/8) Epoch 18, batch 27100, loss[loss=0.1256, simple_loss=0.2113, pruned_loss=0.01998, over 4940.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02898, over 972121.68 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 11:25:29,327 INFO [train.py:715] (7/8) Epoch 18, batch 27150, loss[loss=0.121, simple_loss=0.1914, pruned_loss=0.02532, over 4880.00 frames.], tot_loss[loss=0.1324, simple_loss=0.207, pruned_loss=0.02891, over 972616.57 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 11:26:08,687 INFO [train.py:715] (7/8) Epoch 18, batch 27200, loss[loss=0.1464, simple_loss=0.2231, pruned_loss=0.0349, over 4851.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2066, pruned_loss=0.02893, over 972280.81 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 11:26:47,919 INFO [train.py:715] (7/8) Epoch 18, batch 27250, loss[loss=0.1298, simple_loss=0.2023, pruned_loss=0.0286, over 4828.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02902, over 972268.05 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:27:26,984 INFO [train.py:715] (7/8) Epoch 18, batch 27300, loss[loss=0.1104, simple_loss=0.1798, pruned_loss=0.02056, over 4870.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02906, over 972264.91 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:28:05,824 INFO [train.py:715] (7/8) Epoch 18, batch 27350, loss[loss=0.1418, simple_loss=0.217, pruned_loss=0.03336, over 4880.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.0292, over 971901.37 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 11:28:46,004 INFO [train.py:715] (7/8) Epoch 18, batch 27400, loss[loss=0.1452, simple_loss=0.2234, pruned_loss=0.03345, over 4825.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02918, over 971717.14 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 11:29:25,405 INFO [train.py:715] (7/8) Epoch 18, batch 27450, loss[loss=0.1279, simple_loss=0.1951, pruned_loss=0.03033, over 4757.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02915, over 972064.70 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:30:04,446 INFO [train.py:715] (7/8) Epoch 18, batch 27500, loss[loss=0.1232, simple_loss=0.1974, pruned_loss=0.02448, over 4702.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02885, over 972694.29 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:30:44,161 INFO [train.py:715] (7/8) Epoch 18, batch 27550, loss[loss=0.1379, simple_loss=0.2138, pruned_loss=0.03097, over 4857.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02863, over 972861.44 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:31:23,281 INFO [train.py:715] (7/8) Epoch 18, batch 27600, loss[loss=0.145, simple_loss=0.2142, pruned_loss=0.03789, over 4851.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02836, over 973344.78 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:32:01,948 INFO [train.py:715] (7/8) Epoch 18, batch 27650, loss[loss=0.144, simple_loss=0.2196, pruned_loss=0.0342, over 4957.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02858, over 972976.09 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 
11:32:40,859 INFO [train.py:715] (7/8) Epoch 18, batch 27700, loss[loss=0.1363, simple_loss=0.2099, pruned_loss=0.03138, over 4906.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02882, over 973574.52 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:33:20,158 INFO [train.py:715] (7/8) Epoch 18, batch 27750, loss[loss=0.1397, simple_loss=0.2193, pruned_loss=0.03008, over 4773.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02872, over 972941.15 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:33:59,617 INFO [train.py:715] (7/8) Epoch 18, batch 27800, loss[loss=0.1465, simple_loss=0.2206, pruned_loss=0.03622, over 4979.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02911, over 973384.35 frames.], batch size: 28, lr: 1.22e-04 +2022-05-09 11:34:38,870 INFO [train.py:715] (7/8) Epoch 18, batch 27850, loss[loss=0.1371, simple_loss=0.2131, pruned_loss=0.03056, over 4866.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2067, pruned_loss=0.02948, over 973212.31 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:35:18,481 INFO [train.py:715] (7/8) Epoch 18, batch 27900, loss[loss=0.1402, simple_loss=0.2074, pruned_loss=0.03654, over 4980.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02954, over 973530.26 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 11:35:57,739 INFO [train.py:715] (7/8) Epoch 18, batch 27950, loss[loss=0.1152, simple_loss=0.1868, pruned_loss=0.02184, over 4814.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02916, over 973199.14 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 11:36:36,983 INFO [train.py:715] (7/8) Epoch 18, batch 28000, loss[loss=0.1441, simple_loss=0.2171, pruned_loss=0.03554, over 4928.00 frames.], tot_loss[loss=0.132, simple_loss=0.2055, pruned_loss=0.02925, over 974243.97 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:37:16,532 INFO [train.py:715] (7/8) Epoch 18, batch 28050, loss[loss=0.1275, simple_loss=0.2069, pruned_loss=0.02405, over 4875.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2052, pruned_loss=0.02889, over 973647.32 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 11:37:56,321 INFO [train.py:715] (7/8) Epoch 18, batch 28100, loss[loss=0.147, simple_loss=0.2208, pruned_loss=0.03661, over 4861.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02919, over 974886.42 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 11:38:35,511 INFO [train.py:715] (7/8) Epoch 18, batch 28150, loss[loss=0.1292, simple_loss=0.2094, pruned_loss=0.0245, over 4954.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2058, pruned_loss=0.02902, over 974837.60 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 11:39:13,844 INFO [train.py:715] (7/8) Epoch 18, batch 28200, loss[loss=0.1227, simple_loss=0.2023, pruned_loss=0.02153, over 4804.00 frames.], tot_loss[loss=0.132, simple_loss=0.2059, pruned_loss=0.02903, over 974451.57 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:39:53,466 INFO [train.py:715] (7/8) Epoch 18, batch 28250, loss[loss=0.1145, simple_loss=0.1975, pruned_loss=0.0158, over 4879.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.02857, over 974671.54 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:40:32,296 INFO [train.py:715] (7/8) Epoch 18, batch 28300, loss[loss=0.1359, simple_loss=0.2146, pruned_loss=0.02863, over 4795.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02871, over 975011.69 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 
11:41:11,197 INFO [train.py:715] (7/8) Epoch 18, batch 28350, loss[loss=0.1319, simple_loss=0.1983, pruned_loss=0.03274, over 4872.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02891, over 974593.29 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:41:50,495 INFO [train.py:715] (7/8) Epoch 18, batch 28400, loss[loss=0.1206, simple_loss=0.1989, pruned_loss=0.02109, over 4915.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.02887, over 973506.49 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 11:42:29,773 INFO [train.py:715] (7/8) Epoch 18, batch 28450, loss[loss=0.1056, simple_loss=0.1683, pruned_loss=0.02141, over 4726.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02858, over 972912.36 frames.], batch size: 12, lr: 1.22e-04 +2022-05-09 11:43:08,846 INFO [train.py:715] (7/8) Epoch 18, batch 28500, loss[loss=0.1183, simple_loss=0.1882, pruned_loss=0.02417, over 4963.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02884, over 972099.87 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:43:47,955 INFO [train.py:715] (7/8) Epoch 18, batch 28550, loss[loss=0.1309, simple_loss=0.2031, pruned_loss=0.02938, over 4793.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02856, over 973088.45 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 11:44:27,959 INFO [train.py:715] (7/8) Epoch 18, batch 28600, loss[loss=0.1378, simple_loss=0.2029, pruned_loss=0.03635, over 4784.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.0286, over 973257.38 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:45:06,661 INFO [train.py:715] (7/8) Epoch 18, batch 28650, loss[loss=0.1193, simple_loss=0.1986, pruned_loss=0.01997, over 4813.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02832, over 973060.69 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 11:45:45,610 INFO [train.py:715] (7/8) Epoch 18, batch 28700, loss[loss=0.1314, simple_loss=0.2102, pruned_loss=0.02632, over 4784.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2062, pruned_loss=0.02813, over 973172.61 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 11:46:25,179 INFO [train.py:715] (7/8) Epoch 18, batch 28750, loss[loss=0.154, simple_loss=0.2219, pruned_loss=0.04304, over 4993.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02842, over 973499.55 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:47:04,223 INFO [train.py:715] (7/8) Epoch 18, batch 28800, loss[loss=0.1347, simple_loss=0.2217, pruned_loss=0.02381, over 4887.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2066, pruned_loss=0.02822, over 972818.90 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:47:43,078 INFO [train.py:715] (7/8) Epoch 18, batch 28850, loss[loss=0.1298, simple_loss=0.2086, pruned_loss=0.02545, over 4899.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02827, over 973424.11 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:48:21,624 INFO [train.py:715] (7/8) Epoch 18, batch 28900, loss[loss=0.1386, simple_loss=0.2079, pruned_loss=0.03461, over 4977.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02807, over 973254.83 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:49:01,745 INFO [train.py:715] (7/8) Epoch 18, batch 28950, loss[loss=0.1105, simple_loss=0.1915, pruned_loss=0.01473, over 4854.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2052, pruned_loss=0.02774, over 972968.12 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 
11:49:40,558 INFO [train.py:715] (7/8) Epoch 18, batch 29000, loss[loss=0.1232, simple_loss=0.1942, pruned_loss=0.02609, over 4807.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02846, over 971961.53 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:50:19,732 INFO [train.py:715] (7/8) Epoch 18, batch 29050, loss[loss=0.1259, simple_loss=0.2053, pruned_loss=0.02323, over 4746.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02875, over 971225.70 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 11:50:59,124 INFO [train.py:715] (7/8) Epoch 18, batch 29100, loss[loss=0.1092, simple_loss=0.1808, pruned_loss=0.0188, over 4782.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.0284, over 971500.20 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:51:38,417 INFO [train.py:715] (7/8) Epoch 18, batch 29150, loss[loss=0.131, simple_loss=0.212, pruned_loss=0.02498, over 4761.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02863, over 971102.69 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 11:52:17,128 INFO [train.py:715] (7/8) Epoch 18, batch 29200, loss[loss=0.1255, simple_loss=0.2015, pruned_loss=0.02468, over 4987.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02853, over 971052.67 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 11:52:55,644 INFO [train.py:715] (7/8) Epoch 18, batch 29250, loss[loss=0.1176, simple_loss=0.1901, pruned_loss=0.0226, over 4840.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02851, over 971869.80 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 11:53:35,213 INFO [train.py:715] (7/8) Epoch 18, batch 29300, loss[loss=0.1519, simple_loss=0.2262, pruned_loss=0.03882, over 4685.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02858, over 971184.78 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 11:54:13,916 INFO [train.py:715] (7/8) Epoch 18, batch 29350, loss[loss=0.1366, simple_loss=0.2084, pruned_loss=0.03246, over 4976.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02954, over 970679.52 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 11:54:52,611 INFO [train.py:715] (7/8) Epoch 18, batch 29400, loss[loss=0.124, simple_loss=0.1972, pruned_loss=0.02536, over 4815.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2068, pruned_loss=0.02952, over 971086.10 frames.], batch size: 27, lr: 1.22e-04 +2022-05-09 11:55:33,952 INFO [train.py:715] (7/8) Epoch 18, batch 29450, loss[loss=0.1402, simple_loss=0.2309, pruned_loss=0.02477, over 4960.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2073, pruned_loss=0.02952, over 972060.08 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:56:12,979 INFO [train.py:715] (7/8) Epoch 18, batch 29500, loss[loss=0.1384, simple_loss=0.2109, pruned_loss=0.03295, over 4846.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02936, over 971379.02 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 11:56:52,074 INFO [train.py:715] (7/8) Epoch 18, batch 29550, loss[loss=0.1376, simple_loss=0.2245, pruned_loss=0.02537, over 4797.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02954, over 971626.20 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 11:57:30,050 INFO [train.py:715] (7/8) Epoch 18, batch 29600, loss[loss=0.1216, simple_loss=0.1988, pruned_loss=0.02225, over 4765.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2077, pruned_loss=0.02973, over 971552.53 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 
11:58:09,262 INFO [train.py:715] (7/8) Epoch 18, batch 29650, loss[loss=0.1273, simple_loss=0.2146, pruned_loss=0.02001, over 4954.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02951, over 971659.11 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 11:58:48,234 INFO [train.py:715] (7/8) Epoch 18, batch 29700, loss[loss=0.1377, simple_loss=0.2086, pruned_loss=0.03341, over 4872.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2079, pruned_loss=0.02958, over 971981.86 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 11:59:26,598 INFO [train.py:715] (7/8) Epoch 18, batch 29750, loss[loss=0.1207, simple_loss=0.1986, pruned_loss=0.02137, over 4985.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2081, pruned_loss=0.02966, over 971126.45 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:00:05,916 INFO [train.py:715] (7/8) Epoch 18, batch 29800, loss[loss=0.1479, simple_loss=0.2177, pruned_loss=0.03907, over 4833.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02977, over 972105.01 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:00:45,628 INFO [train.py:715] (7/8) Epoch 18, batch 29850, loss[loss=0.09921, simple_loss=0.1705, pruned_loss=0.01394, over 4765.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2073, pruned_loss=0.02977, over 971816.78 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:01:24,706 INFO [train.py:715] (7/8) Epoch 18, batch 29900, loss[loss=0.1104, simple_loss=0.1916, pruned_loss=0.01457, over 4819.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02937, over 971768.15 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:02:03,298 INFO [train.py:715] (7/8) Epoch 18, batch 29950, loss[loss=0.1159, simple_loss=0.1785, pruned_loss=0.02663, over 4808.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02887, over 970874.21 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:02:43,061 INFO [train.py:715] (7/8) Epoch 18, batch 30000, loss[loss=0.1124, simple_loss=0.1901, pruned_loss=0.01734, over 4753.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02864, over 970960.40 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:02:43,061 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 12:02:52,968 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1047, simple_loss=0.188, pruned_loss=0.01071, over 914524.00 frames. 
+2022-05-09 12:03:33,191 INFO [train.py:715] (7/8) Epoch 18, batch 30050, loss[loss=0.1211, simple_loss=0.1922, pruned_loss=0.02493, over 4876.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02872, over 971047.49 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:04:12,318 INFO [train.py:715] (7/8) Epoch 18, batch 30100, loss[loss=0.1322, simple_loss=0.217, pruned_loss=0.02371, over 4835.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02909, over 971461.41 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:04:50,504 INFO [train.py:715] (7/8) Epoch 18, batch 30150, loss[loss=0.1054, simple_loss=0.1736, pruned_loss=0.01859, over 4981.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2062, pruned_loss=0.02903, over 971847.29 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:05:29,938 INFO [train.py:715] (7/8) Epoch 18, batch 30200, loss[loss=0.1207, simple_loss=0.1982, pruned_loss=0.02156, over 4901.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02914, over 970747.27 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:06:09,181 INFO [train.py:715] (7/8) Epoch 18, batch 30250, loss[loss=0.1347, simple_loss=0.2007, pruned_loss=0.03441, over 4748.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02952, over 971412.89 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:06:48,872 INFO [train.py:715] (7/8) Epoch 18, batch 30300, loss[loss=0.1299, simple_loss=0.2019, pruned_loss=0.02895, over 4882.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02947, over 972827.30 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 12:07:27,508 INFO [train.py:715] (7/8) Epoch 18, batch 30350, loss[loss=0.1294, simple_loss=0.2044, pruned_loss=0.02716, over 4937.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02907, over 973082.77 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 12:08:07,408 INFO [train.py:715] (7/8) Epoch 18, batch 30400, loss[loss=0.1341, simple_loss=0.2074, pruned_loss=0.03041, over 4816.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02909, over 973803.86 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:08:46,436 INFO [train.py:715] (7/8) Epoch 18, batch 30450, loss[loss=0.1345, simple_loss=0.2073, pruned_loss=0.03083, over 4886.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02863, over 973442.94 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 12:09:24,924 INFO [train.py:715] (7/8) Epoch 18, batch 30500, loss[loss=0.1289, simple_loss=0.2185, pruned_loss=0.0196, over 4872.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02926, over 973616.65 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:10:04,134 INFO [train.py:715] (7/8) Epoch 18, batch 30550, loss[loss=0.1229, simple_loss=0.207, pruned_loss=0.01937, over 4913.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02884, over 973335.46 frames.], batch size: 23, lr: 1.22e-04 +2022-05-09 12:10:42,814 INFO [train.py:715] (7/8) Epoch 18, batch 30600, loss[loss=0.1337, simple_loss=0.2116, pruned_loss=0.02795, over 4893.00 frames.], tot_loss[loss=0.1321, simple_loss=0.206, pruned_loss=0.02908, over 973055.51 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 12:11:21,481 INFO [train.py:715] (7/8) Epoch 18, batch 30650, loss[loss=0.1405, simple_loss=0.2183, pruned_loss=0.03137, over 4985.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2056, pruned_loss=0.02866, over 972242.41 frames.], batch size: 28, lr: 1.22e-04 
+2022-05-09 12:12:00,154 INFO [train.py:715] (7/8) Epoch 18, batch 30700, loss[loss=0.1268, simple_loss=0.2044, pruned_loss=0.02457, over 4852.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.0292, over 971970.74 frames.], batch size: 32, lr: 1.22e-04 +2022-05-09 12:12:39,286 INFO [train.py:715] (7/8) Epoch 18, batch 30750, loss[loss=0.129, simple_loss=0.1996, pruned_loss=0.02922, over 4988.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2066, pruned_loss=0.0293, over 971950.99 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:13:18,035 INFO [train.py:715] (7/8) Epoch 18, batch 30800, loss[loss=0.1308, simple_loss=0.2131, pruned_loss=0.02427, over 4898.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02922, over 971895.49 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:13:56,473 INFO [train.py:715] (7/8) Epoch 18, batch 30850, loss[loss=0.1219, simple_loss=0.203, pruned_loss=0.02038, over 4931.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2063, pruned_loss=0.02902, over 972487.18 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:14:35,507 INFO [train.py:715] (7/8) Epoch 18, batch 30900, loss[loss=0.1601, simple_loss=0.2195, pruned_loss=0.05042, over 4847.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02925, over 971605.06 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 12:15:14,126 INFO [train.py:715] (7/8) Epoch 18, batch 30950, loss[loss=0.131, simple_loss=0.2129, pruned_loss=0.02456, over 4896.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02909, over 972424.58 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:15:52,435 INFO [train.py:715] (7/8) Epoch 18, batch 31000, loss[loss=0.1372, simple_loss=0.2088, pruned_loss=0.03282, over 4782.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2062, pruned_loss=0.02907, over 971815.07 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:16:31,407 INFO [train.py:715] (7/8) Epoch 18, batch 31050, loss[loss=0.1186, simple_loss=0.1924, pruned_loss=0.02237, over 4884.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02912, over 971305.95 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:17:10,959 INFO [train.py:715] (7/8) Epoch 18, batch 31100, loss[loss=0.1222, simple_loss=0.2037, pruned_loss=0.02038, over 4814.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.0292, over 971293.34 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:17:49,898 INFO [train.py:715] (7/8) Epoch 18, batch 31150, loss[loss=0.1527, simple_loss=0.2133, pruned_loss=0.04605, over 4781.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2061, pruned_loss=0.02889, over 971588.81 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:18:28,837 INFO [train.py:715] (7/8) Epoch 18, batch 31200, loss[loss=0.1309, simple_loss=0.2029, pruned_loss=0.02946, over 4892.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02912, over 971408.78 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 12:19:08,219 INFO [train.py:715] (7/8) Epoch 18, batch 31250, loss[loss=0.1325, simple_loss=0.2075, pruned_loss=0.02869, over 4919.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02912, over 971985.55 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:19:47,259 INFO [train.py:715] (7/8) Epoch 18, batch 31300, loss[loss=0.1523, simple_loss=0.2274, pruned_loss=0.03859, over 4825.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02853, over 972635.88 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 
12:20:25,876 INFO [train.py:715] (7/8) Epoch 18, batch 31350, loss[loss=0.1268, simple_loss=0.2016, pruned_loss=0.02597, over 4734.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02875, over 972267.46 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:21:05,052 INFO [train.py:715] (7/8) Epoch 18, batch 31400, loss[loss=0.1232, simple_loss=0.2022, pruned_loss=0.02216, over 4789.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02846, over 971212.02 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:21:44,557 INFO [train.py:715] (7/8) Epoch 18, batch 31450, loss[loss=0.1221, simple_loss=0.1928, pruned_loss=0.02576, over 4984.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02857, over 971381.30 frames.], batch size: 28, lr: 1.22e-04 +2022-05-09 12:22:23,388 INFO [train.py:715] (7/8) Epoch 18, batch 31500, loss[loss=0.1259, simple_loss=0.1957, pruned_loss=0.028, over 4808.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02874, over 971920.06 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:23:01,623 INFO [train.py:715] (7/8) Epoch 18, batch 31550, loss[loss=0.149, simple_loss=0.2187, pruned_loss=0.03961, over 4991.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02879, over 972370.41 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:23:41,441 INFO [train.py:715] (7/8) Epoch 18, batch 31600, loss[loss=0.1551, simple_loss=0.2205, pruned_loss=0.04484, over 4860.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02864, over 971698.33 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:24:20,714 INFO [train.py:715] (7/8) Epoch 18, batch 31650, loss[loss=0.1093, simple_loss=0.1889, pruned_loss=0.01486, over 4782.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02843, over 971548.44 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:24:59,686 INFO [train.py:715] (7/8) Epoch 18, batch 31700, loss[loss=0.1235, simple_loss=0.2074, pruned_loss=0.01976, over 4776.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02849, over 972085.37 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:25:38,811 INFO [train.py:715] (7/8) Epoch 18, batch 31750, loss[loss=0.1551, simple_loss=0.2247, pruned_loss=0.04278, over 4832.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02864, over 971396.65 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:26:18,655 INFO [train.py:715] (7/8) Epoch 18, batch 31800, loss[loss=0.1118, simple_loss=0.1853, pruned_loss=0.01909, over 4936.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02859, over 971105.31 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 12:26:58,020 INFO [train.py:715] (7/8) Epoch 18, batch 31850, loss[loss=0.1282, simple_loss=0.2081, pruned_loss=0.02413, over 4898.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.0283, over 972087.13 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 12:27:36,979 INFO [train.py:715] (7/8) Epoch 18, batch 31900, loss[loss=0.136, simple_loss=0.2115, pruned_loss=0.0302, over 4925.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2069, pruned_loss=0.0284, over 972535.41 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:28:16,148 INFO [train.py:715] (7/8) Epoch 18, batch 31950, loss[loss=0.1287, simple_loss=0.1968, pruned_loss=0.03032, over 4829.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.0288, over 972705.70 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:28:54,462 
INFO [train.py:715] (7/8) Epoch 18, batch 32000, loss[loss=0.1055, simple_loss=0.1854, pruned_loss=0.01278, over 4927.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.02876, over 973142.83 frames.], batch size: 29, lr: 1.22e-04 +2022-05-09 12:29:32,619 INFO [train.py:715] (7/8) Epoch 18, batch 32050, loss[loss=0.1295, simple_loss=0.2083, pruned_loss=0.02532, over 4986.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02864, over 973582.49 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 12:30:11,876 INFO [train.py:715] (7/8) Epoch 18, batch 32100, loss[loss=0.1301, simple_loss=0.2068, pruned_loss=0.02664, over 4852.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02861, over 973446.38 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:30:51,374 INFO [train.py:715] (7/8) Epoch 18, batch 32150, loss[loss=0.1502, simple_loss=0.2264, pruned_loss=0.03694, over 4955.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02841, over 973362.72 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 12:31:30,534 INFO [train.py:715] (7/8) Epoch 18, batch 32200, loss[loss=0.1361, simple_loss=0.2199, pruned_loss=0.02616, over 4771.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.0285, over 972787.89 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:32:08,911 INFO [train.py:715] (7/8) Epoch 18, batch 32250, loss[loss=0.1194, simple_loss=0.2044, pruned_loss=0.01725, over 4981.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02904, over 971714.58 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:32:48,157 INFO [train.py:715] (7/8) Epoch 18, batch 32300, loss[loss=0.1483, simple_loss=0.219, pruned_loss=0.0388, over 4928.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.0289, over 971333.02 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:33:26,714 INFO [train.py:715] (7/8) Epoch 18, batch 32350, loss[loss=0.1577, simple_loss=0.2295, pruned_loss=0.04291, over 4680.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02874, over 970994.15 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:34:05,361 INFO [train.py:715] (7/8) Epoch 18, batch 32400, loss[loss=0.1378, simple_loss=0.2149, pruned_loss=0.03031, over 4853.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02873, over 971303.99 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 12:34:44,779 INFO [train.py:715] (7/8) Epoch 18, batch 32450, loss[loss=0.1489, simple_loss=0.222, pruned_loss=0.03792, over 4789.00 frames.], tot_loss[loss=0.1326, simple_loss=0.207, pruned_loss=0.02908, over 971944.33 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 12:35:23,651 INFO [train.py:715] (7/8) Epoch 18, batch 32500, loss[loss=0.1351, simple_loss=0.2073, pruned_loss=0.03148, over 4807.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2063, pruned_loss=0.02914, over 971062.35 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:36:02,853 INFO [train.py:715] (7/8) Epoch 18, batch 32550, loss[loss=0.1451, simple_loss=0.219, pruned_loss=0.03561, over 4788.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02875, over 971460.98 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:36:42,027 INFO [train.py:715] (7/8) Epoch 18, batch 32600, loss[loss=0.1326, simple_loss=0.2167, pruned_loss=0.02428, over 4809.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2065, pruned_loss=0.0289, over 972340.09 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:37:21,453 INFO 
[train.py:715] (7/8) Epoch 18, batch 32650, loss[loss=0.1004, simple_loss=0.1688, pruned_loss=0.01603, over 4800.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2059, pruned_loss=0.02877, over 972566.27 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:37:59,895 INFO [train.py:715] (7/8) Epoch 18, batch 32700, loss[loss=0.1192, simple_loss=0.1896, pruned_loss=0.02438, over 4806.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2055, pruned_loss=0.02892, over 972234.79 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:38:38,642 INFO [train.py:715] (7/8) Epoch 18, batch 32750, loss[loss=0.1157, simple_loss=0.1857, pruned_loss=0.02287, over 4834.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02892, over 972698.70 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 12:39:17,960 INFO [train.py:715] (7/8) Epoch 18, batch 32800, loss[loss=0.1328, simple_loss=0.2081, pruned_loss=0.0288, over 4892.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02879, over 972095.41 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 12:39:57,150 INFO [train.py:715] (7/8) Epoch 18, batch 32850, loss[loss=0.1322, simple_loss=0.2018, pruned_loss=0.03131, over 4964.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02828, over 972587.57 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 12:40:35,667 INFO [train.py:715] (7/8) Epoch 18, batch 32900, loss[loss=0.1238, simple_loss=0.2041, pruned_loss=0.0217, over 4974.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.02869, over 972240.70 frames.], batch size: 28, lr: 1.22e-04 +2022-05-09 12:41:14,764 INFO [train.py:715] (7/8) Epoch 18, batch 32950, loss[loss=0.1382, simple_loss=0.2074, pruned_loss=0.03446, over 4807.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02818, over 972250.24 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:41:53,958 INFO [train.py:715] (7/8) Epoch 18, batch 33000, loss[loss=0.1607, simple_loss=0.2259, pruned_loss=0.04772, over 4812.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02844, over 971368.11 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:41:53,959 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 12:42:03,826 INFO [train.py:742] (7/8) Epoch 18, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01068, over 914524.00 frames. 
+2022-05-09 12:42:43,656 INFO [train.py:715] (7/8) Epoch 18, batch 33050, loss[loss=0.1188, simple_loss=0.1893, pruned_loss=0.02413, over 4988.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02853, over 972381.32 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:43:22,617 INFO [train.py:715] (7/8) Epoch 18, batch 33100, loss[loss=0.1398, simple_loss=0.2194, pruned_loss=0.03013, over 4811.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02836, over 972870.67 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:44:02,104 INFO [train.py:715] (7/8) Epoch 18, batch 33150, loss[loss=0.1123, simple_loss=0.1858, pruned_loss=0.0194, over 4807.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02795, over 972301.13 frames.], batch size: 26, lr: 1.22e-04 +2022-05-09 12:44:41,945 INFO [train.py:715] (7/8) Epoch 18, batch 33200, loss[loss=0.1297, simple_loss=0.2077, pruned_loss=0.0259, over 4947.00 frames.], tot_loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02791, over 972082.97 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:45:20,901 INFO [train.py:715] (7/8) Epoch 18, batch 33250, loss[loss=0.1422, simple_loss=0.2205, pruned_loss=0.03196, over 4880.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02852, over 971931.67 frames.], batch size: 22, lr: 1.22e-04 +2022-05-09 12:45:59,530 INFO [train.py:715] (7/8) Epoch 18, batch 33300, loss[loss=0.1239, simple_loss=0.1981, pruned_loss=0.0249, over 4979.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02833, over 971756.77 frames.], batch size: 31, lr: 1.22e-04 +2022-05-09 12:46:38,975 INFO [train.py:715] (7/8) Epoch 18, batch 33350, loss[loss=0.13, simple_loss=0.2025, pruned_loss=0.02879, over 4781.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02834, over 971692.15 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:47:18,352 INFO [train.py:715] (7/8) Epoch 18, batch 33400, loss[loss=0.1252, simple_loss=0.2029, pruned_loss=0.02374, over 4912.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02837, over 971988.38 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:47:57,079 INFO [train.py:715] (7/8) Epoch 18, batch 33450, loss[loss=0.1403, simple_loss=0.2176, pruned_loss=0.03148, over 4965.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02831, over 970741.20 frames.], batch size: 28, lr: 1.22e-04 +2022-05-09 12:48:36,022 INFO [train.py:715] (7/8) Epoch 18, batch 33500, loss[loss=0.1415, simple_loss=0.212, pruned_loss=0.03551, over 4701.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02844, over 971347.70 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:49:15,397 INFO [train.py:715] (7/8) Epoch 18, batch 33550, loss[loss=0.136, simple_loss=0.2131, pruned_loss=0.02949, over 4853.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02836, over 970994.27 frames.], batch size: 20, lr: 1.22e-04 +2022-05-09 12:49:54,438 INFO [train.py:715] (7/8) Epoch 18, batch 33600, loss[loss=0.1358, simple_loss=0.2083, pruned_loss=0.03165, over 4788.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2069, pruned_loss=0.02821, over 971455.22 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:50:32,503 INFO [train.py:715] (7/8) Epoch 18, batch 33650, loss[loss=0.1584, simple_loss=0.2257, pruned_loss=0.04557, over 4966.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2076, pruned_loss=0.02854, over 971413.45 frames.], batch size: 35, lr: 1.22e-04 +2022-05-09 
12:51:11,946 INFO [train.py:715] (7/8) Epoch 18, batch 33700, loss[loss=0.1401, simple_loss=0.2179, pruned_loss=0.03112, over 4967.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2064, pruned_loss=0.02816, over 971600.39 frames.], batch size: 39, lr: 1.22e-04 +2022-05-09 12:51:51,115 INFO [train.py:715] (7/8) Epoch 18, batch 33750, loss[loss=0.1292, simple_loss=0.2054, pruned_loss=0.02654, over 4892.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2056, pruned_loss=0.02797, over 972256.32 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 12:52:30,430 INFO [train.py:715] (7/8) Epoch 18, batch 33800, loss[loss=0.1245, simple_loss=0.192, pruned_loss=0.02853, over 4959.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02796, over 971110.89 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:53:09,707 INFO [train.py:715] (7/8) Epoch 18, batch 33850, loss[loss=0.1573, simple_loss=0.218, pruned_loss=0.04831, over 4799.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2063, pruned_loss=0.02811, over 971447.78 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:53:49,536 INFO [train.py:715] (7/8) Epoch 18, batch 33900, loss[loss=0.1379, simple_loss=0.216, pruned_loss=0.02994, over 4787.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02834, over 972061.47 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:54:28,738 INFO [train.py:715] (7/8) Epoch 18, batch 33950, loss[loss=0.1344, simple_loss=0.2075, pruned_loss=0.03065, over 4947.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2061, pruned_loss=0.02784, over 970968.26 frames.], batch size: 21, lr: 1.22e-04 +2022-05-09 12:55:07,057 INFO [train.py:715] (7/8) Epoch 18, batch 34000, loss[loss=0.1018, simple_loss=0.1855, pruned_loss=0.00903, over 4816.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.02812, over 971260.39 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:55:46,477 INFO [train.py:715] (7/8) Epoch 18, batch 34050, loss[loss=0.1093, simple_loss=0.1919, pruned_loss=0.0134, over 4797.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.0285, over 971844.43 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 12:56:25,892 INFO [train.py:715] (7/8) Epoch 18, batch 34100, loss[loss=0.1267, simple_loss=0.2069, pruned_loss=0.02321, over 4813.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02795, over 971600.72 frames.], batch size: 13, lr: 1.22e-04 +2022-05-09 12:57:05,033 INFO [train.py:715] (7/8) Epoch 18, batch 34150, loss[loss=0.1412, simple_loss=0.2188, pruned_loss=0.03183, over 4691.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2063, pruned_loss=0.0279, over 972931.98 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:57:44,081 INFO [train.py:715] (7/8) Epoch 18, batch 34200, loss[loss=0.1268, simple_loss=0.206, pruned_loss=0.02374, over 4697.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2065, pruned_loss=0.0277, over 973533.22 frames.], batch size: 15, lr: 1.22e-04 +2022-05-09 12:58:23,226 INFO [train.py:715] (7/8) Epoch 18, batch 34250, loss[loss=0.1028, simple_loss=0.1772, pruned_loss=0.01418, over 4795.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2062, pruned_loss=0.02805, over 974621.07 frames.], batch size: 17, lr: 1.22e-04 +2022-05-09 12:59:02,033 INFO [train.py:715] (7/8) Epoch 18, batch 34300, loss[loss=0.1435, simple_loss=0.2184, pruned_loss=0.03434, over 4775.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2064, pruned_loss=0.02786, over 974038.90 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 12:59:40,334 
INFO [train.py:715] (7/8) Epoch 18, batch 34350, loss[loss=0.1363, simple_loss=0.2042, pruned_loss=0.03423, over 4808.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2073, pruned_loss=0.02822, over 974262.06 frames.], batch size: 14, lr: 1.22e-04 +2022-05-09 13:00:19,860 INFO [train.py:715] (7/8) Epoch 18, batch 34400, loss[loss=0.1171, simple_loss=0.1936, pruned_loss=0.02029, over 4984.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.0282, over 973782.27 frames.], batch size: 25, lr: 1.22e-04 +2022-05-09 13:00:59,444 INFO [train.py:715] (7/8) Epoch 18, batch 34450, loss[loss=0.1457, simple_loss=0.2325, pruned_loss=0.02943, over 4885.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2064, pruned_loss=0.02814, over 973832.83 frames.], batch size: 16, lr: 1.22e-04 +2022-05-09 13:01:39,372 INFO [train.py:715] (7/8) Epoch 18, batch 34500, loss[loss=0.1477, simple_loss=0.2317, pruned_loss=0.03182, over 4904.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2074, pruned_loss=0.02878, over 973507.62 frames.], batch size: 19, lr: 1.22e-04 +2022-05-09 13:02:18,896 INFO [train.py:715] (7/8) Epoch 18, batch 34550, loss[loss=0.1541, simple_loss=0.2191, pruned_loss=0.04449, over 4968.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02903, over 972814.57 frames.], batch size: 24, lr: 1.22e-04 +2022-05-09 13:02:58,574 INFO [train.py:715] (7/8) Epoch 18, batch 34600, loss[loss=0.1378, simple_loss=0.2027, pruned_loss=0.03646, over 4843.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2073, pruned_loss=0.02917, over 973761.47 frames.], batch size: 30, lr: 1.22e-04 +2022-05-09 13:03:37,756 INFO [train.py:715] (7/8) Epoch 18, batch 34650, loss[loss=0.1221, simple_loss=0.2034, pruned_loss=0.02043, over 4768.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2075, pruned_loss=0.02892, over 973321.55 frames.], batch size: 18, lr: 1.22e-04 +2022-05-09 13:04:17,382 INFO [train.py:715] (7/8) Epoch 18, batch 34700, loss[loss=0.1461, simple_loss=0.2243, pruned_loss=0.03393, over 4830.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2076, pruned_loss=0.02898, over 972983.32 frames.], batch size: 27, lr: 1.21e-04 +2022-05-09 13:04:56,519 INFO [train.py:715] (7/8) Epoch 18, batch 34750, loss[loss=0.1411, simple_loss=0.2111, pruned_loss=0.03554, over 4861.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2079, pruned_loss=0.0297, over 973159.81 frames.], batch size: 32, lr: 1.21e-04 +2022-05-09 13:05:34,145 INFO [train.py:715] (7/8) Epoch 18, batch 34800, loss[loss=0.1171, simple_loss=0.1856, pruned_loss=0.02424, over 4762.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.02932, over 971492.68 frames.], batch size: 12, lr: 1.21e-04 +2022-05-09 13:06:24,914 INFO [train.py:715] (7/8) Epoch 19, batch 0, loss[loss=0.1152, simple_loss=0.1996, pruned_loss=0.01539, over 4818.00 frames.], tot_loss[loss=0.1152, simple_loss=0.1996, pruned_loss=0.01539, over 4818.00 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 13:07:03,503 INFO [train.py:715] (7/8) Epoch 19, batch 50, loss[loss=0.1301, simple_loss=0.2077, pruned_loss=0.02622, over 4936.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2063, pruned_loss=0.02864, over 219945.21 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:07:43,526 INFO [train.py:715] (7/8) Epoch 19, batch 100, loss[loss=0.1097, simple_loss=0.1869, pruned_loss=0.0162, over 4958.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02907, over 387166.67 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:08:23,946 INFO [train.py:715] 
(7/8) Epoch 19, batch 150, loss[loss=0.1424, simple_loss=0.2117, pruned_loss=0.03656, over 4828.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02932, over 517809.77 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 13:09:04,151 INFO [train.py:715] (7/8) Epoch 19, batch 200, loss[loss=0.1083, simple_loss=0.1785, pruned_loss=0.01906, over 4902.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02875, over 618415.27 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:09:44,076 INFO [train.py:715] (7/8) Epoch 19, batch 250, loss[loss=0.1559, simple_loss=0.2268, pruned_loss=0.04251, over 4865.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02935, over 697401.97 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:10:24,216 INFO [train.py:715] (7/8) Epoch 19, batch 300, loss[loss=0.1269, simple_loss=0.1991, pruned_loss=0.02732, over 4870.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2074, pruned_loss=0.02991, over 758832.24 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:11:04,671 INFO [train.py:715] (7/8) Epoch 19, batch 350, loss[loss=0.1161, simple_loss=0.1954, pruned_loss=0.0184, over 4963.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2077, pruned_loss=0.02952, over 805722.15 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:11:43,717 INFO [train.py:715] (7/8) Epoch 19, batch 400, loss[loss=0.1293, simple_loss=0.2009, pruned_loss=0.02885, over 4807.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02991, over 842534.22 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:12:24,049 INFO [train.py:715] (7/8) Epoch 19, batch 450, loss[loss=0.1316, simple_loss=0.2174, pruned_loss=0.02295, over 4972.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2081, pruned_loss=0.02979, over 871559.84 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:13:04,621 INFO [train.py:715] (7/8) Epoch 19, batch 500, loss[loss=0.1365, simple_loss=0.2192, pruned_loss=0.0269, over 4910.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02918, over 894648.22 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:13:44,280 INFO [train.py:715] (7/8) Epoch 19, batch 550, loss[loss=0.1299, simple_loss=0.1947, pruned_loss=0.03258, over 4740.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02917, over 912035.28 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:14:24,236 INFO [train.py:715] (7/8) Epoch 19, batch 600, loss[loss=0.1629, simple_loss=0.2223, pruned_loss=0.05178, over 4800.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02926, over 924990.82 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:15:04,547 INFO [train.py:715] (7/8) Epoch 19, batch 650, loss[loss=0.1428, simple_loss=0.2079, pruned_loss=0.03888, over 4811.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2067, pruned_loss=0.0293, over 935302.55 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:15:44,877 INFO [train.py:715] (7/8) Epoch 19, batch 700, loss[loss=0.129, simple_loss=0.2103, pruned_loss=0.02391, over 4934.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02915, over 943797.33 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 13:16:24,134 INFO [train.py:715] (7/8) Epoch 19, batch 750, loss[loss=0.1101, simple_loss=0.1782, pruned_loss=0.02099, over 4766.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02916, over 950382.99 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:17:03,939 INFO [train.py:715] (7/8) Epoch 19, batch 800, 
loss[loss=0.108, simple_loss=0.1899, pruned_loss=0.01307, over 4934.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02839, over 955727.36 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 13:17:44,201 INFO [train.py:715] (7/8) Epoch 19, batch 850, loss[loss=0.1251, simple_loss=0.1984, pruned_loss=0.02591, over 4901.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02823, over 959643.66 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:18:24,380 INFO [train.py:715] (7/8) Epoch 19, batch 900, loss[loss=0.1245, simple_loss=0.2077, pruned_loss=0.02067, over 4852.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02859, over 962200.39 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 13:19:03,894 INFO [train.py:715] (7/8) Epoch 19, batch 950, loss[loss=0.1249, simple_loss=0.1965, pruned_loss=0.02668, over 4913.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02853, over 963801.25 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:19:43,255 INFO [train.py:715] (7/8) Epoch 19, batch 1000, loss[loss=0.1421, simple_loss=0.2245, pruned_loss=0.02982, over 4984.00 frames.], tot_loss[loss=0.131, simple_loss=0.205, pruned_loss=0.02848, over 965354.40 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:20:23,197 INFO [train.py:715] (7/8) Epoch 19, batch 1050, loss[loss=0.1724, simple_loss=0.2302, pruned_loss=0.05734, over 4836.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2059, pruned_loss=0.02909, over 966975.82 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 13:21:02,195 INFO [train.py:715] (7/8) Epoch 19, batch 1100, loss[loss=0.1369, simple_loss=0.2153, pruned_loss=0.02921, over 4806.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2059, pruned_loss=0.02924, over 968236.69 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:21:42,017 INFO [train.py:715] (7/8) Epoch 19, batch 1150, loss[loss=0.1086, simple_loss=0.1881, pruned_loss=0.01452, over 4809.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.0288, over 969038.32 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:22:21,964 INFO [train.py:715] (7/8) Epoch 19, batch 1200, loss[loss=0.1302, simple_loss=0.2029, pruned_loss=0.0287, over 4847.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2058, pruned_loss=0.02893, over 970053.29 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 13:23:01,716 INFO [train.py:715] (7/8) Epoch 19, batch 1250, loss[loss=0.1203, simple_loss=0.1971, pruned_loss=0.02175, over 4752.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2049, pruned_loss=0.02868, over 969376.32 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:23:41,061 INFO [train.py:715] (7/8) Epoch 19, batch 1300, loss[loss=0.1201, simple_loss=0.1885, pruned_loss=0.02588, over 4788.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2053, pruned_loss=0.02888, over 970123.50 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:24:20,596 INFO [train.py:715] (7/8) Epoch 19, batch 1350, loss[loss=0.1676, simple_loss=0.2419, pruned_loss=0.04667, over 4974.00 frames.], tot_loss[loss=0.1311, simple_loss=0.205, pruned_loss=0.02863, over 971467.23 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:25:00,619 INFO [train.py:715] (7/8) Epoch 19, batch 1400, loss[loss=0.1408, simple_loss=0.2085, pruned_loss=0.03651, over 4819.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2046, pruned_loss=0.02854, over 970695.42 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:25:39,919 INFO [train.py:715] (7/8) Epoch 19, batch 1450, loss[loss=0.1591, 
simple_loss=0.2372, pruned_loss=0.04046, over 4912.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2055, pruned_loss=0.02858, over 972068.60 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:26:20,232 INFO [train.py:715] (7/8) Epoch 19, batch 1500, loss[loss=0.1277, simple_loss=0.1966, pruned_loss=0.02933, over 4855.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02845, over 972878.57 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:27:00,283 INFO [train.py:715] (7/8) Epoch 19, batch 1550, loss[loss=0.1242, simple_loss=0.2007, pruned_loss=0.02387, over 4970.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02854, over 973049.67 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 13:27:40,367 INFO [train.py:715] (7/8) Epoch 19, batch 1600, loss[loss=0.1386, simple_loss=0.2043, pruned_loss=0.03643, over 4905.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02884, over 972542.46 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:28:19,706 INFO [train.py:715] (7/8) Epoch 19, batch 1650, loss[loss=0.132, simple_loss=0.2083, pruned_loss=0.02787, over 4962.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02836, over 972609.93 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:28:59,072 INFO [train.py:715] (7/8) Epoch 19, batch 1700, loss[loss=0.1423, simple_loss=0.2064, pruned_loss=0.03908, over 4892.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02831, over 972870.85 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:29:39,058 INFO [train.py:715] (7/8) Epoch 19, batch 1750, loss[loss=0.1412, simple_loss=0.2088, pruned_loss=0.03676, over 4980.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02875, over 972720.42 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 13:30:18,171 INFO [train.py:715] (7/8) Epoch 19, batch 1800, loss[loss=0.1474, simple_loss=0.2116, pruned_loss=0.04159, over 4847.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02888, over 972650.70 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 13:30:57,613 INFO [train.py:715] (7/8) Epoch 19, batch 1850, loss[loss=0.1329, simple_loss=0.2129, pruned_loss=0.02641, over 4931.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02917, over 972078.29 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:31:36,858 INFO [train.py:715] (7/8) Epoch 19, batch 1900, loss[loss=0.1277, simple_loss=0.1975, pruned_loss=0.02897, over 4884.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2069, pruned_loss=0.02914, over 972522.95 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:32:16,777 INFO [train.py:715] (7/8) Epoch 19, batch 1950, loss[loss=0.1394, simple_loss=0.2146, pruned_loss=0.03212, over 4904.00 frames.], tot_loss[loss=0.133, simple_loss=0.2073, pruned_loss=0.02935, over 972180.68 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:32:55,075 INFO [train.py:715] (7/8) Epoch 19, batch 2000, loss[loss=0.1431, simple_loss=0.2097, pruned_loss=0.03827, over 4688.00 frames.], tot_loss[loss=0.133, simple_loss=0.2072, pruned_loss=0.0294, over 971945.51 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:33:34,216 INFO [train.py:715] (7/8) Epoch 19, batch 2050, loss[loss=0.1211, simple_loss=0.1999, pruned_loss=0.02114, over 4805.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2075, pruned_loss=0.02968, over 972326.29 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:34:13,315 INFO [train.py:715] (7/8) Epoch 19, batch 2100, loss[loss=0.1669, simple_loss=0.2152, 
pruned_loss=0.05929, over 4860.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02926, over 973007.40 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 13:34:52,132 INFO [train.py:715] (7/8) Epoch 19, batch 2150, loss[loss=0.1341, simple_loss=0.2075, pruned_loss=0.03038, over 4885.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2071, pruned_loss=0.02954, over 973332.23 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:35:31,127 INFO [train.py:715] (7/8) Epoch 19, batch 2200, loss[loss=0.1354, simple_loss=0.1969, pruned_loss=0.03698, over 4920.00 frames.], tot_loss[loss=0.1331, simple_loss=0.2072, pruned_loss=0.02952, over 973933.33 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:36:09,825 INFO [train.py:715] (7/8) Epoch 19, batch 2250, loss[loss=0.1622, simple_loss=0.2285, pruned_loss=0.04798, over 4865.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2062, pruned_loss=0.02941, over 973090.92 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 13:36:49,417 INFO [train.py:715] (7/8) Epoch 19, batch 2300, loss[loss=0.1397, simple_loss=0.2194, pruned_loss=0.03001, over 4816.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2064, pruned_loss=0.0296, over 972982.03 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:37:28,009 INFO [train.py:715] (7/8) Epoch 19, batch 2350, loss[loss=0.1136, simple_loss=0.1932, pruned_loss=0.01697, over 4951.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02943, over 973219.12 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 13:38:07,167 INFO [train.py:715] (7/8) Epoch 19, batch 2400, loss[loss=0.1164, simple_loss=0.1939, pruned_loss=0.01942, over 4906.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.0292, over 973296.77 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:38:46,613 INFO [train.py:715] (7/8) Epoch 19, batch 2450, loss[loss=0.1518, simple_loss=0.228, pruned_loss=0.03777, over 4825.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02917, over 972273.19 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:39:25,450 INFO [train.py:715] (7/8) Epoch 19, batch 2500, loss[loss=0.1403, simple_loss=0.2073, pruned_loss=0.0366, over 4845.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02899, over 973090.12 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 13:40:04,478 INFO [train.py:715] (7/8) Epoch 19, batch 2550, loss[loss=0.09168, simple_loss=0.1637, pruned_loss=0.00985, over 4730.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.0294, over 972239.37 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 13:40:44,007 INFO [train.py:715] (7/8) Epoch 19, batch 2600, loss[loss=0.09948, simple_loss=0.1729, pruned_loss=0.01304, over 4968.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2074, pruned_loss=0.02944, over 972312.80 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:41:26,472 INFO [train.py:715] (7/8) Epoch 19, batch 2650, loss[loss=0.1683, simple_loss=0.2465, pruned_loss=0.04503, over 4803.00 frames.], tot_loss[loss=0.1339, simple_loss=0.208, pruned_loss=0.02986, over 971799.64 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 13:42:05,379 INFO [train.py:715] (7/8) Epoch 19, batch 2700, loss[loss=0.1184, simple_loss=0.1974, pruned_loss=0.01966, over 4951.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02896, over 971683.62 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 13:42:44,051 INFO [train.py:715] (7/8) Epoch 19, batch 2750, loss[loss=0.1287, simple_loss=0.1985, pruned_loss=0.02952, 
over 4807.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02833, over 971483.48 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:43:23,791 INFO [train.py:715] (7/8) Epoch 19, batch 2800, loss[loss=0.1331, simple_loss=0.2098, pruned_loss=0.02818, over 4888.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2065, pruned_loss=0.02834, over 972142.02 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:44:03,074 INFO [train.py:715] (7/8) Epoch 19, batch 2850, loss[loss=0.1186, simple_loss=0.1992, pruned_loss=0.01905, over 4974.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02815, over 972274.81 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:44:42,002 INFO [train.py:715] (7/8) Epoch 19, batch 2900, loss[loss=0.1378, simple_loss=0.2022, pruned_loss=0.03669, over 4983.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02827, over 972813.54 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:45:20,761 INFO [train.py:715] (7/8) Epoch 19, batch 2950, loss[loss=0.1093, simple_loss=0.1865, pruned_loss=0.01609, over 4961.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2054, pruned_loss=0.02805, over 972919.08 frames.], batch size: 35, lr: 1.18e-04 +2022-05-09 13:46:00,074 INFO [train.py:715] (7/8) Epoch 19, batch 3000, loss[loss=0.1332, simple_loss=0.215, pruned_loss=0.02566, over 4746.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2056, pruned_loss=0.02768, over 972139.74 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:46:00,074 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 13:46:10,051 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1877, pruned_loss=0.01062, over 914524.00 frames. +2022-05-09 13:46:50,338 INFO [train.py:715] (7/8) Epoch 19, batch 3050, loss[loss=0.1274, simple_loss=0.2021, pruned_loss=0.0264, over 4967.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2049, pruned_loss=0.02768, over 972070.01 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:47:29,688 INFO [train.py:715] (7/8) Epoch 19, batch 3100, loss[loss=0.1281, simple_loss=0.2001, pruned_loss=0.02804, over 4765.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02838, over 973293.27 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:48:08,832 INFO [train.py:715] (7/8) Epoch 19, batch 3150, loss[loss=0.1407, simple_loss=0.2124, pruned_loss=0.03449, over 4779.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02852, over 972917.66 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 13:48:48,670 INFO [train.py:715] (7/8) Epoch 19, batch 3200, loss[loss=0.1175, simple_loss=0.1992, pruned_loss=0.01787, over 4973.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02876, over 973462.53 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:49:27,691 INFO [train.py:715] (7/8) Epoch 19, batch 3250, loss[loss=0.1179, simple_loss=0.1896, pruned_loss=0.0231, over 4859.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02877, over 972752.34 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:50:07,133 INFO [train.py:715] (7/8) Epoch 19, batch 3300, loss[loss=0.1064, simple_loss=0.1856, pruned_loss=0.01358, over 4920.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02872, over 971532.72 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 13:50:46,360 INFO [train.py:715] (7/8) Epoch 19, batch 3350, loss[loss=0.1086, simple_loss=0.1835, pruned_loss=0.01689, over 4930.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, 
pruned_loss=0.02807, over 972499.64 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 13:51:26,505 INFO [train.py:715] (7/8) Epoch 19, batch 3400, loss[loss=0.1264, simple_loss=0.2064, pruned_loss=0.02321, over 4836.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02825, over 972430.39 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:52:05,358 INFO [train.py:715] (7/8) Epoch 19, batch 3450, loss[loss=0.1196, simple_loss=0.1946, pruned_loss=0.02223, over 4914.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02836, over 972720.75 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 13:52:44,613 INFO [train.py:715] (7/8) Epoch 19, batch 3500, loss[loss=0.1255, simple_loss=0.2042, pruned_loss=0.02343, over 4753.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02858, over 973443.09 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:53:23,733 INFO [train.py:715] (7/8) Epoch 19, batch 3550, loss[loss=0.1372, simple_loss=0.2034, pruned_loss=0.0355, over 4876.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2057, pruned_loss=0.0288, over 973042.55 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 13:54:02,621 INFO [train.py:715] (7/8) Epoch 19, batch 3600, loss[loss=0.1253, simple_loss=0.2007, pruned_loss=0.0249, over 4897.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2056, pruned_loss=0.02884, over 974197.92 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 13:54:42,251 INFO [train.py:715] (7/8) Epoch 19, batch 3650, loss[loss=0.1407, simple_loss=0.22, pruned_loss=0.0307, over 4915.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2055, pruned_loss=0.02877, over 973881.02 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 13:55:21,398 INFO [train.py:715] (7/8) Epoch 19, batch 3700, loss[loss=0.1315, simple_loss=0.2051, pruned_loss=0.02895, over 4872.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2059, pruned_loss=0.02896, over 972784.59 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:56:01,852 INFO [train.py:715] (7/8) Epoch 19, batch 3750, loss[loss=0.1247, simple_loss=0.2044, pruned_loss=0.02247, over 4969.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02846, over 972620.13 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 13:56:40,838 INFO [train.py:715] (7/8) Epoch 19, batch 3800, loss[loss=0.1438, simple_loss=0.2189, pruned_loss=0.03434, over 4838.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02824, over 972003.39 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:57:19,817 INFO [train.py:715] (7/8) Epoch 19, batch 3850, loss[loss=0.1319, simple_loss=0.2036, pruned_loss=0.03011, over 4746.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02857, over 972116.25 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 13:57:59,512 INFO [train.py:715] (7/8) Epoch 19, batch 3900, loss[loss=0.1208, simple_loss=0.1908, pruned_loss=0.0254, over 4858.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2055, pruned_loss=0.0284, over 972380.06 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 13:58:38,560 INFO [train.py:715] (7/8) Epoch 19, batch 3950, loss[loss=0.1613, simple_loss=0.2463, pruned_loss=0.03816, over 4832.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02872, over 972230.47 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 13:59:17,194 INFO [train.py:715] (7/8) Epoch 19, batch 4000, loss[loss=0.1594, simple_loss=0.2266, pruned_loss=0.04611, over 4991.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02889, over 
971890.77 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 13:59:56,645 INFO [train.py:715] (7/8) Epoch 19, batch 4050, loss[loss=0.147, simple_loss=0.2269, pruned_loss=0.03353, over 4769.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02911, over 972292.59 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:00:36,777 INFO [train.py:715] (7/8) Epoch 19, batch 4100, loss[loss=0.1288, simple_loss=0.2025, pruned_loss=0.02757, over 4868.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02898, over 972712.43 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 14:01:15,968 INFO [train.py:715] (7/8) Epoch 19, batch 4150, loss[loss=0.1211, simple_loss=0.1998, pruned_loss=0.02116, over 4848.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02864, over 972376.50 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:01:54,741 INFO [train.py:715] (7/8) Epoch 19, batch 4200, loss[loss=0.1057, simple_loss=0.1783, pruned_loss=0.01659, over 4984.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02813, over 972000.34 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 14:02:34,000 INFO [train.py:715] (7/8) Epoch 19, batch 4250, loss[loss=0.1124, simple_loss=0.1799, pruned_loss=0.02244, over 4786.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02849, over 971822.76 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:03:13,061 INFO [train.py:715] (7/8) Epoch 19, batch 4300, loss[loss=0.1405, simple_loss=0.2286, pruned_loss=0.02624, over 4765.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.0286, over 971815.53 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:03:52,546 INFO [train.py:715] (7/8) Epoch 19, batch 4350, loss[loss=0.1331, simple_loss=0.2062, pruned_loss=0.03005, over 4962.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.0288, over 972896.25 frames.], batch size: 31, lr: 1.18e-04 +2022-05-09 14:04:31,617 INFO [train.py:715] (7/8) Epoch 19, batch 4400, loss[loss=0.1116, simple_loss=0.1934, pruned_loss=0.01492, over 4967.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02886, over 972340.42 frames.], batch size: 28, lr: 1.18e-04 +2022-05-09 14:05:11,664 INFO [train.py:715] (7/8) Epoch 19, batch 4450, loss[loss=0.1095, simple_loss=0.1881, pruned_loss=0.01543, over 4893.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02907, over 971640.01 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:05:50,509 INFO [train.py:715] (7/8) Epoch 19, batch 4500, loss[loss=0.1325, simple_loss=0.2173, pruned_loss=0.02384, over 4777.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2068, pruned_loss=0.02936, over 972046.82 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:06:29,206 INFO [train.py:715] (7/8) Epoch 19, batch 4550, loss[loss=0.137, simple_loss=0.2204, pruned_loss=0.0268, over 4877.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02938, over 971993.09 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:07:08,890 INFO [train.py:715] (7/8) Epoch 19, batch 4600, loss[loss=0.1396, simple_loss=0.2093, pruned_loss=0.035, over 4834.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2078, pruned_loss=0.02981, over 972373.39 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 14:07:48,137 INFO [train.py:715] (7/8) Epoch 19, batch 4650, loss[loss=0.1164, simple_loss=0.197, pruned_loss=0.0179, over 4935.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02951, over 971857.45 frames.], batch 
size: 23, lr: 1.18e-04 +2022-05-09 14:08:27,121 INFO [train.py:715] (7/8) Epoch 19, batch 4700, loss[loss=0.1302, simple_loss=0.2142, pruned_loss=0.0231, over 4887.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.02902, over 972077.24 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:09:06,337 INFO [train.py:715] (7/8) Epoch 19, batch 4750, loss[loss=0.1263, simple_loss=0.2123, pruned_loss=0.0201, over 4976.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2076, pruned_loss=0.02898, over 972675.33 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:09:46,295 INFO [train.py:715] (7/8) Epoch 19, batch 4800, loss[loss=0.1421, simple_loss=0.2158, pruned_loss=0.03421, over 4783.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2075, pruned_loss=0.0289, over 972061.58 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:10:25,677 INFO [train.py:715] (7/8) Epoch 19, batch 4850, loss[loss=0.1116, simple_loss=0.1866, pruned_loss=0.01833, over 4823.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02845, over 972003.62 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:11:04,338 INFO [train.py:715] (7/8) Epoch 19, batch 4900, loss[loss=0.1272, simple_loss=0.1931, pruned_loss=0.03068, over 4903.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02825, over 972177.19 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:11:44,094 INFO [train.py:715] (7/8) Epoch 19, batch 4950, loss[loss=0.1515, simple_loss=0.2303, pruned_loss=0.03631, over 4699.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02855, over 973041.61 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:12:23,741 INFO [train.py:715] (7/8) Epoch 19, batch 5000, loss[loss=0.1298, simple_loss=0.2212, pruned_loss=0.01924, over 4714.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02859, over 972323.81 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:13:02,753 INFO [train.py:715] (7/8) Epoch 19, batch 5050, loss[loss=0.1159, simple_loss=0.1956, pruned_loss=0.01816, over 4956.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02866, over 972354.07 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:13:41,115 INFO [train.py:715] (7/8) Epoch 19, batch 5100, loss[loss=0.1192, simple_loss=0.1914, pruned_loss=0.02351, over 4689.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.02863, over 972096.80 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:14:21,161 INFO [train.py:715] (7/8) Epoch 19, batch 5150, loss[loss=0.1356, simple_loss=0.2033, pruned_loss=0.03391, over 4804.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2071, pruned_loss=0.02872, over 971705.89 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:15:00,187 INFO [train.py:715] (7/8) Epoch 19, batch 5200, loss[loss=0.1096, simple_loss=0.1814, pruned_loss=0.01889, over 4976.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02881, over 971898.84 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:15:38,856 INFO [train.py:715] (7/8) Epoch 19, batch 5250, loss[loss=0.1241, simple_loss=0.2009, pruned_loss=0.02366, over 4887.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02872, over 972015.19 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:16:18,536 INFO [train.py:715] (7/8) Epoch 19, batch 5300, loss[loss=0.1176, simple_loss=0.2024, pruned_loss=0.01643, over 4803.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02893, over 972386.66 frames.], batch size: 21, lr: 1.18e-04 
+2022-05-09 14:16:58,485 INFO [train.py:715] (7/8) Epoch 19, batch 5350, loss[loss=0.138, simple_loss=0.1977, pruned_loss=0.03921, over 4860.00 frames.], tot_loss[loss=0.1317, simple_loss=0.206, pruned_loss=0.02872, over 973023.72 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:17:38,566 INFO [train.py:715] (7/8) Epoch 19, batch 5400, loss[loss=0.1235, simple_loss=0.2025, pruned_loss=0.02223, over 4843.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02921, over 973201.85 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:18:17,826 INFO [train.py:715] (7/8) Epoch 19, batch 5450, loss[loss=0.134, simple_loss=0.2084, pruned_loss=0.02978, over 4828.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2065, pruned_loss=0.02917, over 973100.76 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:18:58,017 INFO [train.py:715] (7/8) Epoch 19, batch 5500, loss[loss=0.1413, simple_loss=0.2275, pruned_loss=0.02751, over 4805.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02902, over 973080.52 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:19:37,203 INFO [train.py:715] (7/8) Epoch 19, batch 5550, loss[loss=0.1716, simple_loss=0.2394, pruned_loss=0.05188, over 4767.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2062, pruned_loss=0.02914, over 972629.62 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:20:16,792 INFO [train.py:715] (7/8) Epoch 19, batch 5600, loss[loss=0.1114, simple_loss=0.1821, pruned_loss=0.02033, over 4839.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2064, pruned_loss=0.02913, over 973366.63 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:20:56,102 INFO [train.py:715] (7/8) Epoch 19, batch 5650, loss[loss=0.1247, simple_loss=0.2105, pruned_loss=0.01947, over 4702.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02862, over 971854.34 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:21:35,829 INFO [train.py:715] (7/8) Epoch 19, batch 5700, loss[loss=0.1308, simple_loss=0.2102, pruned_loss=0.02573, over 4814.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02834, over 971898.13 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 14:22:15,336 INFO [train.py:715] (7/8) Epoch 19, batch 5750, loss[loss=0.1298, simple_loss=0.1969, pruned_loss=0.03135, over 4871.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2056, pruned_loss=0.02839, over 971962.96 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 14:22:53,920 INFO [train.py:715] (7/8) Epoch 19, batch 5800, loss[loss=0.1318, simple_loss=0.2101, pruned_loss=0.02678, over 4879.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2047, pruned_loss=0.02795, over 972169.45 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:23:33,193 INFO [train.py:715] (7/8) Epoch 19, batch 5850, loss[loss=0.146, simple_loss=0.221, pruned_loss=0.03545, over 4745.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02801, over 971899.97 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:24:11,660 INFO [train.py:715] (7/8) Epoch 19, batch 5900, loss[loss=0.1325, simple_loss=0.1955, pruned_loss=0.03476, over 4775.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2048, pruned_loss=0.02793, over 971407.27 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:24:51,078 INFO [train.py:715] (7/8) Epoch 19, batch 5950, loss[loss=0.1259, simple_loss=0.1958, pruned_loss=0.02797, over 4774.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2047, pruned_loss=0.02772, over 971588.01 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 
14:25:30,281 INFO [train.py:715] (7/8) Epoch 19, batch 6000, loss[loss=0.1439, simple_loss=0.2238, pruned_loss=0.03205, over 4919.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2049, pruned_loss=0.02774, over 972072.69 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 14:25:30,282 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 14:25:40,197 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01067, over 914524.00 frames. +2022-05-09 14:26:19,491 INFO [train.py:715] (7/8) Epoch 19, batch 6050, loss[loss=0.1221, simple_loss=0.1983, pruned_loss=0.02297, over 4748.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.0279, over 972346.83 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:26:58,349 INFO [train.py:715] (7/8) Epoch 19, batch 6100, loss[loss=0.1671, simple_loss=0.2429, pruned_loss=0.04559, over 4973.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02828, over 972529.56 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:27:37,412 INFO [train.py:715] (7/8) Epoch 19, batch 6150, loss[loss=0.1264, simple_loss=0.19, pruned_loss=0.03135, over 4748.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02852, over 973307.27 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:28:15,614 INFO [train.py:715] (7/8) Epoch 19, batch 6200, loss[loss=0.1209, simple_loss=0.1916, pruned_loss=0.02514, over 4942.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.02853, over 974016.58 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:28:55,855 INFO [train.py:715] (7/8) Epoch 19, batch 6250, loss[loss=0.12, simple_loss=0.1964, pruned_loss=0.02178, over 4941.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02806, over 973765.93 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:29:35,038 INFO [train.py:715] (7/8) Epoch 19, batch 6300, loss[loss=0.1512, simple_loss=0.2268, pruned_loss=0.03778, over 4742.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02837, over 972841.20 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:30:14,722 INFO [train.py:715] (7/8) Epoch 19, batch 6350, loss[loss=0.1572, simple_loss=0.2239, pruned_loss=0.04526, over 4842.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02856, over 972659.59 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:30:54,200 INFO [train.py:715] (7/8) Epoch 19, batch 6400, loss[loss=0.1349, simple_loss=0.212, pruned_loss=0.02894, over 4918.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02833, over 972581.97 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:31:33,477 INFO [train.py:715] (7/8) Epoch 19, batch 6450, loss[loss=0.1258, simple_loss=0.2079, pruned_loss=0.02178, over 4919.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02843, over 972138.22 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:32:12,984 INFO [train.py:715] (7/8) Epoch 19, batch 6500, loss[loss=0.136, simple_loss=0.2021, pruned_loss=0.03492, over 4850.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02861, over 972377.16 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 14:32:51,557 INFO [train.py:715] (7/8) Epoch 19, batch 6550, loss[loss=0.1431, simple_loss=0.2242, pruned_loss=0.03105, over 4927.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02915, over 972414.31 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:33:31,043 INFO [train.py:715] (7/8) Epoch 19, batch 6600, 
loss[loss=0.1413, simple_loss=0.2027, pruned_loss=0.03995, over 4645.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02888, over 971752.30 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 14:34:10,196 INFO [train.py:715] (7/8) Epoch 19, batch 6650, loss[loss=0.1321, simple_loss=0.2039, pruned_loss=0.03018, over 4878.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2059, pruned_loss=0.02893, over 972644.24 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:34:48,936 INFO [train.py:715] (7/8) Epoch 19, batch 6700, loss[loss=0.1282, simple_loss=0.2065, pruned_loss=0.02497, over 4894.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02886, over 972120.26 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:35:28,066 INFO [train.py:715] (7/8) Epoch 19, batch 6750, loss[loss=0.1266, simple_loss=0.1929, pruned_loss=0.03019, over 4972.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02873, over 972663.04 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:36:07,538 INFO [train.py:715] (7/8) Epoch 19, batch 6800, loss[loss=0.1175, simple_loss=0.1934, pruned_loss=0.02083, over 4790.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2066, pruned_loss=0.02888, over 972384.43 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:36:46,931 INFO [train.py:715] (7/8) Epoch 19, batch 6850, loss[loss=0.1311, simple_loss=0.2004, pruned_loss=0.03089, over 4954.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.0284, over 972580.70 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 14:37:25,090 INFO [train.py:715] (7/8) Epoch 19, batch 6900, loss[loss=0.13, simple_loss=0.2096, pruned_loss=0.02526, over 4783.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2054, pruned_loss=0.02819, over 972116.02 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:38:04,135 INFO [train.py:715] (7/8) Epoch 19, batch 6950, loss[loss=0.1191, simple_loss=0.1922, pruned_loss=0.02301, over 4791.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02825, over 972714.69 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:38:43,602 INFO [train.py:715] (7/8) Epoch 19, batch 7000, loss[loss=0.1563, simple_loss=0.2258, pruned_loss=0.04339, over 4918.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2061, pruned_loss=0.02815, over 972947.44 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 14:39:22,853 INFO [train.py:715] (7/8) Epoch 19, batch 7050, loss[loss=0.1161, simple_loss=0.1966, pruned_loss=0.01783, over 4851.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.0284, over 973250.57 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 14:40:02,434 INFO [train.py:715] (7/8) Epoch 19, batch 7100, loss[loss=0.1475, simple_loss=0.2211, pruned_loss=0.0369, over 4880.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02875, over 973630.23 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 14:40:42,073 INFO [train.py:715] (7/8) Epoch 19, batch 7150, loss[loss=0.1341, simple_loss=0.2112, pruned_loss=0.02856, over 4813.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2069, pruned_loss=0.02837, over 972534.99 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 14:41:20,978 INFO [train.py:715] (7/8) Epoch 19, batch 7200, loss[loss=0.1528, simple_loss=0.2434, pruned_loss=0.03113, over 4692.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02839, over 971792.18 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 14:41:59,734 INFO [train.py:715] (7/8) Epoch 19, batch 7250, loss[loss=0.1052, 
simple_loss=0.1885, pruned_loss=0.01092, over 4987.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02858, over 972241.57 frames.], batch size: 28, lr: 1.18e-04 +2022-05-09 14:42:39,096 INFO [train.py:715] (7/8) Epoch 19, batch 7300, loss[loss=0.1432, simple_loss=0.2165, pruned_loss=0.03491, over 4754.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02837, over 972128.77 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:43:18,261 INFO [train.py:715] (7/8) Epoch 19, batch 7350, loss[loss=0.1644, simple_loss=0.2377, pruned_loss=0.0456, over 4742.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02846, over 971886.70 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:43:57,161 INFO [train.py:715] (7/8) Epoch 19, batch 7400, loss[loss=0.1299, simple_loss=0.2053, pruned_loss=0.02729, over 4826.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02869, over 972897.15 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 14:44:37,625 INFO [train.py:715] (7/8) Epoch 19, batch 7450, loss[loss=0.1505, simple_loss=0.2357, pruned_loss=0.03268, over 4939.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02824, over 972972.92 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 14:45:17,485 INFO [train.py:715] (7/8) Epoch 19, batch 7500, loss[loss=0.1506, simple_loss=0.2223, pruned_loss=0.03948, over 4846.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02846, over 972600.75 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 14:45:56,705 INFO [train.py:715] (7/8) Epoch 19, batch 7550, loss[loss=0.1302, simple_loss=0.2095, pruned_loss=0.02547, over 4812.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02803, over 971900.04 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 14:46:36,053 INFO [train.py:715] (7/8) Epoch 19, batch 7600, loss[loss=0.1315, simple_loss=0.22, pruned_loss=0.02149, over 4758.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02808, over 971523.18 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:47:16,834 INFO [train.py:715] (7/8) Epoch 19, batch 7650, loss[loss=0.172, simple_loss=0.253, pruned_loss=0.04551, over 4813.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2051, pruned_loss=0.0278, over 971362.88 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 14:47:56,154 INFO [train.py:715] (7/8) Epoch 19, batch 7700, loss[loss=0.09975, simple_loss=0.1698, pruned_loss=0.01483, over 4801.00 frames.], tot_loss[loss=0.13, simple_loss=0.2048, pruned_loss=0.02761, over 971959.79 frames.], batch size: 12, lr: 1.18e-04 +2022-05-09 14:48:34,963 INFO [train.py:715] (7/8) Epoch 19, batch 7750, loss[loss=0.1103, simple_loss=0.1863, pruned_loss=0.01715, over 4863.00 frames.], tot_loss[loss=0.13, simple_loss=0.2048, pruned_loss=0.02763, over 972293.49 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 14:49:14,660 INFO [train.py:715] (7/8) Epoch 19, batch 7800, loss[loss=0.1405, simple_loss=0.2147, pruned_loss=0.03312, over 4750.00 frames.], tot_loss[loss=0.13, simple_loss=0.2052, pruned_loss=0.02739, over 973027.13 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:49:54,095 INFO [train.py:715] (7/8) Epoch 19, batch 7850, loss[loss=0.1262, simple_loss=0.1994, pruned_loss=0.02645, over 4906.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2055, pruned_loss=0.02764, over 972320.54 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:50:33,365 INFO [train.py:715] (7/8) Epoch 19, batch 7900, loss[loss=0.1641, simple_loss=0.2403, 
pruned_loss=0.04399, over 4974.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2069, pruned_loss=0.0284, over 972072.01 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:51:11,767 INFO [train.py:715] (7/8) Epoch 19, batch 7950, loss[loss=0.1138, simple_loss=0.1913, pruned_loss=0.01812, over 4757.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2065, pruned_loss=0.02824, over 972416.82 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 14:51:51,092 INFO [train.py:715] (7/8) Epoch 19, batch 8000, loss[loss=0.1427, simple_loss=0.2137, pruned_loss=0.03581, over 4922.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02842, over 972536.12 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:52:30,292 INFO [train.py:715] (7/8) Epoch 19, batch 8050, loss[loss=0.1389, simple_loss=0.2262, pruned_loss=0.02578, over 4921.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02826, over 972376.88 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:53:08,825 INFO [train.py:715] (7/8) Epoch 19, batch 8100, loss[loss=0.1275, simple_loss=0.2059, pruned_loss=0.02455, over 4970.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02846, over 971978.83 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 14:53:48,277 INFO [train.py:715] (7/8) Epoch 19, batch 8150, loss[loss=0.144, simple_loss=0.2133, pruned_loss=0.03736, over 4874.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.0284, over 972200.97 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:54:27,925 INFO [train.py:715] (7/8) Epoch 19, batch 8200, loss[loss=0.117, simple_loss=0.1967, pruned_loss=0.01863, over 4885.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02837, over 972131.83 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 14:55:06,910 INFO [train.py:715] (7/8) Epoch 19, batch 8250, loss[loss=0.1281, simple_loss=0.2051, pruned_loss=0.02554, over 4863.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02885, over 972530.08 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 14:55:45,535 INFO [train.py:715] (7/8) Epoch 19, batch 8300, loss[loss=0.1053, simple_loss=0.1794, pruned_loss=0.01557, over 4818.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2067, pruned_loss=0.02896, over 972808.99 frames.], batch size: 27, lr: 1.18e-04 +2022-05-09 14:56:25,225 INFO [train.py:715] (7/8) Epoch 19, batch 8350, loss[loss=0.1419, simple_loss=0.2221, pruned_loss=0.03085, over 4778.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2072, pruned_loss=0.02908, over 972737.58 frames.], batch size: 18, lr: 1.18e-04 +2022-05-09 14:57:04,446 INFO [train.py:715] (7/8) Epoch 19, batch 8400, loss[loss=0.1232, simple_loss=0.2011, pruned_loss=0.02264, over 4834.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2057, pruned_loss=0.02791, over 972436.60 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 14:57:43,452 INFO [train.py:715] (7/8) Epoch 19, batch 8450, loss[loss=0.1414, simple_loss=0.2133, pruned_loss=0.03475, over 4974.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02837, over 973653.38 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 14:58:23,235 INFO [train.py:715] (7/8) Epoch 19, batch 8500, loss[loss=0.15, simple_loss=0.2354, pruned_loss=0.03228, over 4950.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02838, over 973876.18 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 14:59:01,926 INFO [train.py:715] (7/8) Epoch 19, batch 8550, loss[loss=0.1283, simple_loss=0.1977, pruned_loss=0.02947, over 
4892.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02866, over 973187.51 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 14:59:41,021 INFO [train.py:715] (7/8) Epoch 19, batch 8600, loss[loss=0.1131, simple_loss=0.1807, pruned_loss=0.02277, over 4847.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02833, over 973117.86 frames.], batch size: 32, lr: 1.18e-04 +2022-05-09 15:00:20,525 INFO [train.py:715] (7/8) Epoch 19, batch 8650, loss[loss=0.1401, simple_loss=0.2164, pruned_loss=0.03188, over 4833.00 frames.], tot_loss[loss=0.1307, simple_loss=0.205, pruned_loss=0.02823, over 973447.43 frames.], batch size: 26, lr: 1.18e-04 +2022-05-09 15:01:00,040 INFO [train.py:715] (7/8) Epoch 19, batch 8700, loss[loss=0.1187, simple_loss=0.2016, pruned_loss=0.01788, over 4932.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2051, pruned_loss=0.02823, over 973748.66 frames.], batch size: 23, lr: 1.18e-04 +2022-05-09 15:01:39,202 INFO [train.py:715] (7/8) Epoch 19, batch 8750, loss[loss=0.1419, simple_loss=0.2233, pruned_loss=0.03024, over 4765.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.02862, over 972855.71 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 15:02:17,956 INFO [train.py:715] (7/8) Epoch 19, batch 8800, loss[loss=0.1365, simple_loss=0.2123, pruned_loss=0.03035, over 4923.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.0289, over 973037.90 frames.], batch size: 39, lr: 1.18e-04 +2022-05-09 15:02:57,609 INFO [train.py:715] (7/8) Epoch 19, batch 8850, loss[loss=0.128, simple_loss=0.2062, pruned_loss=0.02486, over 4880.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02871, over 974907.66 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 15:03:36,664 INFO [train.py:715] (7/8) Epoch 19, batch 8900, loss[loss=0.147, simple_loss=0.2203, pruned_loss=0.03689, over 4870.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02928, over 973792.36 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 15:04:16,006 INFO [train.py:715] (7/8) Epoch 19, batch 8950, loss[loss=0.1269, simple_loss=0.2067, pruned_loss=0.02353, over 4793.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2071, pruned_loss=0.02922, over 972925.93 frames.], batch size: 24, lr: 1.18e-04 +2022-05-09 15:04:54,901 INFO [train.py:715] (7/8) Epoch 19, batch 9000, loss[loss=0.148, simple_loss=0.2196, pruned_loss=0.03816, over 4757.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2079, pruned_loss=0.02941, over 973706.03 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:04:54,902 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 15:05:04,820 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1047, simple_loss=0.1879, pruned_loss=0.01072, over 914524.00 frames. 
+2022-05-09 15:05:44,267 INFO [train.py:715] (7/8) Epoch 19, batch 9050, loss[loss=0.1449, simple_loss=0.2169, pruned_loss=0.03649, over 4823.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2078, pruned_loss=0.02899, over 973588.50 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 15:06:23,991 INFO [train.py:715] (7/8) Epoch 19, batch 9100, loss[loss=0.1182, simple_loss=0.2018, pruned_loss=0.0173, over 4893.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2077, pruned_loss=0.02889, over 972534.93 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:07:03,254 INFO [train.py:715] (7/8) Epoch 19, batch 9150, loss[loss=0.1435, simple_loss=0.2157, pruned_loss=0.03564, over 4982.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2076, pruned_loss=0.02893, over 972836.57 frames.], batch size: 14, lr: 1.18e-04 +2022-05-09 15:07:42,032 INFO [train.py:715] (7/8) Epoch 19, batch 9200, loss[loss=0.1449, simple_loss=0.2296, pruned_loss=0.03009, over 4977.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02816, over 973616.90 frames.], batch size: 28, lr: 1.18e-04 +2022-05-09 15:08:21,756 INFO [train.py:715] (7/8) Epoch 19, batch 9250, loss[loss=0.1285, simple_loss=0.1969, pruned_loss=0.03001, over 4685.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02835, over 973144.69 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 15:09:00,953 INFO [train.py:715] (7/8) Epoch 19, batch 9300, loss[loss=0.1269, simple_loss=0.1937, pruned_loss=0.03007, over 4883.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02846, over 972287.18 frames.], batch size: 22, lr: 1.18e-04 +2022-05-09 15:09:39,865 INFO [train.py:715] (7/8) Epoch 19, batch 9350, loss[loss=0.144, simple_loss=0.2191, pruned_loss=0.03444, over 4835.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02832, over 972065.67 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 15:10:19,957 INFO [train.py:715] (7/8) Epoch 19, batch 9400, loss[loss=0.166, simple_loss=0.254, pruned_loss=0.03902, over 4836.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02848, over 972160.88 frames.], batch size: 15, lr: 1.18e-04 +2022-05-09 15:11:00,059 INFO [train.py:715] (7/8) Epoch 19, batch 9450, loss[loss=0.1333, simple_loss=0.2107, pruned_loss=0.02796, over 4917.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02896, over 972265.62 frames.], batch size: 17, lr: 1.18e-04 +2022-05-09 15:11:38,885 INFO [train.py:715] (7/8) Epoch 19, batch 9500, loss[loss=0.1366, simple_loss=0.2149, pruned_loss=0.02914, over 4943.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.0286, over 972396.30 frames.], batch size: 21, lr: 1.18e-04 +2022-05-09 15:12:18,094 INFO [train.py:715] (7/8) Epoch 19, batch 9550, loss[loss=0.1272, simple_loss=0.2022, pruned_loss=0.02606, over 4871.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02868, over 973102.13 frames.], batch size: 30, lr: 1.18e-04 +2022-05-09 15:12:57,473 INFO [train.py:715] (7/8) Epoch 19, batch 9600, loss[loss=0.132, simple_loss=0.2157, pruned_loss=0.02409, over 4956.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02825, over 972746.20 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 15:13:36,649 INFO [train.py:715] (7/8) Epoch 19, batch 9650, loss[loss=0.1071, simple_loss=0.174, pruned_loss=0.02014, over 4826.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.0284, over 973049.05 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 15:14:14,976 
INFO [train.py:715] (7/8) Epoch 19, batch 9700, loss[loss=0.1396, simple_loss=0.2222, pruned_loss=0.02846, over 4906.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02874, over 972571.00 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:14:54,704 INFO [train.py:715] (7/8) Epoch 19, batch 9750, loss[loss=0.116, simple_loss=0.2, pruned_loss=0.01599, over 4939.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02866, over 972612.84 frames.], batch size: 29, lr: 1.18e-04 +2022-05-09 15:15:34,784 INFO [train.py:715] (7/8) Epoch 19, batch 9800, loss[loss=0.1394, simple_loss=0.2156, pruned_loss=0.03164, over 4895.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02876, over 972282.59 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:16:14,505 INFO [train.py:715] (7/8) Epoch 19, batch 9850, loss[loss=0.1207, simple_loss=0.2064, pruned_loss=0.01747, over 4816.00 frames.], tot_loss[loss=0.132, simple_loss=0.2069, pruned_loss=0.02859, over 972139.70 frames.], batch size: 25, lr: 1.18e-04 +2022-05-09 15:16:53,383 INFO [train.py:715] (7/8) Epoch 19, batch 9900, loss[loss=0.1461, simple_loss=0.2239, pruned_loss=0.03412, over 4872.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02836, over 972252.42 frames.], batch size: 19, lr: 1.18e-04 +2022-05-09 15:17:33,334 INFO [train.py:715] (7/8) Epoch 19, batch 9950, loss[loss=0.09767, simple_loss=0.1704, pruned_loss=0.01245, over 4862.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2053, pruned_loss=0.0279, over 972396.58 frames.], batch size: 20, lr: 1.18e-04 +2022-05-09 15:18:12,860 INFO [train.py:715] (7/8) Epoch 19, batch 10000, loss[loss=0.1278, simple_loss=0.1996, pruned_loss=0.02805, over 4882.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2056, pruned_loss=0.02803, over 972549.24 frames.], batch size: 16, lr: 1.18e-04 +2022-05-09 15:18:51,543 INFO [train.py:715] (7/8) Epoch 19, batch 10050, loss[loss=0.1125, simple_loss=0.1896, pruned_loss=0.01773, over 4651.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.0281, over 971505.55 frames.], batch size: 13, lr: 1.18e-04 +2022-05-09 15:19:31,276 INFO [train.py:715] (7/8) Epoch 19, batch 10100, loss[loss=0.1216, simple_loss=0.1914, pruned_loss=0.02589, over 4813.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02837, over 972417.74 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 15:20:10,775 INFO [train.py:715] (7/8) Epoch 19, batch 10150, loss[loss=0.135, simple_loss=0.2032, pruned_loss=0.03343, over 4747.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.0282, over 971631.16 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:20:49,757 INFO [train.py:715] (7/8) Epoch 19, batch 10200, loss[loss=0.1426, simple_loss=0.2182, pruned_loss=0.03347, over 4929.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02798, over 972599.13 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:21:29,137 INFO [train.py:715] (7/8) Epoch 19, batch 10250, loss[loss=0.1128, simple_loss=0.1929, pruned_loss=0.0164, over 4934.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.0282, over 972553.97 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 15:22:09,266 INFO [train.py:715] (7/8) Epoch 19, batch 10300, loss[loss=0.1118, simple_loss=0.1913, pruned_loss=0.01612, over 4861.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02786, over 972651.21 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 15:22:48,850 INFO [train.py:715] 
(7/8) Epoch 19, batch 10350, loss[loss=0.1202, simple_loss=0.1918, pruned_loss=0.0243, over 4906.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02783, over 972346.76 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:23:27,536 INFO [train.py:715] (7/8) Epoch 19, batch 10400, loss[loss=0.1269, simple_loss=0.2084, pruned_loss=0.02269, over 4937.00 frames.], tot_loss[loss=0.13, simple_loss=0.2045, pruned_loss=0.02776, over 971171.94 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 15:24:07,293 INFO [train.py:715] (7/8) Epoch 19, batch 10450, loss[loss=0.1208, simple_loss=0.2018, pruned_loss=0.01989, over 4811.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2044, pruned_loss=0.02754, over 971673.99 frames.], batch size: 27, lr: 1.17e-04 +2022-05-09 15:24:47,016 INFO [train.py:715] (7/8) Epoch 19, batch 10500, loss[loss=0.1394, simple_loss=0.2149, pruned_loss=0.03199, over 4879.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2052, pruned_loss=0.02817, over 971978.07 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 15:25:25,926 INFO [train.py:715] (7/8) Epoch 19, batch 10550, loss[loss=0.1085, simple_loss=0.1793, pruned_loss=0.01889, over 4764.00 frames.], tot_loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02785, over 972593.19 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:26:04,898 INFO [train.py:715] (7/8) Epoch 19, batch 10600, loss[loss=0.1155, simple_loss=0.1899, pruned_loss=0.02057, over 4801.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02791, over 972343.60 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:26:47,170 INFO [train.py:715] (7/8) Epoch 19, batch 10650, loss[loss=0.1573, simple_loss=0.2273, pruned_loss=0.0437, over 4852.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02811, over 973172.12 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 15:27:26,344 INFO [train.py:715] (7/8) Epoch 19, batch 10700, loss[loss=0.1127, simple_loss=0.1857, pruned_loss=0.01985, over 4902.00 frames.], tot_loss[loss=0.1306, simple_loss=0.205, pruned_loss=0.02806, over 972617.85 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:28:05,704 INFO [train.py:715] (7/8) Epoch 19, batch 10750, loss[loss=0.1189, simple_loss=0.1943, pruned_loss=0.02177, over 4872.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.02811, over 972245.95 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 15:28:45,281 INFO [train.py:715] (7/8) Epoch 19, batch 10800, loss[loss=0.1553, simple_loss=0.2346, pruned_loss=0.03801, over 4848.00 frames.], tot_loss[loss=0.1307, simple_loss=0.205, pruned_loss=0.02815, over 972242.07 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 15:29:25,028 INFO [train.py:715] (7/8) Epoch 19, batch 10850, loss[loss=0.1048, simple_loss=0.1833, pruned_loss=0.01321, over 4967.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02797, over 972810.24 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 15:30:03,633 INFO [train.py:715] (7/8) Epoch 19, batch 10900, loss[loss=0.1268, simple_loss=0.203, pruned_loss=0.0253, over 4808.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2058, pruned_loss=0.02781, over 972538.12 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 15:30:42,650 INFO [train.py:715] (7/8) Epoch 19, batch 10950, loss[loss=0.1494, simple_loss=0.2131, pruned_loss=0.0428, over 4752.00 frames.], tot_loss[loss=0.13, simple_loss=0.205, pruned_loss=0.02749, over 972880.31 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:31:22,374 INFO [train.py:715] (7/8) Epoch 19, 
batch 11000, loss[loss=0.1332, simple_loss=0.2008, pruned_loss=0.03285, over 4954.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2058, pruned_loss=0.0278, over 972597.16 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 15:32:02,209 INFO [train.py:715] (7/8) Epoch 19, batch 11050, loss[loss=0.1085, simple_loss=0.1872, pruned_loss=0.01493, over 4874.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02795, over 973135.91 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 15:32:40,675 INFO [train.py:715] (7/8) Epoch 19, batch 11100, loss[loss=0.1049, simple_loss=0.1727, pruned_loss=0.01856, over 4739.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02794, over 973772.21 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 15:33:20,043 INFO [train.py:715] (7/8) Epoch 19, batch 11150, loss[loss=0.1516, simple_loss=0.2282, pruned_loss=0.03754, over 4959.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.0283, over 974050.57 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:33:59,454 INFO [train.py:715] (7/8) Epoch 19, batch 11200, loss[loss=0.1483, simple_loss=0.2265, pruned_loss=0.03501, over 4828.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02833, over 974292.38 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 15:34:38,834 INFO [train.py:715] (7/8) Epoch 19, batch 11250, loss[loss=0.1352, simple_loss=0.2106, pruned_loss=0.02989, over 4925.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02818, over 973558.33 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 15:35:18,205 INFO [train.py:715] (7/8) Epoch 19, batch 11300, loss[loss=0.1016, simple_loss=0.1724, pruned_loss=0.0154, over 4701.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02829, over 973301.78 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:35:56,986 INFO [train.py:715] (7/8) Epoch 19, batch 11350, loss[loss=0.1429, simple_loss=0.2094, pruned_loss=0.03826, over 4983.00 frames.], tot_loss[loss=0.1321, simple_loss=0.207, pruned_loss=0.0286, over 972791.73 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 15:36:36,599 INFO [train.py:715] (7/8) Epoch 19, batch 11400, loss[loss=0.1351, simple_loss=0.2159, pruned_loss=0.02714, over 4928.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2068, pruned_loss=0.02877, over 972127.50 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 15:37:16,208 INFO [train.py:715] (7/8) Epoch 19, batch 11450, loss[loss=0.1235, simple_loss=0.1975, pruned_loss=0.02476, over 4937.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02803, over 973467.50 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:37:56,154 INFO [train.py:715] (7/8) Epoch 19, batch 11500, loss[loss=0.1124, simple_loss=0.1921, pruned_loss=0.01633, over 4913.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02778, over 973719.75 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:38:35,582 INFO [train.py:715] (7/8) Epoch 19, batch 11550, loss[loss=0.1346, simple_loss=0.211, pruned_loss=0.02907, over 4926.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2052, pruned_loss=0.02754, over 973438.48 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 15:39:14,555 INFO [train.py:715] (7/8) Epoch 19, batch 11600, loss[loss=0.1196, simple_loss=0.1919, pruned_loss=0.02361, over 4766.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2053, pruned_loss=0.02745, over 972540.45 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:39:54,377 INFO [train.py:715] (7/8) Epoch 19, batch 11650, 
loss[loss=0.1219, simple_loss=0.1948, pruned_loss=0.02453, over 4894.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2054, pruned_loss=0.02755, over 972601.51 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 15:40:33,464 INFO [train.py:715] (7/8) Epoch 19, batch 11700, loss[loss=0.1386, simple_loss=0.2192, pruned_loss=0.02902, over 4829.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2055, pruned_loss=0.02758, over 972360.13 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 15:41:13,004 INFO [train.py:715] (7/8) Epoch 19, batch 11750, loss[loss=0.1333, simple_loss=0.2012, pruned_loss=0.03274, over 4831.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2048, pruned_loss=0.02774, over 971425.14 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 15:41:52,544 INFO [train.py:715] (7/8) Epoch 19, batch 11800, loss[loss=0.1488, simple_loss=0.2267, pruned_loss=0.03548, over 4919.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02812, over 971135.88 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 15:42:32,203 INFO [train.py:715] (7/8) Epoch 19, batch 11850, loss[loss=0.1325, simple_loss=0.2061, pruned_loss=0.02945, over 4776.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02856, over 971236.56 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:43:11,795 INFO [train.py:715] (7/8) Epoch 19, batch 11900, loss[loss=0.1159, simple_loss=0.1875, pruned_loss=0.02216, over 4894.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02828, over 971110.45 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 15:43:51,292 INFO [train.py:715] (7/8) Epoch 19, batch 11950, loss[loss=0.1282, simple_loss=0.1958, pruned_loss=0.03029, over 4844.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02796, over 971307.07 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 15:44:30,455 INFO [train.py:715] (7/8) Epoch 19, batch 12000, loss[loss=0.1132, simple_loss=0.1855, pruned_loss=0.02044, over 4943.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2045, pruned_loss=0.02763, over 972499.17 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 15:44:30,456 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 15:44:40,311 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1044, simple_loss=0.1877, pruned_loss=0.01054, over 914524.00 frames. 
+2022-05-09 15:45:20,293 INFO [train.py:715] (7/8) Epoch 19, batch 12050, loss[loss=0.1376, simple_loss=0.2072, pruned_loss=0.03398, over 4878.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02853, over 972697.16 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 15:46:00,177 INFO [train.py:715] (7/8) Epoch 19, batch 12100, loss[loss=0.1243, simple_loss=0.2033, pruned_loss=0.02263, over 4764.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2065, pruned_loss=0.02895, over 971850.34 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:46:39,305 INFO [train.py:715] (7/8) Epoch 19, batch 12150, loss[loss=0.1334, simple_loss=0.2021, pruned_loss=0.03235, over 4966.00 frames.], tot_loss[loss=0.132, simple_loss=0.2064, pruned_loss=0.0288, over 972060.49 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 15:47:18,768 INFO [train.py:715] (7/8) Epoch 19, batch 12200, loss[loss=0.1426, simple_loss=0.2233, pruned_loss=0.031, over 4826.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02864, over 972332.24 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:47:58,254 INFO [train.py:715] (7/8) Epoch 19, batch 12250, loss[loss=0.1265, simple_loss=0.2084, pruned_loss=0.02234, over 4798.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2069, pruned_loss=0.02877, over 972210.65 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 15:48:37,869 INFO [train.py:715] (7/8) Epoch 19, batch 12300, loss[loss=0.1477, simple_loss=0.2197, pruned_loss=0.03788, over 4766.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2069, pruned_loss=0.02864, over 972272.25 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:49:17,547 INFO [train.py:715] (7/8) Epoch 19, batch 12350, loss[loss=0.1363, simple_loss=0.2184, pruned_loss=0.02708, over 4973.00 frames.], tot_loss[loss=0.132, simple_loss=0.207, pruned_loss=0.02849, over 972867.86 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 15:49:56,105 INFO [train.py:715] (7/8) Epoch 19, batch 12400, loss[loss=0.1395, simple_loss=0.2026, pruned_loss=0.03816, over 4993.00 frames.], tot_loss[loss=0.1312, simple_loss=0.206, pruned_loss=0.02819, over 971979.99 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:50:35,578 INFO [train.py:715] (7/8) Epoch 19, batch 12450, loss[loss=0.1305, simple_loss=0.2112, pruned_loss=0.02488, over 4755.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02821, over 971517.14 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:51:14,303 INFO [train.py:715] (7/8) Epoch 19, batch 12500, loss[loss=0.1133, simple_loss=0.1809, pruned_loss=0.02284, over 4908.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02829, over 971391.51 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:51:53,641 INFO [train.py:715] (7/8) Epoch 19, batch 12550, loss[loss=0.1292, simple_loss=0.2135, pruned_loss=0.02242, over 4947.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02851, over 970827.96 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 15:52:33,123 INFO [train.py:715] (7/8) Epoch 19, batch 12600, loss[loss=0.1162, simple_loss=0.1998, pruned_loss=0.01637, over 4823.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2063, pruned_loss=0.0285, over 971380.27 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 15:53:12,609 INFO [train.py:715] (7/8) Epoch 19, batch 12650, loss[loss=0.1411, simple_loss=0.2137, pruned_loss=0.03424, over 4858.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.0283, over 971608.51 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 
15:53:51,568 INFO [train.py:715] (7/8) Epoch 19, batch 12700, loss[loss=0.1498, simple_loss=0.2257, pruned_loss=0.03697, over 4988.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02863, over 972209.35 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 15:54:30,743 INFO [train.py:715] (7/8) Epoch 19, batch 12750, loss[loss=0.1505, simple_loss=0.2144, pruned_loss=0.04325, over 4842.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02851, over 971938.05 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 15:55:10,388 INFO [train.py:715] (7/8) Epoch 19, batch 12800, loss[loss=0.1411, simple_loss=0.2211, pruned_loss=0.03059, over 4904.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02848, over 971474.28 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 15:55:49,788 INFO [train.py:715] (7/8) Epoch 19, batch 12850, loss[loss=0.1399, simple_loss=0.2187, pruned_loss=0.03059, over 4784.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02837, over 970054.76 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 15:56:28,732 INFO [train.py:715] (7/8) Epoch 19, batch 12900, loss[loss=0.1052, simple_loss=0.1841, pruned_loss=0.01313, over 4862.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2044, pruned_loss=0.02757, over 970257.65 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 15:57:08,293 INFO [train.py:715] (7/8) Epoch 19, batch 12950, loss[loss=0.1482, simple_loss=0.2267, pruned_loss=0.0349, over 4812.00 frames.], tot_loss[loss=0.1292, simple_loss=0.2038, pruned_loss=0.02731, over 970784.23 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 15:57:47,534 INFO [train.py:715] (7/8) Epoch 19, batch 13000, loss[loss=0.1321, simple_loss=0.217, pruned_loss=0.02366, over 4892.00 frames.], tot_loss[loss=0.129, simple_loss=0.2041, pruned_loss=0.02693, over 970674.29 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 15:58:26,678 INFO [train.py:715] (7/8) Epoch 19, batch 13050, loss[loss=0.1316, simple_loss=0.2048, pruned_loss=0.02924, over 4842.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2047, pruned_loss=0.02779, over 971086.45 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 15:59:05,571 INFO [train.py:715] (7/8) Epoch 19, batch 13100, loss[loss=0.1259, simple_loss=0.2042, pruned_loss=0.02382, over 4834.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02803, over 971963.87 frames.], batch size: 27, lr: 1.17e-04 +2022-05-09 15:59:44,833 INFO [train.py:715] (7/8) Epoch 19, batch 13150, loss[loss=0.1309, simple_loss=0.2152, pruned_loss=0.02328, over 4754.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2062, pruned_loss=0.02813, over 972066.75 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:00:24,455 INFO [train.py:715] (7/8) Epoch 19, batch 13200, loss[loss=0.1331, simple_loss=0.2035, pruned_loss=0.03131, over 4752.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02845, over 973022.66 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:01:03,679 INFO [train.py:715] (7/8) Epoch 19, batch 13250, loss[loss=0.1347, simple_loss=0.2106, pruned_loss=0.02942, over 4843.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02816, over 972977.37 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:01:42,981 INFO [train.py:715] (7/8) Epoch 19, batch 13300, loss[loss=0.1202, simple_loss=0.1968, pruned_loss=0.02173, over 4907.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02835, over 973318.22 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 
16:02:22,614 INFO [train.py:715] (7/8) Epoch 19, batch 13350, loss[loss=0.1824, simple_loss=0.2307, pruned_loss=0.06703, over 4739.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2052, pruned_loss=0.02832, over 972440.01 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:03:01,278 INFO [train.py:715] (7/8) Epoch 19, batch 13400, loss[loss=0.1168, simple_loss=0.2003, pruned_loss=0.01671, over 4758.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2045, pruned_loss=0.02796, over 972305.92 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:03:40,774 INFO [train.py:715] (7/8) Epoch 19, batch 13450, loss[loss=0.1302, simple_loss=0.2042, pruned_loss=0.02812, over 4920.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02827, over 971778.80 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:04:20,047 INFO [train.py:715] (7/8) Epoch 19, batch 13500, loss[loss=0.1323, simple_loss=0.1963, pruned_loss=0.03419, over 4888.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.02846, over 972273.85 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:04:59,471 INFO [train.py:715] (7/8) Epoch 19, batch 13550, loss[loss=0.1285, simple_loss=0.2056, pruned_loss=0.02573, over 4846.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02863, over 972767.77 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 16:05:38,407 INFO [train.py:715] (7/8) Epoch 19, batch 13600, loss[loss=0.1363, simple_loss=0.205, pruned_loss=0.03374, over 4843.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.0288, over 972535.73 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:06:17,571 INFO [train.py:715] (7/8) Epoch 19, batch 13650, loss[loss=0.1166, simple_loss=0.1934, pruned_loss=0.01993, over 4802.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02837, over 972545.12 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:06:57,016 INFO [train.py:715] (7/8) Epoch 19, batch 13700, loss[loss=0.1489, simple_loss=0.2279, pruned_loss=0.03495, over 4780.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02798, over 972030.54 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:07:35,733 INFO [train.py:715] (7/8) Epoch 19, batch 13750, loss[loss=0.1624, simple_loss=0.2304, pruned_loss=0.04721, over 4812.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.0284, over 971864.91 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:08:14,999 INFO [train.py:715] (7/8) Epoch 19, batch 13800, loss[loss=0.1414, simple_loss=0.2194, pruned_loss=0.03168, over 4759.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02832, over 972104.80 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:08:55,060 INFO [train.py:715] (7/8) Epoch 19, batch 13850, loss[loss=0.1273, simple_loss=0.1959, pruned_loss=0.02938, over 4846.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2045, pruned_loss=0.02813, over 972594.71 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:09:34,678 INFO [train.py:715] (7/8) Epoch 19, batch 13900, loss[loss=0.1173, simple_loss=0.1983, pruned_loss=0.0182, over 4808.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02834, over 972756.39 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:10:14,445 INFO [train.py:715] (7/8) Epoch 19, batch 13950, loss[loss=0.1107, simple_loss=0.1893, pruned_loss=0.016, over 4930.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02813, over 973056.55 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:10:53,217 
INFO [train.py:715] (7/8) Epoch 19, batch 14000, loss[loss=0.132, simple_loss=0.208, pruned_loss=0.02805, over 4902.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02796, over 973479.86 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:11:32,645 INFO [train.py:715] (7/8) Epoch 19, batch 14050, loss[loss=0.1659, simple_loss=0.2545, pruned_loss=0.03869, over 4912.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2054, pruned_loss=0.02791, over 973084.98 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:12:11,845 INFO [train.py:715] (7/8) Epoch 19, batch 14100, loss[loss=0.1471, simple_loss=0.2192, pruned_loss=0.03747, over 4899.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02879, over 973976.02 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:12:51,005 INFO [train.py:715] (7/8) Epoch 19, batch 14150, loss[loss=0.1488, simple_loss=0.2102, pruned_loss=0.04374, over 4857.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02877, over 972954.88 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 16:13:30,123 INFO [train.py:715] (7/8) Epoch 19, batch 14200, loss[loss=0.09593, simple_loss=0.1712, pruned_loss=0.01033, over 4745.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02877, over 974228.51 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:14:08,940 INFO [train.py:715] (7/8) Epoch 19, batch 14250, loss[loss=0.1255, simple_loss=0.2017, pruned_loss=0.02461, over 4756.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2064, pruned_loss=0.02903, over 972876.27 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:14:48,116 INFO [train.py:715] (7/8) Epoch 19, batch 14300, loss[loss=0.1131, simple_loss=0.1883, pruned_loss=0.01896, over 4923.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02873, over 973002.68 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:15:27,233 INFO [train.py:715] (7/8) Epoch 19, batch 14350, loss[loss=0.1317, simple_loss=0.2058, pruned_loss=0.02883, over 4754.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2072, pruned_loss=0.02893, over 971823.83 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:16:06,786 INFO [train.py:715] (7/8) Epoch 19, batch 14400, loss[loss=0.1706, simple_loss=0.2416, pruned_loss=0.04982, over 4918.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2075, pruned_loss=0.0291, over 971789.09 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:16:45,666 INFO [train.py:715] (7/8) Epoch 19, batch 14450, loss[loss=0.1187, simple_loss=0.1886, pruned_loss=0.02442, over 4755.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02931, over 972372.42 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:17:24,667 INFO [train.py:715] (7/8) Epoch 19, batch 14500, loss[loss=0.1188, simple_loss=0.1999, pruned_loss=0.01888, over 4897.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2083, pruned_loss=0.02964, over 971983.82 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:18:03,480 INFO [train.py:715] (7/8) Epoch 19, batch 14550, loss[loss=0.1151, simple_loss=0.1816, pruned_loss=0.02433, over 4964.00 frames.], tot_loss[loss=0.1334, simple_loss=0.2078, pruned_loss=0.02948, over 972835.81 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:18:43,155 INFO [train.py:715] (7/8) Epoch 19, batch 14600, loss[loss=0.1314, simple_loss=0.2028, pruned_loss=0.03001, over 4923.00 frames.], tot_loss[loss=0.1335, simple_loss=0.208, pruned_loss=0.02956, over 972933.62 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:19:22,225 INFO 
[train.py:715] (7/8) Epoch 19, batch 14650, loss[loss=0.1201, simple_loss=0.1953, pruned_loss=0.02245, over 4965.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2079, pruned_loss=0.02933, over 973203.53 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:20:01,164 INFO [train.py:715] (7/8) Epoch 19, batch 14700, loss[loss=0.1533, simple_loss=0.2219, pruned_loss=0.04237, over 4891.00 frames.], tot_loss[loss=0.1336, simple_loss=0.2083, pruned_loss=0.02949, over 972942.55 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 16:20:40,522 INFO [train.py:715] (7/8) Epoch 19, batch 14750, loss[loss=0.1412, simple_loss=0.2115, pruned_loss=0.03545, over 4973.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2084, pruned_loss=0.02956, over 972046.16 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:21:19,781 INFO [train.py:715] (7/8) Epoch 19, batch 14800, loss[loss=0.1299, simple_loss=0.2021, pruned_loss=0.02885, over 4804.00 frames.], tot_loss[loss=0.1335, simple_loss=0.2081, pruned_loss=0.02948, over 971551.09 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:21:58,087 INFO [train.py:715] (7/8) Epoch 19, batch 14850, loss[loss=0.1308, simple_loss=0.2152, pruned_loss=0.0232, over 4829.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02861, over 971949.08 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 16:22:37,373 INFO [train.py:715] (7/8) Epoch 19, batch 14900, loss[loss=0.1265, simple_loss=0.2001, pruned_loss=0.02643, over 4788.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2076, pruned_loss=0.02907, over 972339.03 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:23:16,324 INFO [train.py:715] (7/8) Epoch 19, batch 14950, loss[loss=0.1425, simple_loss=0.2151, pruned_loss=0.03496, over 4949.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.02832, over 972298.32 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:23:55,095 INFO [train.py:715] (7/8) Epoch 19, batch 15000, loss[loss=0.1177, simple_loss=0.2002, pruned_loss=0.01759, over 4914.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02827, over 972652.37 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 16:23:55,096 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 16:24:07,488 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1877, pruned_loss=0.01064, over 914524.00 frames. 
+2022-05-09 16:24:46,706 INFO [train.py:715] (7/8) Epoch 19, batch 15050, loss[loss=0.1275, simple_loss=0.1971, pruned_loss=0.029, over 4824.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02836, over 973177.43 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:25:26,174 INFO [train.py:715] (7/8) Epoch 19, batch 15100, loss[loss=0.1398, simple_loss=0.2181, pruned_loss=0.03075, over 4777.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.02865, over 972877.16 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:26:05,807 INFO [train.py:715] (7/8) Epoch 19, batch 15150, loss[loss=0.1269, simple_loss=0.2151, pruned_loss=0.01933, over 4760.00 frames.], tot_loss[loss=0.1319, simple_loss=0.206, pruned_loss=0.02889, over 972789.73 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:26:45,269 INFO [train.py:715] (7/8) Epoch 19, batch 15200, loss[loss=0.1437, simple_loss=0.2277, pruned_loss=0.02982, over 4926.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.0285, over 972437.94 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 16:27:24,253 INFO [train.py:715] (7/8) Epoch 19, batch 15250, loss[loss=0.1137, simple_loss=0.1942, pruned_loss=0.01665, over 4908.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02829, over 973384.80 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:28:04,173 INFO [train.py:715] (7/8) Epoch 19, batch 15300, loss[loss=0.1272, simple_loss=0.2029, pruned_loss=0.02573, over 4788.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2054, pruned_loss=0.02803, over 972667.26 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:28:43,720 INFO [train.py:715] (7/8) Epoch 19, batch 15350, loss[loss=0.1347, simple_loss=0.2106, pruned_loss=0.02941, over 4799.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2049, pruned_loss=0.02795, over 972299.85 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:29:23,531 INFO [train.py:715] (7/8) Epoch 19, batch 15400, loss[loss=0.1226, simple_loss=0.1996, pruned_loss=0.02283, over 4763.00 frames.], tot_loss[loss=0.1305, simple_loss=0.205, pruned_loss=0.02796, over 972239.75 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:30:03,014 INFO [train.py:715] (7/8) Epoch 19, batch 15450, loss[loss=0.1239, simple_loss=0.1968, pruned_loss=0.02544, over 4928.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02826, over 971937.50 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 16:30:42,453 INFO [train.py:715] (7/8) Epoch 19, batch 15500, loss[loss=0.1254, simple_loss=0.2116, pruned_loss=0.01961, over 4806.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02822, over 970609.80 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:31:21,339 INFO [train.py:715] (7/8) Epoch 19, batch 15550, loss[loss=0.1661, simple_loss=0.2268, pruned_loss=0.0527, over 4768.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02903, over 970415.59 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:32:00,420 INFO [train.py:715] (7/8) Epoch 19, batch 15600, loss[loss=0.1276, simple_loss=0.2084, pruned_loss=0.02339, over 4842.00 frames.], tot_loss[loss=0.1323, simple_loss=0.207, pruned_loss=0.02879, over 971619.32 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:32:40,091 INFO [train.py:715] (7/8) Epoch 19, batch 15650, loss[loss=0.1226, simple_loss=0.1794, pruned_loss=0.03295, over 4821.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02836, over 971541.11 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 
16:33:19,041 INFO [train.py:715] (7/8) Epoch 19, batch 15700, loss[loss=0.121, simple_loss=0.1913, pruned_loss=0.02539, over 4988.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2057, pruned_loss=0.02842, over 972186.69 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:33:59,140 INFO [train.py:715] (7/8) Epoch 19, batch 15750, loss[loss=0.1257, simple_loss=0.2023, pruned_loss=0.02457, over 4784.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2068, pruned_loss=0.0289, over 972543.48 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:34:38,396 INFO [train.py:715] (7/8) Epoch 19, batch 15800, loss[loss=0.1247, simple_loss=0.1972, pruned_loss=0.02611, over 4865.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.02852, over 973039.44 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 16:35:17,531 INFO [train.py:715] (7/8) Epoch 19, batch 15850, loss[loss=0.1254, simple_loss=0.2039, pruned_loss=0.02349, over 4921.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02837, over 973183.39 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 16:35:56,471 INFO [train.py:715] (7/8) Epoch 19, batch 15900, loss[loss=0.1357, simple_loss=0.2093, pruned_loss=0.03111, over 4985.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02861, over 973514.27 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:36:35,621 INFO [train.py:715] (7/8) Epoch 19, batch 15950, loss[loss=0.09937, simple_loss=0.1734, pruned_loss=0.01266, over 4934.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02872, over 972957.81 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 16:37:15,273 INFO [train.py:715] (7/8) Epoch 19, batch 16000, loss[loss=0.1325, simple_loss=0.2154, pruned_loss=0.02477, over 4861.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2062, pruned_loss=0.02873, over 972833.45 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 16:37:53,933 INFO [train.py:715] (7/8) Epoch 19, batch 16050, loss[loss=0.1238, simple_loss=0.2034, pruned_loss=0.0221, over 4832.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2067, pruned_loss=0.02886, over 973276.16 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 16:38:33,245 INFO [train.py:715] (7/8) Epoch 19, batch 16100, loss[loss=0.1415, simple_loss=0.2227, pruned_loss=0.03016, over 4859.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2065, pruned_loss=0.02837, over 972492.94 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 16:39:12,541 INFO [train.py:715] (7/8) Epoch 19, batch 16150, loss[loss=0.1674, simple_loss=0.2405, pruned_loss=0.0471, over 4960.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02856, over 972635.39 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 16:39:51,596 INFO [train.py:715] (7/8) Epoch 19, batch 16200, loss[loss=0.1138, simple_loss=0.1938, pruned_loss=0.01689, over 4881.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02843, over 972941.95 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 16:40:29,890 INFO [train.py:715] (7/8) Epoch 19, batch 16250, loss[loss=0.1429, simple_loss=0.225, pruned_loss=0.03042, over 4964.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.02803, over 972603.27 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:41:08,938 INFO [train.py:715] (7/8) Epoch 19, batch 16300, loss[loss=0.1486, simple_loss=0.2255, pruned_loss=0.03586, over 4942.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02819, over 973253.73 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 
16:41:48,391 INFO [train.py:715] (7/8) Epoch 19, batch 16350, loss[loss=0.1234, simple_loss=0.1922, pruned_loss=0.02724, over 4773.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02847, over 973340.60 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:42:26,936 INFO [train.py:715] (7/8) Epoch 19, batch 16400, loss[loss=0.1248, simple_loss=0.2034, pruned_loss=0.02306, over 4907.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2061, pruned_loss=0.02799, over 971448.54 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:43:05,776 INFO [train.py:715] (7/8) Epoch 19, batch 16450, loss[loss=0.1212, simple_loss=0.1926, pruned_loss=0.02487, over 4910.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.0284, over 972387.70 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:43:44,324 INFO [train.py:715] (7/8) Epoch 19, batch 16500, loss[loss=0.1256, simple_loss=0.2092, pruned_loss=0.02102, over 4985.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02838, over 972739.15 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 16:44:23,745 INFO [train.py:715] (7/8) Epoch 19, batch 16550, loss[loss=0.1339, simple_loss=0.2085, pruned_loss=0.02963, over 4891.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02797, over 972833.39 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 16:45:02,747 INFO [train.py:715] (7/8) Epoch 19, batch 16600, loss[loss=0.13, simple_loss=0.2034, pruned_loss=0.02833, over 4835.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02849, over 971817.01 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:45:41,760 INFO [train.py:715] (7/8) Epoch 19, batch 16650, loss[loss=0.1328, simple_loss=0.2097, pruned_loss=0.02795, over 4939.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02793, over 971974.55 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:46:21,731 INFO [train.py:715] (7/8) Epoch 19, batch 16700, loss[loss=0.1156, simple_loss=0.1907, pruned_loss=0.02027, over 4986.00 frames.], tot_loss[loss=0.131, simple_loss=0.2058, pruned_loss=0.02813, over 972377.92 frames.], batch size: 31, lr: 1.17e-04 +2022-05-09 16:47:00,844 INFO [train.py:715] (7/8) Epoch 19, batch 16750, loss[loss=0.1196, simple_loss=0.1941, pruned_loss=0.02256, over 4986.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02832, over 972896.37 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 16:47:40,568 INFO [train.py:715] (7/8) Epoch 19, batch 16800, loss[loss=0.1199, simple_loss=0.1971, pruned_loss=0.02138, over 4981.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02801, over 973447.48 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:48:19,964 INFO [train.py:715] (7/8) Epoch 19, batch 16850, loss[loss=0.1196, simple_loss=0.1975, pruned_loss=0.02085, over 4971.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2049, pruned_loss=0.02776, over 973054.72 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 16:48:59,492 INFO [train.py:715] (7/8) Epoch 19, batch 16900, loss[loss=0.1131, simple_loss=0.1894, pruned_loss=0.01834, over 4978.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2047, pruned_loss=0.02799, over 972731.34 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 16:49:38,106 INFO [train.py:715] (7/8) Epoch 19, batch 16950, loss[loss=0.1061, simple_loss=0.1715, pruned_loss=0.02032, over 4986.00 frames.], tot_loss[loss=0.1295, simple_loss=0.2041, pruned_loss=0.02746, over 973534.08 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 
16:50:17,671 INFO [train.py:715] (7/8) Epoch 19, batch 17000, loss[loss=0.2165, simple_loss=0.2654, pruned_loss=0.08382, over 4977.00 frames.], tot_loss[loss=0.1298, simple_loss=0.2042, pruned_loss=0.02771, over 972847.09 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:50:57,097 INFO [train.py:715] (7/8) Epoch 19, batch 17050, loss[loss=0.144, simple_loss=0.2186, pruned_loss=0.03472, over 4798.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2049, pruned_loss=0.02765, over 972627.14 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 16:51:36,156 INFO [train.py:715] (7/8) Epoch 19, batch 17100, loss[loss=0.1281, simple_loss=0.1985, pruned_loss=0.0289, over 4812.00 frames.], tot_loss[loss=0.1303, simple_loss=0.205, pruned_loss=0.02773, over 971821.25 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:52:15,339 INFO [train.py:715] (7/8) Epoch 19, batch 17150, loss[loss=0.131, simple_loss=0.195, pruned_loss=0.03345, over 4933.00 frames.], tot_loss[loss=0.1302, simple_loss=0.205, pruned_loss=0.02774, over 972302.03 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 16:52:54,355 INFO [train.py:715] (7/8) Epoch 19, batch 17200, loss[loss=0.101, simple_loss=0.1698, pruned_loss=0.01613, over 4818.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2047, pruned_loss=0.02784, over 972128.56 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 16:53:33,091 INFO [train.py:715] (7/8) Epoch 19, batch 17250, loss[loss=0.1428, simple_loss=0.2119, pruned_loss=0.03683, over 4808.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2042, pruned_loss=0.02764, over 971927.25 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:54:12,080 INFO [train.py:715] (7/8) Epoch 19, batch 17300, loss[loss=0.1296, simple_loss=0.1946, pruned_loss=0.03232, over 4870.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02794, over 971499.15 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 16:54:51,727 INFO [train.py:715] (7/8) Epoch 19, batch 17350, loss[loss=0.144, simple_loss=0.2175, pruned_loss=0.03531, over 4978.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02798, over 971910.24 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 16:55:31,275 INFO [train.py:715] (7/8) Epoch 19, batch 17400, loss[loss=0.1123, simple_loss=0.1904, pruned_loss=0.0171, over 4796.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02825, over 971689.85 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 16:56:10,518 INFO [train.py:715] (7/8) Epoch 19, batch 17450, loss[loss=0.134, simple_loss=0.2184, pruned_loss=0.02482, over 4898.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02806, over 971082.02 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 16:56:49,866 INFO [train.py:715] (7/8) Epoch 19, batch 17500, loss[loss=0.1228, simple_loss=0.1877, pruned_loss=0.02893, over 4743.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.0283, over 970890.26 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:57:29,144 INFO [train.py:715] (7/8) Epoch 19, batch 17550, loss[loss=0.1333, simple_loss=0.206, pruned_loss=0.03036, over 4766.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02832, over 971431.44 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 16:58:08,753 INFO [train.py:715] (7/8) Epoch 19, batch 17600, loss[loss=0.1338, simple_loss=0.2082, pruned_loss=0.02969, over 4776.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.0285, over 971377.20 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 16:58:47,930 INFO 
[train.py:715] (7/8) Epoch 19, batch 17650, loss[loss=0.114, simple_loss=0.199, pruned_loss=0.01456, over 4809.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.0292, over 971543.62 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 16:59:27,073 INFO [train.py:715] (7/8) Epoch 19, batch 17700, loss[loss=0.1474, simple_loss=0.2194, pruned_loss=0.03766, over 4820.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02934, over 971832.23 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:00:06,649 INFO [train.py:715] (7/8) Epoch 19, batch 17750, loss[loss=0.1307, simple_loss=0.2109, pruned_loss=0.02521, over 4901.00 frames.], tot_loss[loss=0.1325, simple_loss=0.207, pruned_loss=0.02899, over 972731.24 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 17:00:45,239 INFO [train.py:715] (7/8) Epoch 19, batch 17800, loss[loss=0.1153, simple_loss=0.1895, pruned_loss=0.02056, over 4795.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2068, pruned_loss=0.02902, over 972121.69 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:01:24,012 INFO [train.py:715] (7/8) Epoch 19, batch 17850, loss[loss=0.1158, simple_loss=0.1873, pruned_loss=0.02219, over 4832.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.0288, over 971555.90 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:02:03,486 INFO [train.py:715] (7/8) Epoch 19, batch 17900, loss[loss=0.1404, simple_loss=0.2164, pruned_loss=0.03216, over 4740.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02839, over 971014.97 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:02:41,969 INFO [train.py:715] (7/8) Epoch 19, batch 17950, loss[loss=0.12, simple_loss=0.1909, pruned_loss=0.02456, over 4790.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02877, over 970748.62 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:03:21,255 INFO [train.py:715] (7/8) Epoch 19, batch 18000, loss[loss=0.1676, simple_loss=0.2334, pruned_loss=0.05092, over 4983.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2066, pruned_loss=0.02907, over 971200.71 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:03:21,256 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 17:03:31,130 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1046, simple_loss=0.1877, pruned_loss=0.01074, over 914524.00 frames. 
+2022-05-09 17:04:10,641 INFO [train.py:715] (7/8) Epoch 19, batch 18050, loss[loss=0.1314, simple_loss=0.2039, pruned_loss=0.02945, over 4859.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02927, over 971881.09 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:04:50,210 INFO [train.py:715] (7/8) Epoch 19, batch 18100, loss[loss=0.1353, simple_loss=0.2166, pruned_loss=0.02701, over 4807.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.02856, over 972644.32 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:05:30,069 INFO [train.py:715] (7/8) Epoch 19, batch 18150, loss[loss=0.1815, simple_loss=0.2496, pruned_loss=0.05666, over 4833.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02839, over 972791.81 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:06:09,193 INFO [train.py:715] (7/8) Epoch 19, batch 18200, loss[loss=0.1279, simple_loss=0.195, pruned_loss=0.03035, over 4843.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02827, over 973300.47 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 17:06:48,112 INFO [train.py:715] (7/8) Epoch 19, batch 18250, loss[loss=0.1442, simple_loss=0.2296, pruned_loss=0.02943, over 4764.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02844, over 972303.46 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:07:28,074 INFO [train.py:715] (7/8) Epoch 19, batch 18300, loss[loss=0.1225, simple_loss=0.1861, pruned_loss=0.0295, over 4697.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02872, over 972645.55 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:08:07,532 INFO [train.py:715] (7/8) Epoch 19, batch 18350, loss[loss=0.1136, simple_loss=0.1826, pruned_loss=0.02225, over 4820.00 frames.], tot_loss[loss=0.1315, simple_loss=0.206, pruned_loss=0.02855, over 972476.01 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:08:47,417 INFO [train.py:715] (7/8) Epoch 19, batch 18400, loss[loss=0.14, simple_loss=0.2075, pruned_loss=0.03624, over 4979.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.0287, over 973552.02 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:09:26,675 INFO [train.py:715] (7/8) Epoch 19, batch 18450, loss[loss=0.1308, simple_loss=0.1986, pruned_loss=0.03149, over 4750.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2061, pruned_loss=0.02868, over 973077.33 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:10:06,108 INFO [train.py:715] (7/8) Epoch 19, batch 18500, loss[loss=0.1375, simple_loss=0.2059, pruned_loss=0.03455, over 4844.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02882, over 972117.77 frames.], batch size: 34, lr: 1.17e-04 +2022-05-09 17:10:45,346 INFO [train.py:715] (7/8) Epoch 19, batch 18550, loss[loss=0.1481, simple_loss=0.2243, pruned_loss=0.03595, over 4860.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2059, pruned_loss=0.02868, over 972210.68 frames.], batch size: 32, lr: 1.17e-04 +2022-05-09 17:11:24,401 INFO [train.py:715] (7/8) Epoch 19, batch 18600, loss[loss=0.1304, simple_loss=0.2152, pruned_loss=0.02279, over 4764.00 frames.], tot_loss[loss=0.132, simple_loss=0.2061, pruned_loss=0.02891, over 971908.21 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:12:06,304 INFO [train.py:715] (7/8) Epoch 19, batch 18650, loss[loss=0.1256, simple_loss=0.2077, pruned_loss=0.02169, over 4890.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2069, pruned_loss=0.02888, over 970385.78 frames.], batch size: 22, lr: 1.17e-04 +2022-05-09 
17:12:45,156 INFO [train.py:715] (7/8) Epoch 19, batch 18700, loss[loss=0.126, simple_loss=0.2061, pruned_loss=0.02291, over 4822.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2067, pruned_loss=0.02876, over 970639.53 frames.], batch size: 27, lr: 1.17e-04 +2022-05-09 17:13:24,442 INFO [train.py:715] (7/8) Epoch 19, batch 18750, loss[loss=0.1291, simple_loss=0.2011, pruned_loss=0.02858, over 4981.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2071, pruned_loss=0.02939, over 972160.82 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:14:04,380 INFO [train.py:715] (7/8) Epoch 19, batch 18800, loss[loss=0.1572, simple_loss=0.2326, pruned_loss=0.04089, over 4950.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2064, pruned_loss=0.02927, over 971913.40 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 17:14:44,261 INFO [train.py:715] (7/8) Epoch 19, batch 18850, loss[loss=0.1336, simple_loss=0.2136, pruned_loss=0.02675, over 4799.00 frames.], tot_loss[loss=0.1329, simple_loss=0.2067, pruned_loss=0.02952, over 972335.85 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 17:15:23,451 INFO [train.py:715] (7/8) Epoch 19, batch 18900, loss[loss=0.142, simple_loss=0.2118, pruned_loss=0.03614, over 4940.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02915, over 972275.50 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 17:16:02,833 INFO [train.py:715] (7/8) Epoch 19, batch 18950, loss[loss=0.1199, simple_loss=0.1997, pruned_loss=0.02011, over 4687.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2072, pruned_loss=0.02898, over 972245.01 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:16:42,874 INFO [train.py:715] (7/8) Epoch 19, batch 19000, loss[loss=0.1382, simple_loss=0.2216, pruned_loss=0.02736, over 4977.00 frames.], tot_loss[loss=0.1328, simple_loss=0.207, pruned_loss=0.02929, over 972794.11 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:17:22,372 INFO [train.py:715] (7/8) Epoch 19, batch 19050, loss[loss=0.09952, simple_loss=0.174, pruned_loss=0.01253, over 4838.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02874, over 973061.81 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 17:18:01,437 INFO [train.py:715] (7/8) Epoch 19, batch 19100, loss[loss=0.1413, simple_loss=0.216, pruned_loss=0.03336, over 4846.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02893, over 972814.10 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:18:41,051 INFO [train.py:715] (7/8) Epoch 19, batch 19150, loss[loss=0.1127, simple_loss=0.1878, pruned_loss=0.01882, over 4924.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02915, over 973372.94 frames.], batch size: 29, lr: 1.17e-04 +2022-05-09 17:19:20,396 INFO [train.py:715] (7/8) Epoch 19, batch 19200, loss[loss=0.1209, simple_loss=0.1955, pruned_loss=0.02317, over 4760.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02881, over 973522.05 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:19:59,883 INFO [train.py:715] (7/8) Epoch 19, batch 19250, loss[loss=0.1345, simple_loss=0.2132, pruned_loss=0.02786, over 4769.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2065, pruned_loss=0.02899, over 973482.10 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:20:39,159 INFO [train.py:715] (7/8) Epoch 19, batch 19300, loss[loss=0.1465, simple_loss=0.2147, pruned_loss=0.03911, over 4974.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02927, over 973025.77 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:21:19,523 
INFO [train.py:715] (7/8) Epoch 19, batch 19350, loss[loss=0.1572, simple_loss=0.2352, pruned_loss=0.03957, over 4952.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.0293, over 972256.13 frames.], batch size: 39, lr: 1.17e-04 +2022-05-09 17:21:58,947 INFO [train.py:715] (7/8) Epoch 19, batch 19400, loss[loss=0.1412, simple_loss=0.2192, pruned_loss=0.03156, over 4856.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2065, pruned_loss=0.02927, over 971910.18 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:22:38,650 INFO [train.py:715] (7/8) Epoch 19, batch 19450, loss[loss=0.1264, simple_loss=0.1905, pruned_loss=0.03117, over 4772.00 frames.], tot_loss[loss=0.1337, simple_loss=0.2076, pruned_loss=0.02989, over 971584.30 frames.], batch size: 14, lr: 1.17e-04 +2022-05-09 17:23:18,390 INFO [train.py:715] (7/8) Epoch 19, batch 19500, loss[loss=0.1414, simple_loss=0.2168, pruned_loss=0.03296, over 4842.00 frames.], tot_loss[loss=0.1331, simple_loss=0.207, pruned_loss=0.0296, over 971608.18 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:23:57,782 INFO [train.py:715] (7/8) Epoch 19, batch 19550, loss[loss=0.1233, simple_loss=0.1997, pruned_loss=0.02346, over 4743.00 frames.], tot_loss[loss=0.1333, simple_loss=0.2071, pruned_loss=0.02978, over 971064.05 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:24:36,953 INFO [train.py:715] (7/8) Epoch 19, batch 19600, loss[loss=0.1279, simple_loss=0.2114, pruned_loss=0.02225, over 4990.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2066, pruned_loss=0.02924, over 970800.00 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:25:17,642 INFO [train.py:715] (7/8) Epoch 19, batch 19650, loss[loss=0.1066, simple_loss=0.1752, pruned_loss=0.01901, over 4855.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2071, pruned_loss=0.02922, over 971740.29 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 17:25:56,968 INFO [train.py:715] (7/8) Epoch 19, batch 19700, loss[loss=0.1285, simple_loss=0.203, pruned_loss=0.02699, over 4959.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2064, pruned_loss=0.02888, over 972127.01 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:26:35,808 INFO [train.py:715] (7/8) Epoch 19, batch 19750, loss[loss=0.1518, simple_loss=0.2197, pruned_loss=0.04191, over 4774.00 frames.], tot_loss[loss=0.1327, simple_loss=0.207, pruned_loss=0.02921, over 971594.34 frames.], batch size: 17, lr: 1.17e-04 +2022-05-09 17:27:16,061 INFO [train.py:715] (7/8) Epoch 19, batch 19800, loss[loss=0.171, simple_loss=0.2549, pruned_loss=0.04354, over 4929.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.0291, over 971812.75 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 17:27:55,920 INFO [train.py:715] (7/8) Epoch 19, batch 19850, loss[loss=0.1072, simple_loss=0.1843, pruned_loss=0.01505, over 4961.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2074, pruned_loss=0.0291, over 972332.39 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 17:28:35,203 INFO [train.py:715] (7/8) Epoch 19, batch 19900, loss[loss=0.1189, simple_loss=0.1916, pruned_loss=0.02315, over 4870.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2068, pruned_loss=0.02927, over 972049.13 frames.], batch size: 38, lr: 1.17e-04 +2022-05-09 17:29:13,881 INFO [train.py:715] (7/8) Epoch 19, batch 19950, loss[loss=0.1273, simple_loss=0.2097, pruned_loss=0.02244, over 4932.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02885, over 972630.03 frames.], batch size: 23, lr: 1.17e-04 +2022-05-09 17:29:53,617 INFO 
[train.py:715] (7/8) Epoch 19, batch 20000, loss[loss=0.1064, simple_loss=0.1728, pruned_loss=0.01996, over 4987.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02854, over 972677.72 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:30:33,008 INFO [train.py:715] (7/8) Epoch 19, batch 20050, loss[loss=0.1315, simple_loss=0.2064, pruned_loss=0.02829, over 4809.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02822, over 973116.17 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 17:31:12,642 INFO [train.py:715] (7/8) Epoch 19, batch 20100, loss[loss=0.1406, simple_loss=0.2147, pruned_loss=0.03323, over 4747.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2069, pruned_loss=0.02898, over 972305.36 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:31:52,173 INFO [train.py:715] (7/8) Epoch 19, batch 20150, loss[loss=0.1398, simple_loss=0.2145, pruned_loss=0.03253, over 4833.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02909, over 972867.05 frames.], batch size: 30, lr: 1.17e-04 +2022-05-09 17:32:31,818 INFO [train.py:715] (7/8) Epoch 19, batch 20200, loss[loss=0.1329, simple_loss=0.2069, pruned_loss=0.02947, over 4970.00 frames.], tot_loss[loss=0.1328, simple_loss=0.2072, pruned_loss=0.02917, over 973657.58 frames.], batch size: 35, lr: 1.17e-04 +2022-05-09 17:33:11,350 INFO [train.py:715] (7/8) Epoch 19, batch 20250, loss[loss=0.142, simple_loss=0.2087, pruned_loss=0.03765, over 4676.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2064, pruned_loss=0.0286, over 973632.44 frames.], batch size: 13, lr: 1.17e-04 +2022-05-09 17:33:50,683 INFO [train.py:715] (7/8) Epoch 19, batch 20300, loss[loss=0.1225, simple_loss=0.1946, pruned_loss=0.02522, over 4949.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2059, pruned_loss=0.0285, over 974098.68 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 17:34:30,224 INFO [train.py:715] (7/8) Epoch 19, batch 20350, loss[loss=0.1373, simple_loss=0.2091, pruned_loss=0.03277, over 4689.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2047, pruned_loss=0.028, over 971936.07 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:35:09,432 INFO [train.py:715] (7/8) Epoch 19, batch 20400, loss[loss=0.1442, simple_loss=0.2194, pruned_loss=0.03448, over 4736.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2048, pruned_loss=0.02821, over 971405.29 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:35:48,301 INFO [train.py:715] (7/8) Epoch 19, batch 20450, loss[loss=0.1456, simple_loss=0.222, pruned_loss=0.03455, over 4946.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2057, pruned_loss=0.02796, over 971858.01 frames.], batch size: 21, lr: 1.17e-04 +2022-05-09 17:36:28,043 INFO [train.py:715] (7/8) Epoch 19, batch 20500, loss[loss=0.1509, simple_loss=0.2268, pruned_loss=0.03751, over 4874.00 frames.], tot_loss[loss=0.1298, simple_loss=0.205, pruned_loss=0.02737, over 971836.65 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:37:07,746 INFO [train.py:715] (7/8) Epoch 19, batch 20550, loss[loss=0.1349, simple_loss=0.2092, pruned_loss=0.03029, over 4823.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2054, pruned_loss=0.02795, over 971557.84 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:37:46,563 INFO [train.py:715] (7/8) Epoch 19, batch 20600, loss[loss=0.1459, simple_loss=0.2109, pruned_loss=0.04051, over 4791.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02831, over 971846.19 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:38:26,012 INFO [train.py:715] 
(7/8) Epoch 19, batch 20650, loss[loss=0.1018, simple_loss=0.1818, pruned_loss=0.01091, over 4750.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2054, pruned_loss=0.02812, over 971498.95 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:39:05,343 INFO [train.py:715] (7/8) Epoch 19, batch 20700, loss[loss=0.1299, simple_loss=0.2048, pruned_loss=0.02746, over 4741.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2047, pruned_loss=0.02789, over 971404.20 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:39:44,822 INFO [train.py:715] (7/8) Epoch 19, batch 20750, loss[loss=0.1434, simple_loss=0.2201, pruned_loss=0.03332, over 4805.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02821, over 970980.67 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:40:23,535 INFO [train.py:715] (7/8) Epoch 19, batch 20800, loss[loss=0.1012, simple_loss=0.1791, pruned_loss=0.01167, over 4798.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2046, pruned_loss=0.02791, over 970679.71 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:41:02,810 INFO [train.py:715] (7/8) Epoch 19, batch 20850, loss[loss=0.1148, simple_loss=0.1865, pruned_loss=0.02154, over 4840.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02835, over 971232.56 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:41:42,484 INFO [train.py:715] (7/8) Epoch 19, batch 20900, loss[loss=0.134, simple_loss=0.2049, pruned_loss=0.03158, over 4787.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2049, pruned_loss=0.02812, over 971524.22 frames.], batch size: 12, lr: 1.17e-04 +2022-05-09 17:42:21,289 INFO [train.py:715] (7/8) Epoch 19, batch 20950, loss[loss=0.09556, simple_loss=0.1695, pruned_loss=0.01083, over 4813.00 frames.], tot_loss[loss=0.13, simple_loss=0.2044, pruned_loss=0.02777, over 971998.55 frames.], batch size: 26, lr: 1.17e-04 +2022-05-09 17:43:01,035 INFO [train.py:715] (7/8) Epoch 19, batch 21000, loss[loss=0.1285, simple_loss=0.2147, pruned_loss=0.02118, over 4881.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02825, over 971586.13 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:43:01,035 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 17:43:11,506 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1878, pruned_loss=0.01062, over 914524.00 frames. 
+2022-05-09 17:43:51,338 INFO [train.py:715] (7/8) Epoch 19, batch 21050, loss[loss=0.1409, simple_loss=0.2135, pruned_loss=0.03411, over 4950.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2056, pruned_loss=0.02863, over 971453.47 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:44:31,303 INFO [train.py:715] (7/8) Epoch 19, batch 21100, loss[loss=0.1126, simple_loss=0.1828, pruned_loss=0.02116, over 4908.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.0286, over 971866.79 frames.], batch size: 18, lr: 1.17e-04 +2022-05-09 17:45:10,119 INFO [train.py:715] (7/8) Epoch 19, batch 21150, loss[loss=0.1267, simple_loss=0.2013, pruned_loss=0.026, over 4872.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2068, pruned_loss=0.02924, over 971893.70 frames.], batch size: 16, lr: 1.17e-04 +2022-05-09 17:45:49,703 INFO [train.py:715] (7/8) Epoch 19, batch 21200, loss[loss=0.1309, simple_loss=0.2017, pruned_loss=0.03009, over 4702.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2061, pruned_loss=0.02858, over 971437.45 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:46:28,951 INFO [train.py:715] (7/8) Epoch 19, batch 21250, loss[loss=0.1373, simple_loss=0.2265, pruned_loss=0.0241, over 4880.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02835, over 971139.91 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:47:07,992 INFO [train.py:715] (7/8) Epoch 19, batch 21300, loss[loss=0.1367, simple_loss=0.2219, pruned_loss=0.02573, over 4849.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02823, over 972128.82 frames.], batch size: 20, lr: 1.17e-04 +2022-05-09 17:47:46,807 INFO [train.py:715] (7/8) Epoch 19, batch 21350, loss[loss=0.1098, simple_loss=0.181, pruned_loss=0.01929, over 4986.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02824, over 972592.39 frames.], batch size: 25, lr: 1.17e-04 +2022-05-09 17:48:26,341 INFO [train.py:715] (7/8) Epoch 19, batch 21400, loss[loss=0.1251, simple_loss=0.2122, pruned_loss=0.01898, over 4972.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02839, over 972558.08 frames.], batch size: 28, lr: 1.17e-04 +2022-05-09 17:49:05,856 INFO [train.py:715] (7/8) Epoch 19, batch 21450, loss[loss=0.1185, simple_loss=0.2039, pruned_loss=0.01652, over 4794.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02783, over 972289.16 frames.], batch size: 24, lr: 1.17e-04 +2022-05-09 17:49:44,646 INFO [train.py:715] (7/8) Epoch 19, batch 21500, loss[loss=0.1182, simple_loss=0.189, pruned_loss=0.02373, over 4899.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2055, pruned_loss=0.02787, over 972378.74 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:50:24,358 INFO [train.py:715] (7/8) Epoch 19, batch 21550, loss[loss=0.1315, simple_loss=0.2062, pruned_loss=0.02839, over 4893.00 frames.], tot_loss[loss=0.13, simple_loss=0.2047, pruned_loss=0.02764, over 973309.98 frames.], batch size: 19, lr: 1.17e-04 +2022-05-09 17:51:04,082 INFO [train.py:715] (7/8) Epoch 19, batch 21600, loss[loss=0.1331, simple_loss=0.2087, pruned_loss=0.02872, over 4841.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02793, over 972862.32 frames.], batch size: 15, lr: 1.17e-04 +2022-05-09 17:51:43,840 INFO [train.py:715] (7/8) Epoch 19, batch 21650, loss[loss=0.1371, simple_loss=0.2142, pruned_loss=0.02995, over 4808.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2055, pruned_loss=0.02788, over 972541.74 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 
17:52:22,735 INFO [train.py:715] (7/8) Epoch 19, batch 21700, loss[loss=0.148, simple_loss=0.2196, pruned_loss=0.03823, over 4987.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02858, over 972929.04 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 17:53:02,146 INFO [train.py:715] (7/8) Epoch 19, batch 21750, loss[loss=0.1394, simple_loss=0.22, pruned_loss=0.0294, over 4978.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02838, over 972307.27 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 17:53:43,115 INFO [train.py:715] (7/8) Epoch 19, batch 21800, loss[loss=0.1218, simple_loss=0.2001, pruned_loss=0.02176, over 4932.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2049, pruned_loss=0.02802, over 973375.33 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 17:54:22,936 INFO [train.py:715] (7/8) Epoch 19, batch 21850, loss[loss=0.1277, simple_loss=0.197, pruned_loss=0.02923, over 4820.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02811, over 972448.01 frames.], batch size: 27, lr: 1.16e-04 +2022-05-09 17:55:03,314 INFO [train.py:715] (7/8) Epoch 19, batch 21900, loss[loss=0.1252, simple_loss=0.1978, pruned_loss=0.02631, over 4810.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2051, pruned_loss=0.02822, over 972858.32 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 17:55:43,308 INFO [train.py:715] (7/8) Epoch 19, batch 21950, loss[loss=0.1433, simple_loss=0.2071, pruned_loss=0.03977, over 4801.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.0285, over 973215.44 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 17:56:22,526 INFO [train.py:715] (7/8) Epoch 19, batch 22000, loss[loss=0.1046, simple_loss=0.1803, pruned_loss=0.01448, over 4883.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02877, over 972837.84 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 17:57:01,820 INFO [train.py:715] (7/8) Epoch 19, batch 22050, loss[loss=0.1445, simple_loss=0.2203, pruned_loss=0.03432, over 4913.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2054, pruned_loss=0.02855, over 972998.73 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 17:57:41,389 INFO [train.py:715] (7/8) Epoch 19, batch 22100, loss[loss=0.1188, simple_loss=0.1833, pruned_loss=0.02716, over 4832.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2048, pruned_loss=0.02866, over 972765.84 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 17:58:21,391 INFO [train.py:715] (7/8) Epoch 19, batch 22150, loss[loss=0.1278, simple_loss=0.2027, pruned_loss=0.02647, over 4857.00 frames.], tot_loss[loss=0.1305, simple_loss=0.204, pruned_loss=0.02847, over 972844.41 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 17:59:00,690 INFO [train.py:715] (7/8) Epoch 19, batch 22200, loss[loss=0.1151, simple_loss=0.1914, pruned_loss=0.01941, over 4856.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2051, pruned_loss=0.02882, over 973362.05 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 17:59:40,599 INFO [train.py:715] (7/8) Epoch 19, batch 22250, loss[loss=0.1292, simple_loss=0.2092, pruned_loss=0.02458, over 4940.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2045, pruned_loss=0.02825, over 973511.51 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:00:20,345 INFO [train.py:715] (7/8) Epoch 19, batch 22300, loss[loss=0.1334, simple_loss=0.1978, pruned_loss=0.03447, over 4812.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2041, pruned_loss=0.02789, over 972696.55 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:00:59,299 
INFO [train.py:715] (7/8) Epoch 19, batch 22350, loss[loss=0.1269, simple_loss=0.2026, pruned_loss=0.02555, over 4867.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2053, pruned_loss=0.02817, over 971782.88 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:01:38,320 INFO [train.py:715] (7/8) Epoch 19, batch 22400, loss[loss=0.1345, simple_loss=0.211, pruned_loss=0.02899, over 4824.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02804, over 971730.96 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 18:02:17,609 INFO [train.py:715] (7/8) Epoch 19, batch 22450, loss[loss=0.1184, simple_loss=0.1919, pruned_loss=0.02244, over 4776.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2052, pruned_loss=0.02819, over 971930.14 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:02:57,543 INFO [train.py:715] (7/8) Epoch 19, batch 22500, loss[loss=0.123, simple_loss=0.1975, pruned_loss=0.02422, over 4947.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2048, pruned_loss=0.02799, over 972322.05 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:03:36,398 INFO [train.py:715] (7/8) Epoch 19, batch 22550, loss[loss=0.1279, simple_loss=0.1984, pruned_loss=0.02868, over 4969.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2042, pruned_loss=0.02777, over 972957.23 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:04:16,059 INFO [train.py:715] (7/8) Epoch 19, batch 22600, loss[loss=0.1465, simple_loss=0.2285, pruned_loss=0.03228, over 4812.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2047, pruned_loss=0.02794, over 972429.45 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:04:55,711 INFO [train.py:715] (7/8) Epoch 19, batch 22650, loss[loss=0.1295, simple_loss=0.2011, pruned_loss=0.02894, over 4962.00 frames.], tot_loss[loss=0.1307, simple_loss=0.205, pruned_loss=0.02814, over 972432.03 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 18:05:34,685 INFO [train.py:715] (7/8) Epoch 19, batch 22700, loss[loss=0.1252, simple_loss=0.202, pruned_loss=0.02421, over 4864.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2048, pruned_loss=0.02788, over 972753.06 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:06:13,670 INFO [train.py:715] (7/8) Epoch 19, batch 22750, loss[loss=0.1157, simple_loss=0.1914, pruned_loss=0.02004, over 4807.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02836, over 973407.15 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:06:53,395 INFO [train.py:715] (7/8) Epoch 19, batch 22800, loss[loss=0.1383, simple_loss=0.2209, pruned_loss=0.02786, over 4980.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2041, pruned_loss=0.0281, over 972711.08 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:07:33,728 INFO [train.py:715] (7/8) Epoch 19, batch 22850, loss[loss=0.1291, simple_loss=0.2078, pruned_loss=0.02517, over 4961.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2049, pruned_loss=0.02862, over 973258.69 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:08:11,747 INFO [train.py:715] (7/8) Epoch 19, batch 22900, loss[loss=0.1241, simple_loss=0.208, pruned_loss=0.02011, over 4880.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02837, over 972875.98 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 18:08:51,141 INFO [train.py:715] (7/8) Epoch 19, batch 22950, loss[loss=0.108, simple_loss=0.1772, pruned_loss=0.01938, over 4833.00 frames.], tot_loss[loss=0.1306, simple_loss=0.205, pruned_loss=0.02808, over 972815.53 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 18:09:31,741 INFO 
[train.py:715] (7/8) Epoch 19, batch 23000, loss[loss=0.1245, simple_loss=0.193, pruned_loss=0.02796, over 4793.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2043, pruned_loss=0.02807, over 972279.82 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:10:12,243 INFO [train.py:715] (7/8) Epoch 19, batch 23050, loss[loss=0.1376, simple_loss=0.2051, pruned_loss=0.03506, over 4824.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2048, pruned_loss=0.02794, over 972830.79 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:10:52,465 INFO [train.py:715] (7/8) Epoch 19, batch 23100, loss[loss=0.1613, simple_loss=0.229, pruned_loss=0.04679, over 4798.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02837, over 972950.80 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:11:33,198 INFO [train.py:715] (7/8) Epoch 19, batch 23150, loss[loss=0.1134, simple_loss=0.1855, pruned_loss=0.02062, over 4646.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02855, over 972290.20 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:12:14,189 INFO [train.py:715] (7/8) Epoch 19, batch 23200, loss[loss=0.1277, simple_loss=0.1991, pruned_loss=0.02817, over 4919.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02857, over 971400.82 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:12:53,610 INFO [train.py:715] (7/8) Epoch 19, batch 23250, loss[loss=0.1383, simple_loss=0.2124, pruned_loss=0.03215, over 4751.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02848, over 971271.89 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:13:34,365 INFO [train.py:715] (7/8) Epoch 19, batch 23300, loss[loss=0.116, simple_loss=0.1866, pruned_loss=0.0227, over 4773.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02848, over 971258.44 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:14:16,102 INFO [train.py:715] (7/8) Epoch 19, batch 23350, loss[loss=0.1333, simple_loss=0.1979, pruned_loss=0.03436, over 4916.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.0285, over 971252.93 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:14:56,713 INFO [train.py:715] (7/8) Epoch 19, batch 23400, loss[loss=0.1392, simple_loss=0.2132, pruned_loss=0.03262, over 4699.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02871, over 971625.29 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:15:37,873 INFO [train.py:715] (7/8) Epoch 19, batch 23450, loss[loss=0.125, simple_loss=0.2086, pruned_loss=0.02073, over 4815.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2063, pruned_loss=0.02843, over 971265.45 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:16:19,142 INFO [train.py:715] (7/8) Epoch 19, batch 23500, loss[loss=0.1406, simple_loss=0.2194, pruned_loss=0.03094, over 4865.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2066, pruned_loss=0.0285, over 972309.86 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:17:00,518 INFO [train.py:715] (7/8) Epoch 19, batch 23550, loss[loss=0.1329, simple_loss=0.2008, pruned_loss=0.03248, over 4820.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02829, over 972722.10 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:17:41,325 INFO [train.py:715] (7/8) Epoch 19, batch 23600, loss[loss=0.1203, simple_loss=0.1932, pruned_loss=0.02373, over 4807.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02802, over 972167.99 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 18:18:22,140 INFO 
[train.py:715] (7/8) Epoch 19, batch 23650, loss[loss=0.1217, simple_loss=0.1923, pruned_loss=0.0255, over 4837.00 frames.], tot_loss[loss=0.1303, simple_loss=0.205, pruned_loss=0.02784, over 971770.49 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:19:04,124 INFO [train.py:715] (7/8) Epoch 19, batch 23700, loss[loss=0.1077, simple_loss=0.1796, pruned_loss=0.01795, over 4844.00 frames.], tot_loss[loss=0.1291, simple_loss=0.2038, pruned_loss=0.02721, over 972076.57 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 18:19:44,518 INFO [train.py:715] (7/8) Epoch 19, batch 23750, loss[loss=0.1175, simple_loss=0.1919, pruned_loss=0.02159, over 4834.00 frames.], tot_loss[loss=0.13, simple_loss=0.2046, pruned_loss=0.02774, over 971952.18 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:20:24,721 INFO [train.py:715] (7/8) Epoch 19, batch 23800, loss[loss=0.1358, simple_loss=0.2041, pruned_loss=0.03373, over 4829.00 frames.], tot_loss[loss=0.13, simple_loss=0.2044, pruned_loss=0.02779, over 971593.22 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:21:05,132 INFO [train.py:715] (7/8) Epoch 19, batch 23850, loss[loss=0.1176, simple_loss=0.1876, pruned_loss=0.02375, over 4773.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2049, pruned_loss=0.02778, over 971261.95 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:21:45,587 INFO [train.py:715] (7/8) Epoch 19, batch 23900, loss[loss=0.1297, simple_loss=0.2031, pruned_loss=0.02819, over 4979.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2047, pruned_loss=0.02758, over 971310.67 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:22:24,883 INFO [train.py:715] (7/8) Epoch 19, batch 23950, loss[loss=0.1577, simple_loss=0.2192, pruned_loss=0.04807, over 4857.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02783, over 971487.14 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 18:23:05,244 INFO [train.py:715] (7/8) Epoch 19, batch 24000, loss[loss=0.155, simple_loss=0.2317, pruned_loss=0.03916, over 4823.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2055, pruned_loss=0.02781, over 972108.92 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:23:05,245 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 18:23:15,158 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1046, simple_loss=0.1878, pruned_loss=0.01073, over 914524.00 frames. 
+2022-05-09 18:23:55,485 INFO [train.py:715] (7/8) Epoch 19, batch 24050, loss[loss=0.1429, simple_loss=0.2081, pruned_loss=0.03882, over 4979.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2048, pruned_loss=0.02723, over 972161.63 frames.], batch size: 35, lr: 1.16e-04 +2022-05-09 18:24:36,272 INFO [train.py:715] (7/8) Epoch 19, batch 24100, loss[loss=0.1332, simple_loss=0.214, pruned_loss=0.02624, over 4985.00 frames.], tot_loss[loss=0.1294, simple_loss=0.2041, pruned_loss=0.02733, over 972529.12 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:25:16,110 INFO [train.py:715] (7/8) Epoch 19, batch 24150, loss[loss=0.1087, simple_loss=0.1846, pruned_loss=0.0164, over 4950.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2044, pruned_loss=0.02739, over 972917.15 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:25:56,281 INFO [train.py:715] (7/8) Epoch 19, batch 24200, loss[loss=0.1186, simple_loss=0.1897, pruned_loss=0.02373, over 4822.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2047, pruned_loss=0.02753, over 973295.54 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 18:26:36,614 INFO [train.py:715] (7/8) Epoch 19, batch 24250, loss[loss=0.1162, simple_loss=0.1897, pruned_loss=0.02142, over 4920.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2047, pruned_loss=0.02732, over 972888.31 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 18:27:17,347 INFO [train.py:715] (7/8) Epoch 19, batch 24300, loss[loss=0.1395, simple_loss=0.2168, pruned_loss=0.03111, over 4829.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02782, over 971979.86 frames.], batch size: 27, lr: 1.16e-04 +2022-05-09 18:27:56,390 INFO [train.py:715] (7/8) Epoch 19, batch 24350, loss[loss=0.1521, simple_loss=0.2184, pruned_loss=0.0429, over 4790.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2051, pruned_loss=0.02786, over 971414.46 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:28:36,033 INFO [train.py:715] (7/8) Epoch 19, batch 24400, loss[loss=0.1426, simple_loss=0.2197, pruned_loss=0.03277, over 4872.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02811, over 972199.28 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 18:29:16,437 INFO [train.py:715] (7/8) Epoch 19, batch 24450, loss[loss=0.1083, simple_loss=0.178, pruned_loss=0.01932, over 4751.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2048, pruned_loss=0.0277, over 971966.44 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:29:55,851 INFO [train.py:715] (7/8) Epoch 19, batch 24500, loss[loss=0.1188, simple_loss=0.194, pruned_loss=0.02182, over 4908.00 frames.], tot_loss[loss=0.13, simple_loss=0.2046, pruned_loss=0.02773, over 971849.89 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 18:30:34,373 INFO [train.py:715] (7/8) Epoch 19, batch 24550, loss[loss=0.1455, simple_loss=0.2138, pruned_loss=0.03859, over 4829.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02817, over 971970.52 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:31:13,261 INFO [train.py:715] (7/8) Epoch 19, batch 24600, loss[loss=0.1367, simple_loss=0.2034, pruned_loss=0.03494, over 4973.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02832, over 971812.89 frames.], batch size: 40, lr: 1.16e-04 +2022-05-09 18:31:52,757 INFO [train.py:715] (7/8) Epoch 19, batch 24650, loss[loss=0.1004, simple_loss=0.1643, pruned_loss=0.0182, over 4755.00 frames.], tot_loss[loss=0.1313, simple_loss=0.206, pruned_loss=0.02826, over 971704.16 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 
18:32:31,487 INFO [train.py:715] (7/8) Epoch 19, batch 24700, loss[loss=0.1342, simple_loss=0.2025, pruned_loss=0.033, over 4892.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02865, over 971606.80 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:33:10,029 INFO [train.py:715] (7/8) Epoch 19, batch 24750, loss[loss=0.1507, simple_loss=0.2168, pruned_loss=0.04232, over 4920.00 frames.], tot_loss[loss=0.1326, simple_loss=0.2073, pruned_loss=0.0289, over 971857.08 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:33:50,382 INFO [train.py:715] (7/8) Epoch 19, batch 24800, loss[loss=0.1567, simple_loss=0.2266, pruned_loss=0.04342, over 4829.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2068, pruned_loss=0.02873, over 971639.70 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:34:30,022 INFO [train.py:715] (7/8) Epoch 19, batch 24850, loss[loss=0.122, simple_loss=0.2003, pruned_loss=0.02189, over 4767.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2058, pruned_loss=0.02836, over 970882.32 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:35:09,087 INFO [train.py:715] (7/8) Epoch 19, batch 24900, loss[loss=0.1197, simple_loss=0.1917, pruned_loss=0.02382, over 4684.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2062, pruned_loss=0.02854, over 970660.70 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:35:48,551 INFO [train.py:715] (7/8) Epoch 19, batch 24950, loss[loss=0.1328, simple_loss=0.204, pruned_loss=0.03086, over 4810.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2071, pruned_loss=0.0289, over 971728.89 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 18:36:28,349 INFO [train.py:715] (7/8) Epoch 19, batch 25000, loss[loss=0.1412, simple_loss=0.2023, pruned_loss=0.0401, over 4786.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2072, pruned_loss=0.02868, over 972980.30 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:37:07,184 INFO [train.py:715] (7/8) Epoch 19, batch 25050, loss[loss=0.1393, simple_loss=0.211, pruned_loss=0.03375, over 4762.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2065, pruned_loss=0.02841, over 972494.31 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 18:37:46,489 INFO [train.py:715] (7/8) Epoch 19, batch 25100, loss[loss=0.1206, simple_loss=0.1945, pruned_loss=0.02338, over 4679.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02841, over 971275.37 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:38:26,083 INFO [train.py:715] (7/8) Epoch 19, batch 25150, loss[loss=0.1268, simple_loss=0.2097, pruned_loss=0.02193, over 4977.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02837, over 971158.09 frames.], batch size: 35, lr: 1.16e-04 +2022-05-09 18:39:05,721 INFO [train.py:715] (7/8) Epoch 19, batch 25200, loss[loss=0.1375, simple_loss=0.1977, pruned_loss=0.03864, over 4877.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2057, pruned_loss=0.02856, over 970857.90 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 18:39:44,331 INFO [train.py:715] (7/8) Epoch 19, batch 25250, loss[loss=0.163, simple_loss=0.2395, pruned_loss=0.04329, over 4758.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2066, pruned_loss=0.02875, over 970549.30 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 18:40:23,572 INFO [train.py:715] (7/8) Epoch 19, batch 25300, loss[loss=0.1429, simple_loss=0.2211, pruned_loss=0.03237, over 4842.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02876, over 969651.03 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:41:03,216 
INFO [train.py:715] (7/8) Epoch 19, batch 25350, loss[loss=0.1474, simple_loss=0.2267, pruned_loss=0.03401, over 4850.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2068, pruned_loss=0.02911, over 971293.22 frames.], batch size: 34, lr: 1.16e-04 +2022-05-09 18:41:42,427 INFO [train.py:715] (7/8) Epoch 19, batch 25400, loss[loss=0.1188, simple_loss=0.1931, pruned_loss=0.0223, over 4759.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02873, over 971423.17 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 18:42:21,497 INFO [train.py:715] (7/8) Epoch 19, batch 25450, loss[loss=0.1266, simple_loss=0.2043, pruned_loss=0.0244, over 4973.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02874, over 971201.19 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:43:00,714 INFO [train.py:715] (7/8) Epoch 19, batch 25500, loss[loss=0.1333, simple_loss=0.2054, pruned_loss=0.03064, over 4839.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02819, over 971432.54 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:43:39,823 INFO [train.py:715] (7/8) Epoch 19, batch 25550, loss[loss=0.1605, simple_loss=0.2241, pruned_loss=0.04846, over 4860.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02806, over 971627.94 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 18:44:18,030 INFO [train.py:715] (7/8) Epoch 19, batch 25600, loss[loss=0.1605, simple_loss=0.2291, pruned_loss=0.04595, over 4908.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02811, over 971302.21 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 18:44:56,959 INFO [train.py:715] (7/8) Epoch 19, batch 25650, loss[loss=0.1572, simple_loss=0.2183, pruned_loss=0.04812, over 4783.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02824, over 971458.15 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 18:45:36,002 INFO [train.py:715] (7/8) Epoch 19, batch 25700, loss[loss=0.1558, simple_loss=0.235, pruned_loss=0.03829, over 4777.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.0283, over 972002.95 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 18:46:14,563 INFO [train.py:715] (7/8) Epoch 19, batch 25750, loss[loss=0.1601, simple_loss=0.2279, pruned_loss=0.04612, over 4803.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02834, over 972073.94 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:46:53,572 INFO [train.py:715] (7/8) Epoch 19, batch 25800, loss[loss=0.1281, simple_loss=0.201, pruned_loss=0.02765, over 4788.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02831, over 971527.85 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:47:32,968 INFO [train.py:715] (7/8) Epoch 19, batch 25850, loss[loss=0.1271, simple_loss=0.1945, pruned_loss=0.02982, over 4835.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2055, pruned_loss=0.0283, over 971170.90 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 18:48:12,298 INFO [train.py:715] (7/8) Epoch 19, batch 25900, loss[loss=0.1255, simple_loss=0.2069, pruned_loss=0.02203, over 4775.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02824, over 972160.40 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:48:50,879 INFO [train.py:715] (7/8) Epoch 19, batch 25950, loss[loss=0.1526, simple_loss=0.2301, pruned_loss=0.03754, over 4813.00 frames.], tot_loss[loss=0.1307, simple_loss=0.205, pruned_loss=0.02819, over 971811.99 frames.], batch size: 27, lr: 1.16e-04 +2022-05-09 18:49:30,502 INFO 
[train.py:715] (7/8) Epoch 19, batch 26000, loss[loss=0.1327, simple_loss=0.2041, pruned_loss=0.03066, over 4831.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2047, pruned_loss=0.02831, over 971638.87 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:50:10,474 INFO [train.py:715] (7/8) Epoch 19, batch 26050, loss[loss=0.1306, simple_loss=0.2087, pruned_loss=0.02618, over 4988.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2049, pruned_loss=0.02843, over 972328.13 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:50:49,156 INFO [train.py:715] (7/8) Epoch 19, batch 26100, loss[loss=0.1532, simple_loss=0.2254, pruned_loss=0.04057, over 4746.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2048, pruned_loss=0.0284, over 971680.66 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 18:51:28,555 INFO [train.py:715] (7/8) Epoch 19, batch 26150, loss[loss=0.1039, simple_loss=0.1862, pruned_loss=0.01082, over 4793.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2045, pruned_loss=0.02808, over 970520.35 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:52:07,555 INFO [train.py:715] (7/8) Epoch 19, batch 26200, loss[loss=0.1821, simple_loss=0.2489, pruned_loss=0.05769, over 4982.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.02814, over 971015.07 frames.], batch size: 40, lr: 1.16e-04 +2022-05-09 18:52:47,077 INFO [train.py:715] (7/8) Epoch 19, batch 26250, loss[loss=0.1298, simple_loss=0.2041, pruned_loss=0.02775, over 4964.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2047, pruned_loss=0.02827, over 971498.44 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:53:25,458 INFO [train.py:715] (7/8) Epoch 19, batch 26300, loss[loss=0.1239, simple_loss=0.1961, pruned_loss=0.02585, over 4823.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02824, over 971455.00 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:54:04,839 INFO [train.py:715] (7/8) Epoch 19, batch 26350, loss[loss=0.1259, simple_loss=0.205, pruned_loss=0.02342, over 4828.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2049, pruned_loss=0.02822, over 971066.99 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 18:54:44,062 INFO [train.py:715] (7/8) Epoch 19, batch 26400, loss[loss=0.1656, simple_loss=0.2451, pruned_loss=0.04312, over 4867.00 frames.], tot_loss[loss=0.131, simple_loss=0.2059, pruned_loss=0.02811, over 971767.13 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 18:55:23,158 INFO [train.py:715] (7/8) Epoch 19, batch 26450, loss[loss=0.1184, simple_loss=0.1935, pruned_loss=0.02171, over 4807.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02823, over 972133.61 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 18:56:02,215 INFO [train.py:715] (7/8) Epoch 19, batch 26500, loss[loss=0.1132, simple_loss=0.1827, pruned_loss=0.02179, over 4838.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02831, over 971667.86 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 18:56:40,884 INFO [train.py:715] (7/8) Epoch 19, batch 26550, loss[loss=0.1241, simple_loss=0.2057, pruned_loss=0.02126, over 4958.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02823, over 971331.88 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 18:57:21,643 INFO [train.py:715] (7/8) Epoch 19, batch 26600, loss[loss=0.126, simple_loss=0.1964, pruned_loss=0.02781, over 4883.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02816, over 972063.32 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 18:58:02,781 INFO 
[train.py:715] (7/8) Epoch 19, batch 26650, loss[loss=0.1161, simple_loss=0.1817, pruned_loss=0.02518, over 4768.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2048, pruned_loss=0.02766, over 971991.50 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 18:58:41,694 INFO [train.py:715] (7/8) Epoch 19, batch 26700, loss[loss=0.1155, simple_loss=0.1901, pruned_loss=0.02045, over 4649.00 frames.], tot_loss[loss=0.1295, simple_loss=0.2043, pruned_loss=0.02734, over 971905.31 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 18:59:21,010 INFO [train.py:715] (7/8) Epoch 19, batch 26750, loss[loss=0.1471, simple_loss=0.2185, pruned_loss=0.03786, over 4923.00 frames.], tot_loss[loss=0.129, simple_loss=0.204, pruned_loss=0.027, over 972474.38 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:00:00,955 INFO [train.py:715] (7/8) Epoch 19, batch 26800, loss[loss=0.1193, simple_loss=0.1945, pruned_loss=0.02205, over 4846.00 frames.], tot_loss[loss=0.1287, simple_loss=0.2037, pruned_loss=0.02687, over 972229.35 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:00:41,177 INFO [train.py:715] (7/8) Epoch 19, batch 26850, loss[loss=0.1506, simple_loss=0.237, pruned_loss=0.03209, over 4872.00 frames.], tot_loss[loss=0.1297, simple_loss=0.2043, pruned_loss=0.02758, over 971876.96 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:01:20,365 INFO [train.py:715] (7/8) Epoch 19, batch 26900, loss[loss=0.1387, simple_loss=0.214, pruned_loss=0.03172, over 4754.00 frames.], tot_loss[loss=0.1292, simple_loss=0.2037, pruned_loss=0.02731, over 971638.09 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:02:00,253 INFO [train.py:715] (7/8) Epoch 19, batch 26950, loss[loss=0.1209, simple_loss=0.1975, pruned_loss=0.02213, over 4826.00 frames.], tot_loss[loss=0.1293, simple_loss=0.2038, pruned_loss=0.02739, over 971434.11 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:02:39,715 INFO [train.py:715] (7/8) Epoch 19, batch 27000, loss[loss=0.1414, simple_loss=0.2136, pruned_loss=0.03456, over 4886.00 frames.], tot_loss[loss=0.1294, simple_loss=0.204, pruned_loss=0.0274, over 971760.19 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:02:39,715 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 19:02:49,600 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1047, simple_loss=0.1878, pruned_loss=0.0108, over 914524.00 frames. 
+2022-05-09 19:03:29,473 INFO [train.py:715] (7/8) Epoch 19, batch 27050, loss[loss=0.1405, simple_loss=0.2178, pruned_loss=0.0316, over 4790.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2053, pruned_loss=0.02826, over 972807.58 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:04:09,789 INFO [train.py:715] (7/8) Epoch 19, batch 27100, loss[loss=0.1267, simple_loss=0.1942, pruned_loss=0.02961, over 4740.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2051, pruned_loss=0.02796, over 973117.86 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:04:50,663 INFO [train.py:715] (7/8) Epoch 19, batch 27150, loss[loss=0.1374, simple_loss=0.2132, pruned_loss=0.03082, over 4900.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2044, pruned_loss=0.02767, over 973315.02 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:05:30,593 INFO [train.py:715] (7/8) Epoch 19, batch 27200, loss[loss=0.1514, simple_loss=0.2176, pruned_loss=0.04259, over 4980.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2044, pruned_loss=0.02787, over 972964.97 frames.], batch size: 28, lr: 1.16e-04 +2022-05-09 19:06:11,128 INFO [train.py:715] (7/8) Epoch 19, batch 27250, loss[loss=0.1251, simple_loss=0.1966, pruned_loss=0.02683, over 4942.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2052, pruned_loss=0.02848, over 972763.76 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:06:52,922 INFO [train.py:715] (7/8) Epoch 19, batch 27300, loss[loss=0.139, simple_loss=0.2272, pruned_loss=0.02545, over 4919.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02884, over 973740.53 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:07:33,649 INFO [train.py:715] (7/8) Epoch 19, batch 27350, loss[loss=0.1108, simple_loss=0.1813, pruned_loss=0.02017, over 4827.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02854, over 973014.32 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:08:14,910 INFO [train.py:715] (7/8) Epoch 19, batch 27400, loss[loss=0.1127, simple_loss=0.1836, pruned_loss=0.02087, over 4962.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2049, pruned_loss=0.02795, over 972853.70 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:08:54,857 INFO [train.py:715] (7/8) Epoch 19, batch 27450, loss[loss=0.1369, simple_loss=0.21, pruned_loss=0.03192, over 4971.00 frames.], tot_loss[loss=0.1303, simple_loss=0.205, pruned_loss=0.02783, over 972875.08 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:09:36,483 INFO [train.py:715] (7/8) Epoch 19, batch 27500, loss[loss=0.1413, simple_loss=0.2111, pruned_loss=0.03574, over 4700.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2049, pruned_loss=0.02826, over 972409.77 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:10:17,084 INFO [train.py:715] (7/8) Epoch 19, batch 27550, loss[loss=0.1524, simple_loss=0.232, pruned_loss=0.03636, over 4935.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2049, pruned_loss=0.02834, over 971733.47 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:10:57,714 INFO [train.py:715] (7/8) Epoch 19, batch 27600, loss[loss=0.1255, simple_loss=0.2023, pruned_loss=0.0244, over 4764.00 frames.], tot_loss[loss=0.131, simple_loss=0.2052, pruned_loss=0.02835, over 971570.74 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:11:38,778 INFO [train.py:715] (7/8) Epoch 19, batch 27650, loss[loss=0.1405, simple_loss=0.216, pruned_loss=0.03252, over 4746.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2062, pruned_loss=0.02886, over 971368.53 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 
19:12:19,403 INFO [train.py:715] (7/8) Epoch 19, batch 27700, loss[loss=0.1531, simple_loss=0.2296, pruned_loss=0.03828, over 4960.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2054, pruned_loss=0.02838, over 972215.91 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:13:00,035 INFO [train.py:715] (7/8) Epoch 19, batch 27750, loss[loss=0.1403, simple_loss=0.2161, pruned_loss=0.03225, over 4869.00 frames.], tot_loss[loss=0.131, simple_loss=0.2053, pruned_loss=0.02838, over 972702.00 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:13:40,111 INFO [train.py:715] (7/8) Epoch 19, batch 27800, loss[loss=0.1239, simple_loss=0.2002, pruned_loss=0.02379, over 4884.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02876, over 972848.66 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 19:14:21,134 INFO [train.py:715] (7/8) Epoch 19, batch 27850, loss[loss=0.1544, simple_loss=0.226, pruned_loss=0.0414, over 4781.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2072, pruned_loss=0.02892, over 972122.65 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:15:01,161 INFO [train.py:715] (7/8) Epoch 19, batch 27900, loss[loss=0.1323, simple_loss=0.2102, pruned_loss=0.02717, over 4927.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02867, over 971671.02 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 19:15:41,281 INFO [train.py:715] (7/8) Epoch 19, batch 27950, loss[loss=0.1138, simple_loss=0.1868, pruned_loss=0.02039, over 4969.00 frames.], tot_loss[loss=0.1308, simple_loss=0.205, pruned_loss=0.02829, over 971811.32 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:16:21,260 INFO [train.py:715] (7/8) Epoch 19, batch 28000, loss[loss=0.1375, simple_loss=0.2141, pruned_loss=0.03048, over 4867.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02862, over 971967.16 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:17:02,124 INFO [train.py:715] (7/8) Epoch 19, batch 28050, loss[loss=0.1283, simple_loss=0.2147, pruned_loss=0.02095, over 4836.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2053, pruned_loss=0.02863, over 971443.85 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 19:17:42,530 INFO [train.py:715] (7/8) Epoch 19, batch 28100, loss[loss=0.1281, simple_loss=0.199, pruned_loss=0.02855, over 4790.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2058, pruned_loss=0.02848, over 971395.94 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:18:22,477 INFO [train.py:715] (7/8) Epoch 19, batch 28150, loss[loss=0.1469, simple_loss=0.2218, pruned_loss=0.03606, over 4762.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2069, pruned_loss=0.02894, over 971495.66 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:19:02,905 INFO [train.py:715] (7/8) Epoch 19, batch 28200, loss[loss=0.146, simple_loss=0.2157, pruned_loss=0.0381, over 4776.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02861, over 971479.82 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:19:42,614 INFO [train.py:715] (7/8) Epoch 19, batch 28250, loss[loss=0.1396, simple_loss=0.2224, pruned_loss=0.02836, over 4978.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02835, over 970649.42 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:20:22,523 INFO [train.py:715] (7/8) Epoch 19, batch 28300, loss[loss=0.1333, simple_loss=0.2111, pruned_loss=0.02775, over 4805.00 frames.], tot_loss[loss=0.1311, simple_loss=0.206, pruned_loss=0.02804, over 970285.36 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 19:21:02,196 
INFO [train.py:715] (7/8) Epoch 19, batch 28350, loss[loss=0.1415, simple_loss=0.2172, pruned_loss=0.0329, over 4818.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2063, pruned_loss=0.02836, over 971106.09 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:21:42,215 INFO [train.py:715] (7/8) Epoch 19, batch 28400, loss[loss=0.1048, simple_loss=0.1799, pruned_loss=0.01482, over 4946.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2051, pruned_loss=0.02785, over 970674.70 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:22:22,330 INFO [train.py:715] (7/8) Epoch 19, batch 28450, loss[loss=0.1421, simple_loss=0.2105, pruned_loss=0.03688, over 4963.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.0286, over 970843.54 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 19:23:02,157 INFO [train.py:715] (7/8) Epoch 19, batch 28500, loss[loss=0.1347, simple_loss=0.2121, pruned_loss=0.0287, over 4933.00 frames.], tot_loss[loss=0.1316, simple_loss=0.206, pruned_loss=0.02857, over 971098.45 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:23:42,831 INFO [train.py:715] (7/8) Epoch 19, batch 28550, loss[loss=0.1321, simple_loss=0.1964, pruned_loss=0.03387, over 4987.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2061, pruned_loss=0.02881, over 970987.07 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 19:24:22,312 INFO [train.py:715] (7/8) Epoch 19, batch 28600, loss[loss=0.1193, simple_loss=0.1932, pruned_loss=0.02265, over 4844.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2065, pruned_loss=0.02869, over 971429.69 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:25:02,353 INFO [train.py:715] (7/8) Epoch 19, batch 28650, loss[loss=0.1417, simple_loss=0.2123, pruned_loss=0.03554, over 4750.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2057, pruned_loss=0.02822, over 970911.41 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 19:25:43,122 INFO [train.py:715] (7/8) Epoch 19, batch 28700, loss[loss=0.1537, simple_loss=0.219, pruned_loss=0.04415, over 4790.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2052, pruned_loss=0.02797, over 971007.69 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:26:22,655 INFO [train.py:715] (7/8) Epoch 19, batch 28750, loss[loss=0.1246, simple_loss=0.1993, pruned_loss=0.02493, over 4633.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02839, over 970976.87 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:27:02,562 INFO [train.py:715] (7/8) Epoch 19, batch 28800, loss[loss=0.133, simple_loss=0.2018, pruned_loss=0.03216, over 4766.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02851, over 971544.76 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:27:41,939 INFO [train.py:715] (7/8) Epoch 19, batch 28850, loss[loss=0.1066, simple_loss=0.1812, pruned_loss=0.01601, over 4752.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2067, pruned_loss=0.02911, over 971407.21 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:28:21,328 INFO [train.py:715] (7/8) Epoch 19, batch 28900, loss[loss=0.1184, simple_loss=0.1948, pruned_loss=0.02104, over 4950.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2062, pruned_loss=0.02851, over 972479.28 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:28:59,437 INFO [train.py:715] (7/8) Epoch 19, batch 28950, loss[loss=0.1258, simple_loss=0.1979, pruned_loss=0.02691, over 4900.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02866, over 971557.77 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 19:29:38,327 INFO 
[train.py:715] (7/8) Epoch 19, batch 29000, loss[loss=0.1595, simple_loss=0.2318, pruned_loss=0.04359, over 4848.00 frames.], tot_loss[loss=0.132, simple_loss=0.2066, pruned_loss=0.02875, over 971638.88 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:30:17,559 INFO [train.py:715] (7/8) Epoch 19, batch 29050, loss[loss=0.1432, simple_loss=0.2196, pruned_loss=0.03336, over 4766.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2064, pruned_loss=0.02876, over 971515.72 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:30:56,439 INFO [train.py:715] (7/8) Epoch 19, batch 29100, loss[loss=0.1472, simple_loss=0.2139, pruned_loss=0.04027, over 4681.00 frames.], tot_loss[loss=0.132, simple_loss=0.2063, pruned_loss=0.02881, over 971365.45 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:31:35,384 INFO [train.py:715] (7/8) Epoch 19, batch 29150, loss[loss=0.1245, simple_loss=0.1942, pruned_loss=0.0274, over 4943.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02881, over 972611.44 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 19:32:14,165 INFO [train.py:715] (7/8) Epoch 19, batch 29200, loss[loss=0.1286, simple_loss=0.1963, pruned_loss=0.03048, over 4785.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02895, over 973247.75 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:32:53,532 INFO [train.py:715] (7/8) Epoch 19, batch 29250, loss[loss=0.1347, simple_loss=0.2072, pruned_loss=0.0311, over 4644.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2055, pruned_loss=0.02859, over 973740.47 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:33:32,158 INFO [train.py:715] (7/8) Epoch 19, batch 29300, loss[loss=0.1148, simple_loss=0.1905, pruned_loss=0.01949, over 4970.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2059, pruned_loss=0.02853, over 974602.89 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:34:11,678 INFO [train.py:715] (7/8) Epoch 19, batch 29350, loss[loss=0.1104, simple_loss=0.194, pruned_loss=0.01338, over 4933.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02836, over 974884.26 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 19:34:50,605 INFO [train.py:715] (7/8) Epoch 19, batch 29400, loss[loss=0.1628, simple_loss=0.2372, pruned_loss=0.0442, over 4923.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2063, pruned_loss=0.02824, over 974109.53 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:35:29,741 INFO [train.py:715] (7/8) Epoch 19, batch 29450, loss[loss=0.1241, simple_loss=0.1984, pruned_loss=0.0249, over 4872.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02838, over 974119.71 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 19:36:09,172 INFO [train.py:715] (7/8) Epoch 19, batch 29500, loss[loss=0.1693, simple_loss=0.2347, pruned_loss=0.05197, over 4833.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2076, pruned_loss=0.02872, over 972552.74 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:36:48,560 INFO [train.py:715] (7/8) Epoch 19, batch 29550, loss[loss=0.136, simple_loss=0.2146, pruned_loss=0.02866, over 4837.00 frames.], tot_loss[loss=0.1327, simple_loss=0.2075, pruned_loss=0.02894, over 972180.53 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 19:37:28,170 INFO [train.py:715] (7/8) Epoch 19, batch 29600, loss[loss=0.1042, simple_loss=0.1742, pruned_loss=0.0171, over 4911.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2074, pruned_loss=0.02875, over 972012.00 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:38:07,300 INFO [train.py:715] 
(7/8) Epoch 19, batch 29650, loss[loss=0.156, simple_loss=0.2389, pruned_loss=0.03658, over 4705.00 frames.], tot_loss[loss=0.1323, simple_loss=0.2074, pruned_loss=0.02865, over 971575.31 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:38:47,452 INFO [train.py:715] (7/8) Epoch 19, batch 29700, loss[loss=0.1195, simple_loss=0.2036, pruned_loss=0.01772, over 4814.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2069, pruned_loss=0.02846, over 971537.79 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:39:26,743 INFO [train.py:715] (7/8) Epoch 19, batch 29750, loss[loss=0.109, simple_loss=0.1828, pruned_loss=0.01754, over 4906.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2066, pruned_loss=0.02807, over 971547.31 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 19:40:06,091 INFO [train.py:715] (7/8) Epoch 19, batch 29800, loss[loss=0.1024, simple_loss=0.1758, pruned_loss=0.01455, over 4975.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2053, pruned_loss=0.02752, over 972114.50 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 19:40:45,395 INFO [train.py:715] (7/8) Epoch 19, batch 29850, loss[loss=0.1411, simple_loss=0.2179, pruned_loss=0.03221, over 4952.00 frames.], tot_loss[loss=0.13, simple_loss=0.2047, pruned_loss=0.02764, over 972878.84 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 19:41:24,808 INFO [train.py:715] (7/8) Epoch 19, batch 29900, loss[loss=0.1099, simple_loss=0.186, pruned_loss=0.01687, over 4915.00 frames.], tot_loss[loss=0.1299, simple_loss=0.2047, pruned_loss=0.02757, over 972415.39 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:42:04,770 INFO [train.py:715] (7/8) Epoch 19, batch 29950, loss[loss=0.125, simple_loss=0.1903, pruned_loss=0.02991, over 4833.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2051, pruned_loss=0.02766, over 972528.28 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 19:42:43,618 INFO [train.py:715] (7/8) Epoch 19, batch 30000, loss[loss=0.1244, simple_loss=0.1873, pruned_loss=0.03077, over 4828.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2053, pruned_loss=0.02805, over 972678.25 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 19:42:43,618 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 19:42:53,508 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1045, simple_loss=0.1877, pruned_loss=0.01067, over 914524.00 frames. 
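Across the records above, the printed loss tracks 0.5 * simple_loss + pruned_loss; for the validation entry just above, 0.5 * 0.1877 + 0.01067 ≈ 0.1045. Below is a minimal sanity check of that relation in Python, using only values copied from this log (the 0.5 weighting is inferred from the printed numbers here, not taken from the training code):

# Illustrative check only: the reported loss in these records appears to be
# 0.5 * simple_loss + pruned_loss (within print rounding).
samples = [
    # (loss, simple_loss, pruned_loss), copied from entries above
    (0.1077, 0.1796, 0.01795),  # epoch 19, batch 23700
    (0.1045, 0.1877, 0.01067),  # epoch 19, validation
]
for loss, simple, pruned in samples:
    recon = 0.5 * simple + pruned
    assert abs(recon - loss) < 5e-4, (loss, recon)
print("loss ~= 0.5 * simple_loss + pruned_loss for these samples")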
+2022-05-09 19:43:32,626 INFO [train.py:715] (7/8) Epoch 19, batch 30050, loss[loss=0.1136, simple_loss=0.1915, pruned_loss=0.01785, over 4860.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02807, over 972312.24 frames.], batch size: 20, lr: 1.16e-04 +2022-05-09 19:44:12,193 INFO [train.py:715] (7/8) Epoch 19, batch 30100, loss[loss=0.1085, simple_loss=0.1732, pruned_loss=0.02188, over 4741.00 frames.], tot_loss[loss=0.1318, simple_loss=0.206, pruned_loss=0.02873, over 971564.68 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:44:51,312 INFO [train.py:715] (7/8) Epoch 19, batch 30150, loss[loss=0.1384, simple_loss=0.2119, pruned_loss=0.03247, over 4962.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2058, pruned_loss=0.02872, over 970912.57 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:45:31,086 INFO [train.py:715] (7/8) Epoch 19, batch 30200, loss[loss=0.1032, simple_loss=0.1815, pruned_loss=0.01248, over 4779.00 frames.], tot_loss[loss=0.1321, simple_loss=0.2063, pruned_loss=0.02891, over 970698.96 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:46:09,573 INFO [train.py:715] (7/8) Epoch 19, batch 30250, loss[loss=0.1584, simple_loss=0.222, pruned_loss=0.04735, over 4861.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2063, pruned_loss=0.02874, over 972069.76 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 19:46:48,896 INFO [train.py:715] (7/8) Epoch 19, batch 30300, loss[loss=0.121, simple_loss=0.194, pruned_loss=0.024, over 4882.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2053, pruned_loss=0.02855, over 971386.69 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 19:47:28,484 INFO [train.py:715] (7/8) Epoch 19, batch 30350, loss[loss=0.1134, simple_loss=0.1849, pruned_loss=0.02095, over 4971.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2051, pruned_loss=0.02836, over 972308.17 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:48:08,092 INFO [train.py:715] (7/8) Epoch 19, batch 30400, loss[loss=0.1129, simple_loss=0.1876, pruned_loss=0.01908, over 4942.00 frames.], tot_loss[loss=0.13, simple_loss=0.2046, pruned_loss=0.02772, over 971838.45 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 19:48:47,854 INFO [train.py:715] (7/8) Epoch 19, batch 30450, loss[loss=0.1149, simple_loss=0.19, pruned_loss=0.01989, over 4958.00 frames.], tot_loss[loss=0.1304, simple_loss=0.205, pruned_loss=0.02794, over 972459.03 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:49:26,659 INFO [train.py:715] (7/8) Epoch 19, batch 30500, loss[loss=0.1523, simple_loss=0.228, pruned_loss=0.03828, over 4841.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2057, pruned_loss=0.02781, over 972572.18 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 19:50:06,595 INFO [train.py:715] (7/8) Epoch 19, batch 30550, loss[loss=0.1291, simple_loss=0.2121, pruned_loss=0.02307, over 4971.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2062, pruned_loss=0.02813, over 972989.63 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:50:45,751 INFO [train.py:715] (7/8) Epoch 19, batch 30600, loss[loss=0.106, simple_loss=0.1766, pruned_loss=0.01773, over 4768.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.028, over 972844.24 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:51:25,830 INFO [train.py:715] (7/8) Epoch 19, batch 30650, loss[loss=0.1118, simple_loss=0.177, pruned_loss=0.02326, over 4821.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02793, over 973068.36 frames.], batch size: 13, lr: 1.16e-04 +2022-05-09 
19:52:05,612 INFO [train.py:715] (7/8) Epoch 19, batch 30700, loss[loss=0.1284, simple_loss=0.2061, pruned_loss=0.02532, over 4819.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02784, over 972500.35 frames.], batch size: 26, lr: 1.16e-04 +2022-05-09 19:52:45,191 INFO [train.py:715] (7/8) Epoch 19, batch 30750, loss[loss=0.1317, simple_loss=0.212, pruned_loss=0.02571, over 4925.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2058, pruned_loss=0.02819, over 972626.97 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 19:53:25,713 INFO [train.py:715] (7/8) Epoch 19, batch 30800, loss[loss=0.134, simple_loss=0.205, pruned_loss=0.03151, over 4958.00 frames.], tot_loss[loss=0.1302, simple_loss=0.2048, pruned_loss=0.0278, over 972460.12 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 19:54:05,659 INFO [train.py:715] (7/8) Epoch 19, batch 30850, loss[loss=0.126, simple_loss=0.1874, pruned_loss=0.03235, over 4775.00 frames.], tot_loss[loss=0.1308, simple_loss=0.2055, pruned_loss=0.02805, over 971656.52 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 19:54:46,448 INFO [train.py:715] (7/8) Epoch 19, batch 30900, loss[loss=0.1235, simple_loss=0.1839, pruned_loss=0.03154, over 4776.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02781, over 972114.85 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 19:55:26,515 INFO [train.py:715] (7/8) Epoch 19, batch 30950, loss[loss=0.1553, simple_loss=0.226, pruned_loss=0.04228, over 4967.00 frames.], tot_loss[loss=0.131, simple_loss=0.2054, pruned_loss=0.02824, over 972712.30 frames.], batch size: 31, lr: 1.16e-04 +2022-05-09 19:56:07,125 INFO [train.py:715] (7/8) Epoch 19, batch 31000, loss[loss=0.1298, simple_loss=0.2049, pruned_loss=0.02736, over 4756.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2053, pruned_loss=0.02845, over 972062.21 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 19:56:47,790 INFO [train.py:715] (7/8) Epoch 19, batch 31050, loss[loss=0.1277, simple_loss=0.1991, pruned_loss=0.0282, over 4761.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2057, pruned_loss=0.02862, over 971501.94 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 19:57:28,117 INFO [train.py:715] (7/8) Epoch 19, batch 31100, loss[loss=0.1278, simple_loss=0.207, pruned_loss=0.02435, over 4955.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02838, over 971882.84 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 19:58:08,818 INFO [train.py:715] (7/8) Epoch 19, batch 31150, loss[loss=0.1216, simple_loss=0.1967, pruned_loss=0.02325, over 4831.00 frames.], tot_loss[loss=0.132, simple_loss=0.2065, pruned_loss=0.02873, over 971899.17 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 19:58:49,194 INFO [train.py:715] (7/8) Epoch 19, batch 31200, loss[loss=0.1297, simple_loss=0.196, pruned_loss=0.03164, over 4842.00 frames.], tot_loss[loss=0.1311, simple_loss=0.2056, pruned_loss=0.02827, over 972593.19 frames.], batch size: 30, lr: 1.16e-04 +2022-05-09 19:59:30,191 INFO [train.py:715] (7/8) Epoch 19, batch 31250, loss[loss=0.1128, simple_loss=0.1874, pruned_loss=0.01909, over 4779.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2052, pruned_loss=0.0281, over 971860.43 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:00:09,925 INFO [train.py:715] (7/8) Epoch 19, batch 31300, loss[loss=0.1126, simple_loss=0.1903, pruned_loss=0.01747, over 4957.00 frames.], tot_loss[loss=0.1293, simple_loss=0.2039, pruned_loss=0.02736, over 972161.01 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:00:50,569 INFO 
[train.py:715] (7/8) Epoch 19, batch 31350, loss[loss=0.1205, simple_loss=0.1993, pruned_loss=0.0209, over 4905.00 frames.], tot_loss[loss=0.1296, simple_loss=0.2044, pruned_loss=0.02741, over 972867.15 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:01:31,163 INFO [train.py:715] (7/8) Epoch 19, batch 31400, loss[loss=0.1225, simple_loss=0.2021, pruned_loss=0.02149, over 4923.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2061, pruned_loss=0.02827, over 972747.03 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:02:11,467 INFO [train.py:715] (7/8) Epoch 19, batch 31450, loss[loss=0.122, simple_loss=0.2078, pruned_loss=0.01815, over 4950.00 frames.], tot_loss[loss=0.131, simple_loss=0.2056, pruned_loss=0.02826, over 971430.16 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 20:02:52,732 INFO [train.py:715] (7/8) Epoch 19, batch 31500, loss[loss=0.1435, simple_loss=0.2167, pruned_loss=0.03512, over 4791.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02805, over 971760.94 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:03:32,849 INFO [train.py:715] (7/8) Epoch 19, batch 31550, loss[loss=0.1224, simple_loss=0.1953, pruned_loss=0.02479, over 4766.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2057, pruned_loss=0.02833, over 972142.71 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:04:13,430 INFO [train.py:715] (7/8) Epoch 19, batch 31600, loss[loss=0.1397, simple_loss=0.2095, pruned_loss=0.03499, over 4860.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02822, over 971644.29 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:04:53,554 INFO [train.py:715] (7/8) Epoch 19, batch 31650, loss[loss=0.1272, simple_loss=0.2125, pruned_loss=0.02096, over 4930.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2055, pruned_loss=0.02795, over 972026.92 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:05:33,955 INFO [train.py:715] (7/8) Epoch 19, batch 31700, loss[loss=0.1324, simple_loss=0.2138, pruned_loss=0.02554, over 4975.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2055, pruned_loss=0.02813, over 972713.11 frames.], batch size: 31, lr: 1.16e-04 +2022-05-09 20:06:14,405 INFO [train.py:715] (7/8) Epoch 19, batch 31750, loss[loss=0.1542, simple_loss=0.2241, pruned_loss=0.04219, over 4787.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02835, over 972415.05 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:06:54,585 INFO [train.py:715] (7/8) Epoch 19, batch 31800, loss[loss=0.1293, simple_loss=0.2122, pruned_loss=0.0232, over 4983.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02818, over 972342.04 frames.], batch size: 25, lr: 1.16e-04 +2022-05-09 20:07:35,678 INFO [train.py:715] (7/8) Epoch 19, batch 31850, loss[loss=0.1026, simple_loss=0.1716, pruned_loss=0.0168, over 4800.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02848, over 971603.30 frames.], batch size: 24, lr: 1.16e-04 +2022-05-09 20:08:15,979 INFO [train.py:715] (7/8) Epoch 19, batch 31900, loss[loss=0.1231, simple_loss=0.2027, pruned_loss=0.02177, over 4934.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02838, over 972542.24 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 20:08:56,574 INFO [train.py:715] (7/8) Epoch 19, batch 31950, loss[loss=0.1264, simple_loss=0.193, pruned_loss=0.02992, over 4967.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2057, pruned_loss=0.02871, over 973284.17 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:09:36,646 INFO 
[train.py:715] (7/8) Epoch 19, batch 32000, loss[loss=0.1208, simple_loss=0.2004, pruned_loss=0.02061, over 4883.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2059, pruned_loss=0.02825, over 972781.00 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:10:16,976 INFO [train.py:715] (7/8) Epoch 19, batch 32050, loss[loss=0.1401, simple_loss=0.2142, pruned_loss=0.03296, over 4930.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2051, pruned_loss=0.02806, over 973073.38 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 20:10:57,302 INFO [train.py:715] (7/8) Epoch 19, batch 32100, loss[loss=0.1194, simple_loss=0.1947, pruned_loss=0.02206, over 4877.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02859, over 973185.19 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:11:37,104 INFO [train.py:715] (7/8) Epoch 19, batch 32150, loss[loss=0.1269, simple_loss=0.1854, pruned_loss=0.0342, over 4783.00 frames.], tot_loss[loss=0.132, simple_loss=0.2062, pruned_loss=0.02894, over 972700.52 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 20:12:18,364 INFO [train.py:715] (7/8) Epoch 19, batch 32200, loss[loss=0.1428, simple_loss=0.212, pruned_loss=0.03683, over 4890.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2058, pruned_loss=0.02863, over 972102.54 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:12:58,109 INFO [train.py:715] (7/8) Epoch 19, batch 32250, loss[loss=0.1362, simple_loss=0.2139, pruned_loss=0.02929, over 4819.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2059, pruned_loss=0.02835, over 971941.53 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:13:38,492 INFO [train.py:715] (7/8) Epoch 19, batch 32300, loss[loss=0.1246, simple_loss=0.2026, pruned_loss=0.02328, over 4905.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2058, pruned_loss=0.02832, over 972601.93 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:14:19,667 INFO [train.py:715] (7/8) Epoch 19, batch 32350, loss[loss=0.1311, simple_loss=0.2079, pruned_loss=0.02712, over 4891.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02809, over 973918.46 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:15:00,206 INFO [train.py:715] (7/8) Epoch 19, batch 32400, loss[loss=0.1144, simple_loss=0.1867, pruned_loss=0.02104, over 4916.00 frames.], tot_loss[loss=0.131, simple_loss=0.206, pruned_loss=0.02801, over 973449.64 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:15:40,802 INFO [train.py:715] (7/8) Epoch 19, batch 32450, loss[loss=0.1367, simple_loss=0.2139, pruned_loss=0.02976, over 4923.00 frames.], tot_loss[loss=0.1317, simple_loss=0.207, pruned_loss=0.02819, over 974077.06 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:16:20,798 INFO [train.py:715] (7/8) Epoch 19, batch 32500, loss[loss=0.1348, simple_loss=0.2095, pruned_loss=0.03003, over 4943.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2076, pruned_loss=0.02796, over 973553.12 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:17:01,568 INFO [train.py:715] (7/8) Epoch 19, batch 32550, loss[loss=0.1097, simple_loss=0.1837, pruned_loss=0.01787, over 4762.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2073, pruned_loss=0.02773, over 972923.56 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:17:41,581 INFO [train.py:715] (7/8) Epoch 19, batch 32600, loss[loss=0.1314, simple_loss=0.1976, pruned_loss=0.03262, over 4932.00 frames.], tot_loss[loss=0.1312, simple_loss=0.2069, pruned_loss=0.02772, over 972986.18 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 20:18:21,658 INFO 
[train.py:715] (7/8) Epoch 19, batch 32650, loss[loss=0.1187, simple_loss=0.19, pruned_loss=0.02373, over 4904.00 frames.], tot_loss[loss=0.1313, simple_loss=0.2069, pruned_loss=0.0279, over 972123.91 frames.], batch size: 22, lr: 1.16e-04 +2022-05-09 20:19:02,302 INFO [train.py:715] (7/8) Epoch 19, batch 32700, loss[loss=0.1234, simple_loss=0.1849, pruned_loss=0.03092, over 4876.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2068, pruned_loss=0.02838, over 971939.44 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 20:19:42,121 INFO [train.py:715] (7/8) Epoch 19, batch 32750, loss[loss=0.1392, simple_loss=0.2118, pruned_loss=0.03324, over 4797.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2074, pruned_loss=0.02884, over 971632.47 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:20:21,840 INFO [train.py:715] (7/8) Epoch 19, batch 32800, loss[loss=0.1201, simple_loss=0.1895, pruned_loss=0.02537, over 4787.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02839, over 971806.86 frames.], batch size: 12, lr: 1.16e-04 +2022-05-09 20:21:00,656 INFO [train.py:715] (7/8) Epoch 19, batch 32850, loss[loss=0.1237, simple_loss=0.1997, pruned_loss=0.02385, over 4945.00 frames.], tot_loss[loss=0.1324, simple_loss=0.2073, pruned_loss=0.02875, over 971892.78 frames.], batch size: 21, lr: 1.16e-04 +2022-05-09 20:21:39,683 INFO [train.py:715] (7/8) Epoch 19, batch 32900, loss[loss=0.181, simple_loss=0.2604, pruned_loss=0.05077, over 4937.00 frames.], tot_loss[loss=0.1322, simple_loss=0.2071, pruned_loss=0.02868, over 972249.87 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 20:22:18,347 INFO [train.py:715] (7/8) Epoch 19, batch 32950, loss[loss=0.1241, simple_loss=0.1972, pruned_loss=0.02549, over 4939.00 frames.], tot_loss[loss=0.1332, simple_loss=0.2078, pruned_loss=0.02929, over 971944.87 frames.], batch size: 23, lr: 1.16e-04 +2022-05-09 20:22:57,661 INFO [train.py:715] (7/8) Epoch 19, batch 33000, loss[loss=0.1091, simple_loss=0.1832, pruned_loss=0.01747, over 4788.00 frames.], tot_loss[loss=0.1322, simple_loss=0.207, pruned_loss=0.02869, over 971263.51 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:22:57,662 INFO [train.py:733] (7/8) Computing validation loss +2022-05-09 20:23:07,492 INFO [train.py:742] (7/8) Epoch 19, validation: loss=0.1048, simple_loss=0.1878, pruned_loss=0.01088, over 914524.00 frames. 
+2022-05-09 20:23:46,770 INFO [train.py:715] (7/8) Epoch 19, batch 33050, loss[loss=0.1242, simple_loss=0.1952, pruned_loss=0.02663, over 4885.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2066, pruned_loss=0.02864, over 971775.91 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:24:26,212 INFO [train.py:715] (7/8) Epoch 19, batch 33100, loss[loss=0.1289, simple_loss=0.2087, pruned_loss=0.02451, over 4799.00 frames.], tot_loss[loss=0.131, simple_loss=0.2055, pruned_loss=0.02827, over 971862.31 frames.], batch size: 17, lr: 1.16e-04 +2022-05-09 20:25:05,027 INFO [train.py:715] (7/8) Epoch 19, batch 33150, loss[loss=0.1288, simple_loss=0.2016, pruned_loss=0.02803, over 4871.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2065, pruned_loss=0.02858, over 972180.62 frames.], batch size: 32, lr: 1.16e-04 +2022-05-09 20:25:44,209 INFO [train.py:715] (7/8) Epoch 19, batch 33200, loss[loss=0.1383, simple_loss=0.2184, pruned_loss=0.0291, over 4898.00 frames.], tot_loss[loss=0.1315, simple_loss=0.2061, pruned_loss=0.02847, over 972786.21 frames.], batch size: 19, lr: 1.16e-04 +2022-05-09 20:26:23,768 INFO [train.py:715] (7/8) Epoch 19, batch 33250, loss[loss=0.1368, simple_loss=0.2169, pruned_loss=0.02837, over 4774.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2056, pruned_loss=0.02812, over 972059.40 frames.], batch size: 14, lr: 1.16e-04 +2022-05-09 20:27:03,191 INFO [train.py:715] (7/8) Epoch 19, batch 33300, loss[loss=0.1445, simple_loss=0.1999, pruned_loss=0.04456, over 4692.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2064, pruned_loss=0.02851, over 972279.92 frames.], batch size: 15, lr: 1.16e-04 +2022-05-09 20:27:42,910 INFO [train.py:715] (7/8) Epoch 19, batch 33350, loss[loss=0.09858, simple_loss=0.1752, pruned_loss=0.01099, over 4928.00 frames.], tot_loss[loss=0.131, simple_loss=0.2057, pruned_loss=0.02816, over 972765.16 frames.], batch size: 29, lr: 1.16e-04 +2022-05-09 20:28:22,075 INFO [train.py:715] (7/8) Epoch 19, batch 33400, loss[loss=0.1236, simple_loss=0.1951, pruned_loss=0.02602, over 4774.00 frames.], tot_loss[loss=0.1304, simple_loss=0.2052, pruned_loss=0.02784, over 972151.56 frames.], batch size: 18, lr: 1.16e-04 +2022-05-09 20:29:01,053 INFO [train.py:715] (7/8) Epoch 19, batch 33450, loss[loss=0.1451, simple_loss=0.2214, pruned_loss=0.03443, over 4953.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2051, pruned_loss=0.02751, over 971655.36 frames.], batch size: 39, lr: 1.16e-04 +2022-05-09 20:29:40,021 INFO [train.py:715] (7/8) Epoch 19, batch 33500, loss[loss=0.1746, simple_loss=0.2422, pruned_loss=0.05353, over 4751.00 frames.], tot_loss[loss=0.1309, simple_loss=0.206, pruned_loss=0.02794, over 971625.79 frames.], batch size: 16, lr: 1.16e-04 +2022-05-09 20:30:18,904 INFO [train.py:715] (7/8) Epoch 19, batch 33550, loss[loss=0.1176, simple_loss=0.2019, pruned_loss=0.01667, over 4755.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2062, pruned_loss=0.02778, over 971245.38 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:30:58,230 INFO [train.py:715] (7/8) Epoch 19, batch 33600, loss[loss=0.148, simple_loss=0.2207, pruned_loss=0.03763, over 4881.00 frames.], tot_loss[loss=0.131, simple_loss=0.2062, pruned_loss=0.02791, over 971449.86 frames.], batch size: 16, lr: 1.15e-04 +2022-05-09 20:31:37,218 INFO [train.py:715] (7/8) Epoch 19, batch 33650, loss[loss=0.121, simple_loss=0.1965, pruned_loss=0.0227, over 4957.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2058, pruned_loss=0.02797, over 971578.54 frames.], batch size: 24, lr: 1.15e-04 +2022-05-09 
20:32:16,615 INFO [train.py:715] (7/8) Epoch 19, batch 33700, loss[loss=0.1326, simple_loss=0.2253, pruned_loss=0.01994, over 4742.00 frames.], tot_loss[loss=0.1309, simple_loss=0.2059, pruned_loss=0.02792, over 971789.87 frames.], batch size: 16, lr: 1.15e-04 +2022-05-09 20:32:55,332 INFO [train.py:715] (7/8) Epoch 19, batch 33750, loss[loss=0.1413, simple_loss=0.2185, pruned_loss=0.03205, over 4885.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2052, pruned_loss=0.02784, over 972097.37 frames.], batch size: 16, lr: 1.15e-04 +2022-05-09 20:33:34,124 INFO [train.py:715] (7/8) Epoch 19, batch 33800, loss[loss=0.1242, simple_loss=0.2073, pruned_loss=0.02056, over 4952.00 frames.], tot_loss[loss=0.1306, simple_loss=0.2053, pruned_loss=0.02801, over 972153.19 frames.], batch size: 21, lr: 1.15e-04 +2022-05-09 20:34:12,732 INFO [train.py:715] (7/8) Epoch 19, batch 33850, loss[loss=0.1181, simple_loss=0.1891, pruned_loss=0.02361, over 4990.00 frames.], tot_loss[loss=0.1307, simple_loss=0.2056, pruned_loss=0.02792, over 971495.91 frames.], batch size: 14, lr: 1.15e-04 +2022-05-09 20:34:51,527 INFO [train.py:715] (7/8) Epoch 19, batch 33900, loss[loss=0.1442, simple_loss=0.2129, pruned_loss=0.03773, over 4892.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2065, pruned_loss=0.02813, over 970981.99 frames.], batch size: 18, lr: 1.15e-04 +2022-05-09 20:35:31,244 INFO [train.py:715] (7/8) Epoch 19, batch 33950, loss[loss=0.139, simple_loss=0.2145, pruned_loss=0.03173, over 4888.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02843, over 971502.60 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:36:10,893 INFO [train.py:715] (7/8) Epoch 19, batch 34000, loss[loss=0.1256, simple_loss=0.2025, pruned_loss=0.02439, over 4813.00 frames.], tot_loss[loss=0.1318, simple_loss=0.207, pruned_loss=0.02829, over 971622.59 frames.], batch size: 27, lr: 1.15e-04 +2022-05-09 20:36:50,180 INFO [train.py:715] (7/8) Epoch 19, batch 34050, loss[loss=0.1178, simple_loss=0.1886, pruned_loss=0.02356, over 4776.00 frames.], tot_loss[loss=0.131, simple_loss=0.2059, pruned_loss=0.02806, over 971941.95 frames.], batch size: 17, lr: 1.15e-04 +2022-05-09 20:37:28,969 INFO [train.py:715] (7/8) Epoch 19, batch 34100, loss[loss=0.148, simple_loss=0.224, pruned_loss=0.03595, over 4900.00 frames.], tot_loss[loss=0.131, simple_loss=0.206, pruned_loss=0.028, over 971792.57 frames.], batch size: 17, lr: 1.15e-04 +2022-05-09 20:38:08,481 INFO [train.py:715] (7/8) Epoch 19, batch 34150, loss[loss=0.1104, simple_loss=0.177, pruned_loss=0.02191, over 4967.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02829, over 971500.32 frames.], batch size: 35, lr: 1.15e-04 +2022-05-09 20:38:48,060 INFO [train.py:715] (7/8) Epoch 19, batch 34200, loss[loss=0.1539, simple_loss=0.2258, pruned_loss=0.04097, over 4913.00 frames.], tot_loss[loss=0.1318, simple_loss=0.2067, pruned_loss=0.02849, over 972164.10 frames.], batch size: 18, lr: 1.15e-04 +2022-05-09 20:39:27,600 INFO [train.py:715] (7/8) Epoch 19, batch 34250, loss[loss=0.1228, simple_loss=0.2042, pruned_loss=0.02071, over 4791.00 frames.], tot_loss[loss=0.1314, simple_loss=0.2062, pruned_loss=0.02831, over 972470.43 frames.], batch size: 18, lr: 1.15e-04 +2022-05-09 20:40:06,937 INFO [train.py:715] (7/8) Epoch 19, batch 34300, loss[loss=0.1282, simple_loss=0.2033, pruned_loss=0.02659, over 4872.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2066, pruned_loss=0.02836, over 972332.98 frames.], batch size: 32, lr: 1.15e-04 +2022-05-09 20:40:46,130 
INFO [train.py:715] (7/8) Epoch 19, batch 34350, loss[loss=0.1198, simple_loss=0.1969, pruned_loss=0.02136, over 4881.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.0284, over 972280.87 frames.], batch size: 16, lr: 1.15e-04 +2022-05-09 20:41:25,885 INFO [train.py:715] (7/8) Epoch 19, batch 34400, loss[loss=0.1273, simple_loss=0.2116, pruned_loss=0.02154, over 4806.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2067, pruned_loss=0.02852, over 972007.06 frames.], batch size: 25, lr: 1.15e-04 +2022-05-09 20:42:05,039 INFO [train.py:715] (7/8) Epoch 19, batch 34450, loss[loss=0.146, simple_loss=0.2158, pruned_loss=0.03807, over 4973.00 frames.], tot_loss[loss=0.1317, simple_loss=0.2067, pruned_loss=0.02838, over 971953.40 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:42:44,560 INFO [train.py:715] (7/8) Epoch 19, batch 34500, loss[loss=0.1341, simple_loss=0.2116, pruned_loss=0.02823, over 4700.00 frames.], tot_loss[loss=0.1314, simple_loss=0.206, pruned_loss=0.02842, over 972010.12 frames.], batch size: 15, lr: 1.15e-04 +2022-05-09 20:43:24,267 INFO [train.py:715] (7/8) Epoch 19, batch 34550, loss[loss=0.1439, simple_loss=0.2287, pruned_loss=0.02954, over 4932.00 frames.], tot_loss[loss=0.1319, simple_loss=0.2068, pruned_loss=0.02854, over 971883.85 frames.], batch size: 29, lr: 1.15e-04 +2022-05-09 20:44:03,127 INFO [train.py:715] (7/8) Epoch 19, batch 34600, loss[loss=0.1326, simple_loss=0.2087, pruned_loss=0.02825, over 4950.00 frames.], tot_loss[loss=0.132, simple_loss=0.2067, pruned_loss=0.02861, over 972208.46 frames.], batch size: 35, lr: 1.15e-04 +2022-05-09 20:44:45,166 INFO [train.py:715] (7/8) Epoch 19, batch 34650, loss[loss=0.1359, simple_loss=0.2161, pruned_loss=0.02781, over 4887.00 frames.], tot_loss[loss=0.1316, simple_loss=0.2064, pruned_loss=0.02841, over 971922.86 frames.], batch size: 20, lr: 1.15e-04 +2022-05-09 20:45:24,630 INFO [train.py:715] (7/8) Epoch 19, batch 34700, loss[loss=0.109, simple_loss=0.1862, pruned_loss=0.01587, over 4904.00 frames.], tot_loss[loss=0.1303, simple_loss=0.2054, pruned_loss=0.02763, over 972072.82 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:46:02,678 INFO [train.py:715] (7/8) Epoch 19, batch 34750, loss[loss=0.1223, simple_loss=0.1979, pruned_loss=0.02334, over 4891.00 frames.], tot_loss[loss=0.1305, simple_loss=0.2054, pruned_loss=0.02781, over 972880.29 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:46:39,960 INFO [train.py:715] (7/8) Epoch 19, batch 34800, loss[loss=0.1221, simple_loss=0.1909, pruned_loss=0.0267, over 4889.00 frames.], tot_loss[loss=0.1301, simple_loss=0.2048, pruned_loss=0.02776, over 972848.93 frames.], batch size: 19, lr: 1.15e-04 +2022-05-09 20:46:48,537 INFO [train.py:915] (7/8) Done!
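The run ends here with "Done!". For post-processing, a minimal, illustrative Python sketch that pulls (epoch, batch, tot_loss) out of per-batch records formatted like the ones above, e.g. to plot the training curve; it is not part of icefall, and "train.log" is a placeholder path:

import re

# Matches per-batch records of the form seen in this log:
#   ... Epoch E, batch B, loss[loss=..., simple_loss=..., pruned_loss=...,
#   over N frames.], tot_loss[loss=..., ...], batch size: ..., lr: ...
BATCH_RE = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), "
    r"loss\[loss=[\d.]+, simple_loss=[\d.]+, pruned_loss=[\d.]+, "
    r"over [\d.]+ frames\.\], tot_loss\[loss=(?P<tot>[\d.]+)"
)

def iter_tot_loss(path="train.log"):
    """Yield (epoch, batch, tot_loss) for every per-batch record in the log."""
    with open(path) as f:
        for line in f:
            m = BATCH_RE.search(line)
            if m:
                yield int(m["epoch"]), int(m["batch"]), float(m["tot"])

if __name__ == "__main__":
    for epoch, batch, tot in iter_tot_loss():
        print(f"epoch {epoch} batch {batch} tot_loss {tot}")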