aashraychegu committed on
End of training

Browse files
- README.md +59 -34
- config.json +7 -7
- model.safetensors +2 -2
- training_args.bin +0 -0

README.md CHANGED
@@ -14,12 +14,12 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Mean Iou: 0.
-- Mean Accuracy: 0.
-- Overall Accuracy: 0.
-- Per Category Iou: [0.
-- Per Category Accuracy: [0.
+- Loss: 0.0152
+- Mean Iou: 0.9578
+- Mean Accuracy: 0.9770
+- Overall Accuracy: 0.9815
+- Per Category Iou: [0.966974556454048, 0.9306800202753192, 0.9758789229664083]
+- Per Category Accuracy: [0.986415203459849, 0.9545545504757966, 0.9898833477760938]
 
 ## Model description
 
@@ -44,38 +44,63 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 50
 - mixed_precision_training: Native AMP
 
 ### Training results
 
-| Training Loss | Epoch | Step
-(separator and 25 rows of the earlier results table, truncated in this view)
| Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Per Category Iou | Per Category Accuracy |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|:-------------:|:----------------:|:------------------------------------------------------------:|:------------------------------------------------------------:|
| 0.0646 | 1.0 | 703 | 0.0566 | 0.9247 | 0.9600 | 0.9667 | [0.9494255778418639, 0.867692450590415, 0.9569388095269572] | [0.9761190606426763, 0.9255943024845551, 0.9782301302123408] |
| 0.0599 | 2.0 | 1406 | 0.0497 | 0.9282 | 0.9616 | 0.9684 | [0.9477319173718886, 0.8753546288784326, 0.9613964795143571] | [0.9711497669822643, 0.9303257017214881, 0.983177895106294] |
| 0.0551 | 3.0 | 2109 | 0.0487 | 0.9297 | 0.9630 | 0.9689 | [0.9520168030120617, 0.8775963388947718, 0.9593654968038946] | [0.9792121369623757, 0.9314351084836408, 0.9782160997896858] |
| 0.0538 | 4.0 | 2812 | 0.0461 | 0.9301 | 0.9606 | 0.9697 | [0.9526700363985638, 0.8742364766071521, 0.9633119672224237] | [0.9796255350132945, 0.9152524234283989, 0.986801690344372] |
| 0.0537 | 5.0 | 3515 | 0.0459 | 0.9293 | 0.9605 | 0.9693 | [0.9480194887278883, 0.8755894234719767, 0.9644158890544811] | [0.9695323626038072, 0.9222969866729543, 0.989726930656922] |
| 0.0502 | 6.0 | 4218 | 0.0419 | 0.9343 | 0.9634 | 0.9714 | [0.9527888538779837, 0.8847465897421206, 0.9654703601812225] | [0.976308799435177, 0.925932784344352, 0.9880231202290629] |
| 0.0485 | 7.0 | 4921 | 0.0426 | 0.9324 | 0.9618 | 0.9707 | [0.952703644674004, 0.8794287150835857, 0.9650298528805881] | [0.9762879243141335, 0.9201165442085169, 0.9890906398334286] |
| 0.0467 | 8.0 | 5624 | 0.0391 | 0.9369 | 0.9657 | 0.9723 | [0.9570939850601026, 0.8892999282991234, 0.9641725721587178] | [0.9836122270915137, 0.9306427121583465, 0.9828948498133379] |
| 0.0465 | 9.0 | 6327 | 0.0365 | 0.9392 | 0.9662 | 0.9736 | [0.9570166835679954, 0.8930106582106385, 0.9674431118321617] | [0.9799191676774507, 0.9305790764906415, 0.9880572447369236] |
| 0.0441 | 10.0 | 7030 | 0.0368 | 0.9393 | 0.9671 | 0.9733 | [0.9557842284941621, 0.896113739486814, 0.966114092503835] | [0.9781441914622425, 0.9371105515852832, 0.9859413896265214] |
| 0.0449 | 11.0 | 7733 | 0.0363 | 0.9395 | 0.9662 | 0.9737 | [0.9587446620984498, 0.8925112162386646, 0.9671193682825042] | [0.9841777180627991, 0.9274434801225682, 0.9869006864166631] |
| 0.0429 | 12.0 | 8436 | 0.0362 | 0.9387 | 0.9663 | 0.9733 | [0.9568234173066003, 0.8919248505170725, 0.9672752936824205] | [0.9807715727354733, 0.9313716624751807, 0.986681375204542] |
| 0.043 | 13.0 | 9139 | 0.0359 | 0.9399 | 0.9674 | 0.9736 | [0.9583618298582752, 0.8954311403447485, 0.9658975869823546] | [0.9824077197610704, 0.9351835437823924, 0.9845412435692529] |
| 0.0403 | 14.0 | 9842 | 0.0327 | 0.9431 | 0.9685 | 0.9752 | [0.9605210947016356, 0.8999327500995064, 0.9687008045143891] | [0.9845226714091516, 0.9340792478377873, 0.9869475769904131] |
| 0.0396 | 15.0 | 10545 | 0.0317 | 0.9430 | 0.9687 | 0.9751 | [0.9582689044073551, 0.9018446354914778, 0.9689720395673985] | [0.9803209527379552, 0.9375974567773319, 0.9880329157127937] |
| 0.0398 | 16.0 | 11248 | 0.0311 | 0.9441 | 0.9687 | 0.9758 | [0.9600781473937521, 0.9018120203453935, 0.9705198767201947] | [0.9833703518813173, 0.9337154414777602, 0.9891511845852599] |
| 0.0394 | 17.0 | 11951 | 0.0315 | 0.9428 | 0.9681 | 0.9752 | [0.9599952844492292, 0.8990866834010299, 0.9692707279290462] | [0.9834365741883817, 0.932572065746635, 0.9882736728656242] |
| 0.0373 | 18.0 | 12654 | 0.0298 | 0.9453 | 0.9697 | 0.9762 | [0.9604100374735833, 0.9056672192236825, 0.96995081765577] | [0.9824364503507406, 0.9378770943608746, 0.9886761889355877] |
| 0.0367 | 19.0 | 13357 | 0.0286 | 0.9465 | 0.9703 | 0.9767 | [0.9607981425237361, 0.9074436356815473, 0.9711230723862715] | [0.9831492425992236, 0.939067046409727, 0.9888305367096034] |
| 0.0351 | 20.0 | 14060 | 0.0278 | 0.9471 | 0.9708 | 0.9770 | [0.9618333883616247, 0.9086763944768684, 0.9708953906361056] | [0.9835691087261089, 0.9400169996572258, 0.9886674084372189] |
| 0.0354 | 21.0 | 14763 | 0.0285 | 0.9464 | 0.9701 | 0.9767 | [0.9602311140889335, 0.9075971976817253, 0.971288380236899] | [0.9805902546456458, 0.9392485003967921, 0.9904217581362069] |
| 0.0346 | 22.0 | 15466 | 0.0272 | 0.9474 | 0.9707 | 0.9772 | [0.9633102052165451, 0.9075619288178078, 0.9712912214581088] | [0.9877988385981887, 0.9366896178642719, 0.9877408755495605] |
| 0.0338 | 23.0 | 16169 | 0.0274 | 0.9475 | 0.9710 | 0.9771 | [0.9621579382676179, 0.9097149647578355, 0.9706881239127263] | [0.9833567220330544, 0.9412283931828203, 0.9885273185825774] |
| 0.033 | 24.0 | 16872 | 0.0248 | 0.9496 | 0.9719 | 0.9781 | [0.9633388370586602, 0.9126597100292737, 0.9727672608399773] | [0.9847982679687262, 0.94123947327555, 0.9897510890585125] |
| 0.0328 | 25.0 | 17575 | 0.0258 | 0.9484 | 0.9711 | 0.9777 | [0.962009361478448, 0.9105945715118131, 0.972718363850045] | [0.9830181147234991, 0.9396734668722746, 0.990675360103791] |
| 0.0317 | 26.0 | 18278 | 0.0253 | 0.9492 | 0.9716 | 0.9779 | [0.9639558319000139, 0.911781219914515, 0.9717356738019733] | [0.9860431702641673, 0.939863355704706, 0.9889956162038476] |
| 0.0314 | 27.0 | 18981 | 0.0240 | 0.9508 | 0.9735 | 0.9783 | [0.9632096299431748, 0.9178493526029053, 0.9714077953769692] | [0.9846306889201024, 0.9490954287160887, 0.9868323630270145] |
| 0.031 | 28.0 | 19684 | 0.0225 | 0.9519 | 0.9738 | 0.9789 | [0.9639432214860649, 0.9191291970986417, 0.9727323390586772] | [0.9846959927218408, 0.9485333186423948, 0.9882958581706704] |
| 0.0304 | 29.0 | 20387 | 0.0222 | 0.9520 | 0.9738 | 0.9790 | [0.9646276170673505, 0.9186786670273801, 0.9728235086526744] | [0.9854814689302406, 0.9476905128862128, 0.988319968447955] |
| 0.03 | 30.0 | 21090 | 0.0222 | 0.9516 | 0.9735 | 0.9788 | [0.9640884759202785, 0.9181744197938652, 0.9725126150057635] | [0.9859090299478983, 0.9467068901317071, 0.9879970937294139] |
| 0.0291 | 31.0 | 21793 | 0.0222 | 0.9520 | 0.9737 | 0.9790 | [0.9644441535927152, 0.9183966340639266, 0.973123149831471] | [0.9853206084590358, 0.9467745885000796, 0.988864109975414] |
| 0.0288 | 32.0 | 22496 | 0.0216 | 0.9527 | 0.9744 | 0.9792 | [0.9635312975815179, 0.9213747265447779, 0.973258211843832] | [0.9833080284045524, 0.9510300129067109, 0.9887883360682158] |
| 0.0274 | 33.0 | 23199 | 0.0215 | 0.9527 | 0.9740 | 0.9793 | [0.9640157857518139, 0.9206728124821452, 0.9735005954174129] | [0.9843878695121234, 0.9483662887400095, 0.9893775350708613] |
| 0.0278 | 34.0 | 23902 | 0.0201 | 0.9538 | 0.9749 | 0.9797 | [0.9645863263073253, 0.9230425214866308, 0.9736614810798143] | [0.9850184523223797, 0.9508885071458663, 0.9886674084372189] |
| 0.0277 | 35.0 | 24605 | 0.0199 | 0.9542 | 0.9753 | 0.9798 | [0.9648387629037706, 0.9242740259952547, 0.9734575868219909] | [0.9851679759970793, 0.9527511805364656, 0.9880083198175126] |
| 0.027 | 36.0 | 25308 | 0.0190 | 0.9546 | 0.9751 | 0.9801 | [0.9650972384872185, 0.9240644176546186, 0.9747245960463098] | [0.9849486973025233, 0.9502703278101333, 0.9899684840482691] |
| 0.0266 | 37.0 | 26011 | 0.0190 | 0.9546 | 0.9748 | 0.9802 | [0.9660706980566042, 0.9226321694052272, 0.9749641496350198] | [0.9865475517272653, 0.9474771761818693, 0.9902573130078139] |
| 0.0256 | 38.0 | 26714 | 0.0186 | 0.9554 | 0.9757 | 0.9804 | [0.9655268292420411, 0.9260322783164191, 0.9746364243430061] | [0.9852960464704602, 0.9524953701433246, 0.9893096972992093] |
| 0.0262 | 39.0 | 27417 | 0.0180 | 0.9555 | 0.9757 | 0.9805 | [0.966309913000785, 0.9256650052387354, 0.9744492221886044] | [0.9861509436967797, 0.9520694054612898, 0.9890100753703193] |
| 0.0248 | 40.0 | 28120 | 0.0177 | 0.9557 | 0.9756 | 0.9806 | [0.9660911749634843, 0.9258013861826082, 0.9751373606294378] | [0.9859783931577905, 0.9509224960789698, 0.9899530798954259] |
| 0.0249 | 41.0 | 28823 | 0.0178 | 0.9555 | 0.9754 | 0.9806 | [0.9665350924463278, 0.9247239384112762, 0.9751124707269705] | [0.986670425901285, 0.9496893616146678, 0.9899208453603083] |
| 0.0241 | 42.0 | 29526 | 0.0173 | 0.9561 | 0.9759 | 0.9807 | [0.9669016375167447, 0.9264132616994011, 0.9748969691253521] | [0.9869660625770613, 0.9512582628169447, 0.9894373142087342] |
| 0.0239 | 43.0 | 30229 | 0.0170 | 0.9566 | 0.9765 | 0.9808 | [0.9657028012078982, 0.9293126752941933, 0.9747772529312919] | [0.9851113112838957, 0.9551385811653776, 0.9891693536982271] |
| 0.0234 | 44.0 | 30932 | 0.0166 | 0.9563 | 0.9758 | 0.9810 | [0.9678707868291188, 0.9255875185979531, 0.975463092811191] | [0.9880463982644232, 0.9494462284447401, 0.9899507568039292] |
| 0.0228 | 45.0 | 31635 | 0.0163 | 0.9569 | 0.9764 | 0.9811 | [0.9667985174141136, 0.9285307472949479, 0.9753903785250766] | [0.9863655913260211, 0.9531677121665817, 0.9897135346001182] |
| 0.0236 | 46.0 | 32338 | 0.0160 | 0.9572 | 0.9767 | 0.9812 | [0.9669272848057375, 0.9294111193088093, 0.9753600048069734] | [0.98654358224271, 0.9541473420948784, 0.989392186734557] |
| 0.0226 | 47.0 | 33041 | 0.0158 | 0.9572 | 0.9765 | 0.9812 | [0.9663055417479156, 0.929422808459136, 0.9759668808926588] | [0.9857908703396774, 0.9534334846611139, 0.9903217602033646] |
| 0.0223 | 48.0 | 33744 | 0.0154 | 0.9579 | 0.9771 | 0.9814 | [0.9667858095653383, 0.9314465218695431, 0.9755411515813254] | [0.9861092962245191, 0.9556930349948158, 0.9895239860837632] |
| 0.0217 | 49.0 | 34447 | 0.0153 | 0.9578 | 0.9769 | 0.9814 | [0.9666844542687175, 0.930702020324814, 0.9759177123723752] | [0.9859492257963569, 0.95489544799545, 0.9899713627567528] |
| 0.022 | 50.0 | 35150 | 0.0152 | 0.9578 | 0.9770 | 0.9815 | [0.966974556454048, 0.9306800202753192, 0.9758789229664083] | [0.986415203459849, 0.9545545504757966, 0.9898833477760938] |
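
The summary columns are consistent with the per-category columns: Mean Iou and Mean Accuracy are the unweighted means of the three per-category values (the three entries suggest a three-class segmentation task), while Overall Accuracy is computed over all pixels and so need not equal that mean. A quick check against the final epoch-50 row, as a minimal Python sketch:

```python
# Consistency check of the reported summary metrics against the
# per-category columns of the final (epoch 50.0) row above.
per_category_iou = [0.966974556454048, 0.9306800202753192, 0.9758789229664083]
per_category_acc = [0.986415203459849, 0.9545545504757966, 0.9898833477760938]

mean_iou = sum(per_category_iou) / len(per_category_iou)
mean_acc = sum(per_category_acc) / len(per_category_acc)

print(f"{mean_iou:.4f}")  # 0.9578, matching the reported Mean Iou
print(f"{mean_acc:.4f}")  # 0.9770, matching the reported Mean Accuracy
```
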
### Framework versions

config.json CHANGED
@@ -4,19 +4,19 @@
   ],
   "attention_probs_dropout_prob": 0.0,
   "classifier_dropout_prob": 0.1,
-  "decoder_hidden_size":
+  "decoder_hidden_size": 768,
   "depths": [
-    6,
-    4,
     3,
-
+    8,
+    18,
+    3
   ],
   "drop_path_rate": 0.1,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_sizes": [
-    64,
     128,
+    256,
     384,
     512
   ],
@@ -40,9 +40,9 @@
   ],
   "model_type": "segformer",
   "num_attention_heads": [
-    1,
     2,
-
+    4,
+    8,
     8
   ],
   "num_channels": 3,
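
For reference, the encoder and decode-head settings visible in this diff can be written out as a `SegformerConfig`. This is only a sketch assembled from the values shown above; the first entries of `hidden_sizes` and `num_attention_heads` are read from unchanged context lines, and every field not present in the diff (e.g. `num_labels`, `sr_ratios`, `patch_sizes`, the label maps) is left at the library default, so it is not a drop-in replica of the checkpoint's full config.json.

```python
from transformers import SegformerConfig

# Sketch of the updated configuration, using only values visible in the diff;
# anything not shown above falls back to SegformerConfig defaults and may
# differ from the actual config.json of this checkpoint.
config = SegformerConfig(
    num_channels=3,
    depths=[3, 8, 18, 3],               # encoder depths per stage (new values)
    hidden_sizes=[128, 256, 384, 512],  # per-stage embedding dims (new values)
    num_attention_heads=[2, 4, 8, 8],   # per-stage attention heads (new values)
    decoder_hidden_size=768,            # decode-head width (new value)
    hidden_act="gelu",
    hidden_dropout_prob=0.0,
    attention_probs_dropout_prob=0.0,
    classifier_dropout_prob=0.1,
    drop_path_rate=0.1,
)
print(config.model_type)  # "segformer"
```
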

model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a8b05602a2e9b9f5cee01dd502fe34beea093badae4bd8295e9477d56119c76b
+size 309088476
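
The two replaced lines above are the Git LFS pointer for the new weights file. A locally downloaded copy can be checked against that pointer with the standard library; the local path here is an assumption:

```python
import hashlib
import os

# Verify a downloaded model.safetensors against the LFS pointer above.
path = "model.safetensors"  # adjust to wherever the file was saved

expected_sha256 = "a8b05602a2e9b9f5cee01dd502fe34beea093badae4bd8295e9477d56119c76b"
expected_size = 309088476  # bytes, from the "size" line of the pointer

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_sha256, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```
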

training_args.bin CHANGED
Binary files a/training_args.bin and b/training_args.bin differ
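
training_args.bin is the `TrainingArguments` object that the `transformers` `Trainer` pickles next to the weights; it carries the hyperparameters listed in the README above (seed 42, Adam, linear schedule, 50 epochs, native AMP). A sketch for inspecting it, where `weights_only=False` is an assumption about running on a recent PyTorch release that requires opting into unpickling arbitrary objects:

```python
import torch

# training_args.bin is written with torch.save, so torch.load reads it back.
# weights_only=False is needed on newer PyTorch versions because the file
# contains a pickled TrainingArguments object rather than plain tensors.
args = torch.load("training_args.bin", weights_only=False)

print(args.seed)               # expected: 42
print(args.num_train_epochs)   # expected: 50
print(args.lr_scheduler_type)  # expected: linear
print(args.fp16)               # likely True, given "Native AMP" in the card
```
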
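
Finally, a minimal inference sketch for the checkpoint this commit finalizes. The repository id, the image path, and the presence of a saved image processor in the repo are assumptions (the card does not name the dataset or labels); the three output classes are inferred from the three-entry per-category metrics above.

```python
import torch
from PIL import Image
from transformers import SegformerForSemanticSegmentation, SegformerImageProcessor

repo_id = "aashraychegu/<this-model>"             # hypothetical id; replace with the real repo
image = Image.open("example.png").convert("RGB")  # placeholder input image

processor = SegformerImageProcessor.from_pretrained(repo_id)
model = SegformerForSemanticSegmentation.from_pretrained(repo_id)
model.eval()

inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, num_labels, H/4, W/4)

# Upsample the logits to the input resolution and take the per-pixel argmax.
upsampled = torch.nn.functional.interpolate(
    logits, size=image.size[::-1], mode="bilinear", align_corners=False
)
pred = upsampled.argmax(dim=1)[0]  # (H, W) class-index map
```
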