aashraychegu committed on
Commit 29db57d · verified · 1 Parent(s): a30051c

End of training

Files changed (4)
  1. README.md +59 -34
  2. config.json +7 -7
  3. model.safetensors +2 -2
  4. training_args.bin +0 -0
README.md CHANGED
@@ -14,12 +14,12 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
 It achieves the following results on the evaluation set:
- - Loss: 0.0290
- - Mean Iou: 0.9471
- - Mean Accuracy: 0.9710
- - Overall Accuracy: 0.9768
- - Per Category Iou: [0.9608005440708357, 0.9099221006602166, 0.9704331381855318]
- - Per Category Accuracy: [0.9824206687592316, 0.9426348262866555, 0.987965611683444]
 
 ## Model description
 
@@ -44,38 +44,63 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
- - num_epochs: 25
 - mixed_precision_training: Native AMP
 
 ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Per Category Iou | Per Category Accuracy |
- |:-------------:|:-----:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:------------------------------------------------------------:|:------------------------------------------------------------:|
- | 0.0862 | 1.0 | 352 | 0.0576 | 0.9232 | 0.9592 | 0.9659 | [0.9447371839102795, 0.8673794852190165, 0.9574680953552409] | [0.968534005122562, 0.9283290191012513, 0.9806639382372657] |
- | 0.0656 | 2.0 | 704 | 0.0545 | 0.9238 | 0.9570 | 0.9669 | [0.9478803787782473, 0.8629377451136709, 0.9606714386330129] | [0.9722807746172916, 0.9111279737034862, 0.9876936131061734] |
- | 0.0618 | 3.0 | 1056 | 0.0534 | 0.9237 | 0.9564 | 0.9671 | [0.9508687060941174, 0.8596389521258784, 0.9607218269648252] | [0.9812787803688049, 0.9019564119732999, 0.9860254146647229] |
- | 0.0571 | 4.0 | 1408 | 0.0499 | 0.9275 | 0.9607 | 0.9682 | [0.9502329113612598, 0.8720582640084444, 0.9601695550481213] | [0.9753690914093931, 0.924181989944127, 0.9825173933819673] |
- | 0.0559 | 5.0 | 1760 | 0.0467 | 0.9299 | 0.9612 | 0.9695 | [0.9505140192865831, 0.8754764025227105, 0.9637205968987405] | [0.976237213827783, 0.9214627056007283, 0.9860167566645874] |
- | 0.0536 | 6.0 | 2112 | 0.0438 | 0.9326 | 0.9635 | 0.9704 | [0.9494054620742128, 0.8841375143227368, 0.9641436146464465] | [0.9716557991857033, 0.9326334654316629, 0.9860780801551877] |
- | 0.0523 | 7.0 | 2464 | 0.0430 | 0.9330 | 0.9628 | 0.9708 | [0.9528910425065712, 0.8812972916596699, 0.9647077920001537] | [0.9777607122771147, 0.9241165974328902, 0.9866203492096512] |
- | 0.0501 | 8.0 | 2816 | 0.0416 | 0.9343 | 0.9638 | 0.9713 | [0.9534610878112045, 0.8855375345037919, 0.9640451917096353] | [0.9785921522904953, 0.9270713187736989, 0.9856376115075716] |
- | 0.0495 | 9.0 | 3168 | 0.0397 | 0.9363 | 0.9648 | 0.9723 | [0.9545006031758507, 0.8881574910338527, 0.9661569365523052] | [0.97816237529845, 0.9291447035854092, 0.9871201770011974] |
- | 0.0464 | 10.0 | 3520 | 0.0387 | 0.9381 | 0.9667 | 0.9727 | [0.9535058676791107, 0.895511422338478, 0.9654228115593686] | [0.9749859398030879, 0.9391499873921522, 0.9858483740929799] |
- | 0.0469 | 11.0 | 3872 | 0.0375 | 0.9379 | 0.9655 | 0.9729 | [0.9549214957250483, 0.891720146026617, 0.9669602902499821] | [0.9773053391751129, 0.9304895573270372, 0.9885620293319832] |
- | 0.046 | 12.0 | 4224 | 0.0371 | 0.9390 | 0.9665 | 0.9733 | [0.9549188445906526, 0.894997463400181, 0.9672095088085364] | [0.9775963126700038, 0.9344942222656819, 0.9874873392060299] |
- | 0.0459 | 13.0 | 4576 | 0.0376 | 0.9393 | 0.9674 | 0.9732 | [0.9567727152562366, 0.8962463100368819, 0.9647967766370947] | [0.9818777935729291, 0.9374800577047236, 0.9829219394226549] |
- | 0.0442 | 14.0 | 4928 | 0.0375 | 0.9378 | 0.9654 | 0.9731 | [0.9582913046479526, 0.8885196793391749, 0.9665944535904781] | [0.9867372712504237, 0.9245328196189752, 0.9850417538509562] |
- | 0.0441 | 15.0 | 5280 | 0.0351 | 0.9408 | 0.9668 | 0.9743 | [0.9573946237991694, 0.8963420497950646, 0.9685835277021041] | [0.9790010413152688, 0.9318911491290942, 0.9896500148911915] |
- | 0.0424 | 16.0 | 5632 | 0.0333 | 0.9430 | 0.9691 | 0.9750 | [0.9579484854107692, 0.9029111339543006, 0.9680899513313485] | [0.9799520026370737, 0.941103807023036, 0.9863855244712643] |
- | 0.0422 | 17.0 | 5984 | 0.0325 | 0.9440 | 0.9693 | 0.9755 | [0.960526018065147, 0.9033327928010478, 0.9680751487957986] | [0.9829515905300688, 0.9384147284099078, 0.9865726973968797] |
- | 0.0412 | 18.0 | 6336 | 0.0329 | 0.9438 | 0.9695 | 0.9752 | [0.9574622081054572, 0.9055448607955255, 0.9682452665078278] | [0.9790884406297427, 0.9425620270828011, 0.9868754080312248] |
- | 0.0408 | 19.0 | 6688 | 0.0314 | 0.9450 | 0.9703 | 0.9758 | [0.9586636871431746, 0.9072926807452817, 0.9690543615214298] | [0.9803788121503738, 0.9439052138194869, 0.9866635429617163] |
- | 0.0391 | 20.0 | 7040 | 0.0314 | 0.9441 | 0.9690 | 0.9757 | [0.959128342970837, 0.902939786042012, 0.9702891936150936] | [0.980902051876667, 0.9368478835131284, 0.9892370339721507] |
- | 0.0397 | 21.0 | 7392 | 0.0304 | 0.9456 | 0.9701 | 0.9763 | [0.9598212480415552, 0.906699930083766, 0.9703717586568048] | [0.9813465827619545, 0.9404109518551305, 0.9885344016054268] |
- | 0.0392 | 22.0 | 7744 | 0.0297 | 0.9464 | 0.9707 | 0.9765 | [0.9607482194751374, 0.9084845650132211, 0.9700107510946303] | [0.9827931258804484, 0.9417580815076976, 0.9875174387719378] |
- | 0.0383 | 23.0 | 8096 | 0.0296 | 0.9464 | 0.9709 | 0.9764 | [0.9596652006117967, 0.9096179359866197, 0.9698528705786332] | [0.9807504984978905, 0.9444144788383055, 0.9875228155693943] |
- | 0.0379 | 24.0 | 8448 | 0.0292 | 0.9467 | 0.9706 | 0.9767 | [0.9608055853550872, 0.9088181850778698, 0.9704877713838735] | [0.9827165687826889, 0.9406717333169016, 0.9884114527535798] |
- | 0.0365 | 25.0 | 8800 | 0.0290 | 0.9471 | 0.9710 | 0.9768 | [0.9608005440708357, 0.9099221006602166, 0.9704331381855318] | [0.9824206687592316, 0.9426348262866555, 0.987965611683444] |
 
 
  ### Framework versions
 
 
 This model is a fine-tuned version of [](https://huggingface.co/) on an unknown dataset.
 It achieves the following results on the evaluation set:
+ - Loss: 0.0152
+ - Mean Iou: 0.9578
+ - Mean Accuracy: 0.9770
+ - Overall Accuracy: 0.9815
+ - Per Category Iou: [0.966974556454048, 0.9306800202753192, 0.9758789229664083]
+ - Per Category Accuracy: [0.986415203459849, 0.9545545504757966, 0.9898833477760938]
 
 ## Model description
 
 
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
+ - num_epochs: 50
 - mixed_precision_training: Native AMP
 
 ### Training results
 
+ | Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Per Category Iou | Per Category Accuracy |
+ |:-------------:|:-----:|:-----:|:---------------:|:--------:|:-------------:|:----------------:|:------------------------------------------------------------:|:------------------------------------------------------------:|
+ | 0.0646 | 1.0 | 703 | 0.0566 | 0.9247 | 0.9600 | 0.9667 | [0.9494255778418639, 0.867692450590415, 0.9569388095269572] | [0.9761190606426763, 0.9255943024845551, 0.9782301302123408] |
+ | 0.0599 | 2.0 | 1406 | 0.0497 | 0.9282 | 0.9616 | 0.9684 | [0.9477319173718886, 0.8753546288784326, 0.9613964795143571] | [0.9711497669822643, 0.9303257017214881, 0.983177895106294] |
+ | 0.0551 | 3.0 | 2109 | 0.0487 | 0.9297 | 0.9630 | 0.9689 | [0.9520168030120617, 0.8775963388947718, 0.9593654968038946] | [0.9792121369623757, 0.9314351084836408, 0.9782160997896858] |
+ | 0.0538 | 4.0 | 2812 | 0.0461 | 0.9301 | 0.9606 | 0.9697 | [0.9526700363985638, 0.8742364766071521, 0.9633119672224237] | [0.9796255350132945, 0.9152524234283989, 0.986801690344372] |
+ | 0.0537 | 5.0 | 3515 | 0.0459 | 0.9293 | 0.9605 | 0.9693 | [0.9480194887278883, 0.8755894234719767, 0.9644158890544811] | [0.9695323626038072, 0.9222969866729543, 0.989726930656922] |
+ | 0.0502 | 6.0 | 4218 | 0.0419 | 0.9343 | 0.9634 | 0.9714 | [0.9527888538779837, 0.8847465897421206, 0.9654703601812225] | [0.976308799435177, 0.925932784344352, 0.9880231202290629] |
+ | 0.0485 | 7.0 | 4921 | 0.0426 | 0.9324 | 0.9618 | 0.9707 | [0.952703644674004, 0.8794287150835857, 0.9650298528805881] | [0.9762879243141335, 0.9201165442085169, 0.9890906398334286] |
+ | 0.0467 | 8.0 | 5624 | 0.0391 | 0.9369 | 0.9657 | 0.9723 | [0.9570939850601026, 0.8892999282991234, 0.9641725721587178] | [0.9836122270915137, 0.9306427121583465, 0.9828948498133379] |
+ | 0.0465 | 9.0 | 6327 | 0.0365 | 0.9392 | 0.9662 | 0.9736 | [0.9570166835679954, 0.8930106582106385, 0.9674431118321617] | [0.9799191676774507, 0.9305790764906415, 0.9880572447369236] |
+ | 0.0441 | 10.0 | 7030 | 0.0368 | 0.9393 | 0.9671 | 0.9733 | [0.9557842284941621, 0.896113739486814, 0.966114092503835] | [0.9781441914622425, 0.9371105515852832, 0.9859413896265214] |
+ | 0.0449 | 11.0 | 7733 | 0.0363 | 0.9395 | 0.9662 | 0.9737 | [0.9587446620984498, 0.8925112162386646, 0.9671193682825042] | [0.9841777180627991, 0.9274434801225682, 0.9869006864166631] |
+ | 0.0429 | 12.0 | 8436 | 0.0362 | 0.9387 | 0.9663 | 0.9733 | [0.9568234173066003, 0.8919248505170725, 0.9672752936824205] | [0.9807715727354733, 0.9313716624751807, 0.986681375204542] |
+ | 0.043 | 13.0 | 9139 | 0.0359 | 0.9399 | 0.9674 | 0.9736 | [0.9583618298582752, 0.8954311403447485, 0.9658975869823546] | [0.9824077197610704, 0.9351835437823924, 0.9845412435692529] |
+ | 0.0403 | 14.0 | 9842 | 0.0327 | 0.9431 | 0.9685 | 0.9752 | [0.9605210947016356, 0.8999327500995064, 0.9687008045143891] | [0.9845226714091516, 0.9340792478377873, 0.9869475769904131] |
+ | 0.0396 | 15.0 | 10545 | 0.0317 | 0.9430 | 0.9687 | 0.9751 | [0.9582689044073551, 0.9018446354914778, 0.9689720395673985] | [0.9803209527379552, 0.9375974567773319, 0.9880329157127937] |
+ | 0.0398 | 16.0 | 11248 | 0.0311 | 0.9441 | 0.9687 | 0.9758 | [0.9600781473937521, 0.9018120203453935, 0.9705198767201947] | [0.9833703518813173, 0.9337154414777602, 0.9891511845852599] |
+ | 0.0394 | 17.0 | 11951 | 0.0315 | 0.9428 | 0.9681 | 0.9752 | [0.9599952844492292, 0.8990866834010299, 0.9692707279290462] | [0.9834365741883817, 0.932572065746635, 0.9882736728656242] |
+ | 0.0373 | 18.0 | 12654 | 0.0298 | 0.9453 | 0.9697 | 0.9762 | [0.9604100374735833, 0.9056672192236825, 0.96995081765577] | [0.9824364503507406, 0.9378770943608746, 0.9886761889355877] |
+ | 0.0367 | 19.0 | 13357 | 0.0286 | 0.9465 | 0.9703 | 0.9767 | [0.9607981425237361, 0.9074436356815473, 0.9711230723862715] | [0.9831492425992236, 0.939067046409727, 0.9888305367096034] |
+ | 0.0351 | 20.0 | 14060 | 0.0278 | 0.9471 | 0.9708 | 0.9770 | [0.9618333883616247, 0.9086763944768684, 0.9708953906361056] | [0.9835691087261089, 0.9400169996572258, 0.9886674084372189] |
+ | 0.0354 | 21.0 | 14763 | 0.0285 | 0.9464 | 0.9701 | 0.9767 | [0.9602311140889335, 0.9075971976817253, 0.971288380236899] | [0.9805902546456458, 0.9392485003967921, 0.9904217581362069] |
+ | 0.0346 | 22.0 | 15466 | 0.0272 | 0.9474 | 0.9707 | 0.9772 | [0.9633102052165451, 0.9075619288178078, 0.9712912214581088] | [0.9877988385981887, 0.9366896178642719, 0.9877408755495605] |
+ | 0.0338 | 23.0 | 16169 | 0.0274 | 0.9475 | 0.9710 | 0.9771 | [0.9621579382676179, 0.9097149647578355, 0.9706881239127263] | [0.9833567220330544, 0.9412283931828203, 0.9885273185825774] |
+ | 0.033 | 24.0 | 16872 | 0.0248 | 0.9496 | 0.9719 | 0.9781 | [0.9633388370586602, 0.9126597100292737, 0.9727672608399773] | [0.9847982679687262, 0.94123947327555, 0.9897510890585125] |
+ | 0.0328 | 25.0 | 17575 | 0.0258 | 0.9484 | 0.9711 | 0.9777 | [0.962009361478448, 0.9105945715118131, 0.972718363850045] | [0.9830181147234991, 0.9396734668722746, 0.990675360103791] |
+ | 0.0317 | 26.0 | 18278 | 0.0253 | 0.9492 | 0.9716 | 0.9779 | [0.9639558319000139, 0.911781219914515, 0.9717356738019733] | [0.9860431702641673, 0.939863355704706, 0.9889956162038476] |
+ | 0.0314 | 27.0 | 18981 | 0.0240 | 0.9508 | 0.9735 | 0.9783 | [0.9632096299431748, 0.9178493526029053, 0.9714077953769692] | [0.9846306889201024, 0.9490954287160887, 0.9868323630270145] |
+ | 0.031 | 28.0 | 19684 | 0.0225 | 0.9519 | 0.9738 | 0.9789 | [0.9639432214860649, 0.9191291970986417, 0.9727323390586772] | [0.9846959927218408, 0.9485333186423948, 0.9882958581706704] |
+ | 0.0304 | 29.0 | 20387 | 0.0222 | 0.9520 | 0.9738 | 0.9790 | [0.9646276170673505, 0.9186786670273801, 0.9728235086526744] | [0.9854814689302406, 0.9476905128862128, 0.988319968447955] |
+ | 0.03 | 30.0 | 21090 | 0.0222 | 0.9516 | 0.9735 | 0.9788 | [0.9640884759202785, 0.9181744197938652, 0.9725126150057635] | [0.9859090299478983, 0.9467068901317071, 0.9879970937294139] |
+ | 0.0291 | 31.0 | 21793 | 0.0222 | 0.9520 | 0.9737 | 0.9790 | [0.9644441535927152, 0.9183966340639266, 0.973123149831471] | [0.9853206084590358, 0.9467745885000796, 0.988864109975414] |
+ | 0.0288 | 32.0 | 22496 | 0.0216 | 0.9527 | 0.9744 | 0.9792 | [0.9635312975815179, 0.9213747265447779, 0.973258211843832] | [0.9833080284045524, 0.9510300129067109, 0.9887883360682158] |
+ | 0.0274 | 33.0 | 23199 | 0.0215 | 0.9527 | 0.9740 | 0.9793 | [0.9640157857518139, 0.9206728124821452, 0.9735005954174129] | [0.9843878695121234, 0.9483662887400095, 0.9893775350708613] |
+ | 0.0278 | 34.0 | 23902 | 0.0201 | 0.9538 | 0.9749 | 0.9797 | [0.9645863263073253, 0.9230425214866308, 0.9736614810798143] | [0.9850184523223797, 0.9508885071458663, 0.9886674084372189] |
+ | 0.0277 | 35.0 | 24605 | 0.0199 | 0.9542 | 0.9753 | 0.9798 | [0.9648387629037706, 0.9242740259952547, 0.9734575868219909] | [0.9851679759970793, 0.9527511805364656, 0.9880083198175126] |
+ | 0.027 | 36.0 | 25308 | 0.0190 | 0.9546 | 0.9751 | 0.9801 | [0.9650972384872185, 0.9240644176546186, 0.9747245960463098] | [0.9849486973025233, 0.9502703278101333, 0.9899684840482691] |
+ | 0.0266 | 37.0 | 26011 | 0.0190 | 0.9546 | 0.9748 | 0.9802 | [0.9660706980566042, 0.9226321694052272, 0.9749641496350198] | [0.9865475517272653, 0.9474771761818693, 0.9902573130078139] |
+ | 0.0256 | 38.0 | 26714 | 0.0186 | 0.9554 | 0.9757 | 0.9804 | [0.9655268292420411, 0.9260322783164191, 0.9746364243430061] | [0.9852960464704602, 0.9524953701433246, 0.9893096972992093] |
+ | 0.0262 | 39.0 | 27417 | 0.0180 | 0.9555 | 0.9757 | 0.9805 | [0.966309913000785, 0.9256650052387354, 0.9744492221886044] | [0.9861509436967797, 0.9520694054612898, 0.9890100753703193] |
+ | 0.0248 | 40.0 | 28120 | 0.0177 | 0.9557 | 0.9756 | 0.9806 | [0.9660911749634843, 0.9258013861826082, 0.9751373606294378] | [0.9859783931577905, 0.9509224960789698, 0.9899530798954259] |
+ | 0.0249 | 41.0 | 28823 | 0.0178 | 0.9555 | 0.9754 | 0.9806 | [0.9665350924463278, 0.9247239384112762, 0.9751124707269705] | [0.986670425901285, 0.9496893616146678, 0.9899208453603083] |
+ | 0.0241 | 42.0 | 29526 | 0.0173 | 0.9561 | 0.9759 | 0.9807 | [0.9669016375167447, 0.9264132616994011, 0.9748969691253521] | [0.9869660625770613, 0.9512582628169447, 0.9894373142087342] |
+ | 0.0239 | 43.0 | 30229 | 0.0170 | 0.9566 | 0.9765 | 0.9808 | [0.9657028012078982, 0.9293126752941933, 0.9747772529312919] | [0.9851113112838957, 0.9551385811653776, 0.9891693536982271] |
+ | 0.0234 | 44.0 | 30932 | 0.0166 | 0.9563 | 0.9758 | 0.9810 | [0.9678707868291188, 0.9255875185979531, 0.975463092811191] | [0.9880463982644232, 0.9494462284447401, 0.9899507568039292] |
+ | 0.0228 | 45.0 | 31635 | 0.0163 | 0.9569 | 0.9764 | 0.9811 | [0.9667985174141136, 0.9285307472949479, 0.9753903785250766] | [0.9863655913260211, 0.9531677121665817, 0.9897135346001182] |
+ | 0.0236 | 46.0 | 32338 | 0.0160 | 0.9572 | 0.9767 | 0.9812 | [0.9669272848057375, 0.9294111193088093, 0.9753600048069734] | [0.98654358224271, 0.9541473420948784, 0.989392186734557] |
+ | 0.0226 | 47.0 | 33041 | 0.0158 | 0.9572 | 0.9765 | 0.9812 | [0.9663055417479156, 0.929422808459136, 0.9759668808926588] | [0.9857908703396774, 0.9534334846611139, 0.9903217602033646] |
+ | 0.0223 | 48.0 | 33744 | 0.0154 | 0.9579 | 0.9771 | 0.9814 | [0.9667858095653383, 0.9314465218695431, 0.9755411515813254] | [0.9861092962245191, 0.9556930349948158, 0.9895239860837632] |
+ | 0.0217 | 49.0 | 34447 | 0.0153 | 0.9578 | 0.9769 | 0.9814 | [0.9666844542687175, 0.930702020324814, 0.9759177123723752] | [0.9859492257963569, 0.95489544799545, 0.9899713627567528] |
+ | 0.022 | 50.0 | 35150 | 0.0152 | 0.9578 | 0.9770 | 0.9815 | [0.966974556454048, 0.9306800202753192, 0.9758789229664083] | [0.986415203459849, 0.9545545504757966, 0.9898833477760938] |
 
 
  ### Framework versions
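
The evaluation fields in the updated card (Loss, Mean Iou, Mean Accuracy, Overall Accuracy, and the two per-category arrays) match the output keys of the `mean_iou` metric from the `evaluate` library, so they were very likely computed with it. The sketch below is a minimal illustration of that computation, not the author's script; the class count of 3 and the `ignore_index` value are assumptions inferred from the three-element per-category arrays.

```python
# Minimal sketch: computing the fields reported in the evaluation summary above.
import numpy as np
import evaluate

mean_iou = evaluate.load("mean_iou")

# Toy prediction/label maps standing in for real model output and ground truth.
pred = np.array([[0, 1, 2], [2, 1, 0]])
label = np.array([[0, 1, 2], [2, 0, 0]])

results = mean_iou.compute(
    predictions=[pred],
    references=[label],
    num_labels=3,      # assumed: three segmentation classes
    ignore_index=255,  # assumed: common convention for unlabeled pixels
    reduce_labels=False,
)

# Same fields as the card: mean/per-category IoU and accuracy, plus overall accuracy.
print(results["mean_iou"], results["mean_accuracy"], results["overall_accuracy"])
print(results["per_category_iou"], results["per_category_accuracy"])
```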
config.json CHANGED
@@ -4,19 +4,19 @@
  ],
  "attention_probs_dropout_prob": 0.0,
  "classifier_dropout_prob": 0.1,
- "decoder_hidden_size": 896,
+ "decoder_hidden_size": 768,
  "depths": [
- 6,
- 4,
  3,
- 2
+ 8,
+ 18,
+ 3
  ],
  "drop_path_rate": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_sizes": [
- 64,
  128,
+ 256,
  384,
  512
  ],
@@ -40,9 +40,9 @@
  ],
  "model_type": "segformer",
  "num_attention_heads": [
- 1,
  2,
- 6,
+ 4,
+ 8,
  8
  ],
  "num_channels": 3,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fbe66c88671698e6cc75c7bd5673465ba55bb3c9463e71b1c84a22acf0a4f26f
- size 94992836
+ oid sha256:a8b05602a2e9b9f5cee01dd502fe34beea093badae4bd8295e9477d56119c76b
+ size 309088476
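
The two lines above are a Git LFS pointer: the repository records the file's SHA-256 and byte size rather than the weights themselves. A small sketch for checking a downloaded checkpoint against this pointer is below; the `repo_id` is a placeholder, since the repository name is not shown on this page.

```python
# Minimal sketch: verify a downloaded model.safetensors against the LFS pointer above.
import hashlib
from pathlib import Path

from huggingface_hub import hf_hub_download

EXPECTED_SHA256 = "a8b05602a2e9b9f5cee01dd502fe34beea093badae4bd8295e9477d56119c76b"
EXPECTED_SIZE = 309088476  # bytes, from the "+" size line

# repo_id is a placeholder -- substitute the repository this commit belongs to.
path = Path(hf_hub_download(repo_id="<user>/<model>", filename="model.safetensors"))

assert path.stat().st_size == EXPECTED_SIZE, "size does not match the LFS pointer"

sha256 = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert sha256.hexdigest() == EXPECTED_SHA256, "sha256 does not match the LFS pointer"
print("model.safetensors matches the pointer recorded in this commit")
```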
training_args.bin CHANGED
Binary files a/training_args.bin and b/training_args.bin differ