qubvel-hf (HF staff) committed on
Commit c5f6d10
1 Parent(s): 6fbbfd5

End of training

Files changed (5)
  1. README.md +36 -56
  2. config.json +13 -11
  3. model.safetensors +2 -2
  4. preprocessor_config.json +4 -2
  5. training_args.bin +1 -1
README.md CHANGED
@@ -1,5 +1,5 @@
  ---
- base_model: sbchoi/rtdetr_r50vd_coco_o365
+ base_model: PekingU/rtdetr_r50vd_coco_o365
  tags:
  - generated_from_trainer
  model-index:
@@ -12,31 +12,31 @@ should probably proofread and complete it, then remove this comment. -->

  # rtdetr-r50-cppe5-finetune

- This model is a fine-tuned version of [sbchoi/rtdetr_r50vd_coco_o365](https://huggingface.co/sbchoi/rtdetr_r50vd_coco_o365) on an unknown dataset.
+ This model is a fine-tuned version of [PekingU/rtdetr_r50vd_coco_o365](https://huggingface.co/PekingU/rtdetr_r50vd_coco_o365) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 12.3857
- - Map: 0.3012
- - Map 50: 0.6058
- - Map 75: 0.2868
- - Map Small: 0.0129
- - Map Medium: 0.2252
- - Map Large: 0.4144
- - Mar 1: 0.2713
- - Mar 10: 0.4318
- - Mar 100: 0.4453
- - Mar Small: 0.0125
- - Mar Medium: 0.3209
- - Mar Large: 0.6474
- - Map Coverall: 0.4501
- - Mar 100 Coverall: 0.6538
- - Map Face Shield: 0.538
- - Mar 100 Face Shield: 0.7118
- - Map Gloves: 0.2523
- - Mar 100 Gloves: 0.3203
- - Map Goggles: 0.1689
- - Mar 100 Goggles: 0.3345
- - Map Mask: 0.0965
- - Mar 100 Mask: 0.2059
+ - Loss: 9.9243
+ - Map: 0.4532
+ - Map 50: 0.66
+ - Map 75: 0.5228
+ - Map Small: 0.431
+ - Map Medium: 0.3515
+ - Map Large: 0.5415
+ - Mar 1: 0.3644
+ - Mar 10: 0.6286
+ - Mar 100: 0.6927
+ - Mar Small: 0.5962
+ - Mar Medium: 0.5879
+ - Mar Large: 0.81
+ - Map Coverall: 0.4755
+ - Mar 100 Coverall: 0.7974
+ - Map Face Shield: 0.4919
+ - Mar 100 Face Shield: 0.7176
+ - Map Gloves: 0.3847
+ - Mar 100 Gloves: 0.6593
+ - Map Goggles: 0.3127
+ - Mar 100 Goggles: 0.5793
+ - Map Mask: 0.6013
+ - Mar 100 Mask: 0.7098

  ## Model description

@@ -61,42 +61,22 @@ The following hyperparameters were used during training:
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
- - num_epochs: 30
+ - num_epochs: 10

  ### Training results

  | Training Loss | Epoch | Step | Validation Loss | Map | Map 50 | Map 75 | Map Small | Map Medium | Map Large | Mar 1 | Mar 10 | Mar 100 | Mar Small | Mar Medium | Mar Large | Map Coverall | Mar 100 Coverall | Map Face Shield | Mar 100 Face Shield | Map Gloves | Mar 100 Gloves | Map Goggles | Mar 100 Goggles | Map Mask | Mar 100 Mask |
  |:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|:----------:|:---------:|:------:|:------:|:-------:|:---------:|:----------:|:---------:|:------------:|:----------------:|:---------------:|:-------------------:|:----------:|:--------------:|:-----------:|:---------------:|:--------:|:------------:|
- | No log | 1.0 | 107 | 20.4512 | 0.023 | 0.0516 | 0.0164 | 0.0159 | 0.0136 | 0.0222 | 0.0738 | 0.1508 | 0.2152 | 0.0188 | 0.0978 | 0.3017 | 0.0905 | 0.5946 | 0.0132 | 0.2304 | 0.0067 | 0.1429 | 0.0002 | 0.02 | 0.0044 | 0.088 |
- | No log | 2.0 | 214 | 15.8853 | 0.0755 | 0.2004 | 0.0363 | 0.0356 | 0.073 | 0.0977 | 0.1369 | 0.2646 | 0.3181 | 0.1584 | 0.2468 | 0.4432 | 0.1217 | 0.4721 | 0.0112 | 0.3405 | 0.1023 | 0.3272 | 0.0061 | 0.18 | 0.1361 | 0.2707 |
- | No log | 3.0 | 321 | 15.1844 | 0.1008 | 0.2625 | 0.0554 | 0.0565 | 0.1052 | 0.175 | 0.1496 | 0.2625 | 0.317 | 0.0952 | 0.2395 | 0.4823 | 0.1386 | 0.541 | 0.0298 | 0.3937 | 0.1292 | 0.2679 | 0.058 | 0.1462 | 0.1483 | 0.2364 |
- | No log | 4.0 | 428 | 15.1909 | 0.1095 | 0.287 | 0.0624 | 0.033 | 0.0999 | 0.2029 | 0.1469 | 0.2566 | 0.2886 | 0.0642 | 0.2004 | 0.449 | 0.1309 | 0.509 | 0.1654 | 0.4747 | 0.089 | 0.1629 | 0.0892 | 0.1631 | 0.073 | 0.1333 |
- | 35.7915 | 5.0 | 535 | 15.8705 | 0.1084 | 0.2837 | 0.0601 | 0.0798 | 0.0977 | 0.1804 | 0.1355 | 0.2334 | 0.2559 | 0.0812 | 0.1947 | 0.3573 | 0.1679 | 0.5266 | 0.1797 | 0.4316 | 0.0706 | 0.1304 | 0.0662 | 0.0954 | 0.0573 | 0.0956 |
- | 35.7915 | 6.0 | 642 | 14.9585 | 0.1051 | 0.2666 | 0.0677 | 0.0066 | 0.0707 | 0.1678 | 0.1427 | 0.2623 | 0.297 | 0.0311 | 0.2158 | 0.4682 | 0.2075 | 0.477 | 0.1254 | 0.4671 | 0.0785 | 0.2259 | 0.0548 | 0.1708 | 0.0593 | 0.144 |
- | 35.7915 | 7.0 | 749 | 14.4975 | 0.1285 | 0.3313 | 0.0848 | 0.0736 | 0.0933 | 0.2195 | 0.1724 | 0.3019 | 0.3354 | 0.0792 | 0.2435 | 0.5139 | 0.1934 | 0.5288 | 0.1537 | 0.4899 | 0.1562 | 0.2978 | 0.076 | 0.2015 | 0.0631 | 0.1591 |
- | 35.7915 | 8.0 | 856 | 16.2306 | 0.0912 | 0.2336 | 0.0604 | 0.0254 | 0.0389 | 0.1618 | 0.1332 | 0.2173 | 0.2402 | 0.0254 | 0.1494 | 0.3835 | 0.2184 | 0.5167 | 0.117 | 0.3937 | 0.0307 | 0.0871 | 0.0477 | 0.1108 | 0.0424 | 0.0929 |
- | 35.7915 | 9.0 | 963 | 14.3495 | 0.1336 | 0.3304 | 0.0874 | 0.0217 | 0.0814 | 0.249 | 0.1824 | 0.3111 | 0.3385 | 0.0567 | 0.2286 | 0.5461 | 0.2756 | 0.5847 | 0.1342 | 0.5089 | 0.0473 | 0.1665 | 0.1129 | 0.2446 | 0.0982 | 0.188 |
- | 28.4929 | 10.0 | 1070 | 15.2865 | 0.1176 | 0.2704 | 0.0906 | 0.0172 | 0.0694 | 0.2019 | 0.1628 | 0.2685 | 0.2953 | 0.0704 | 0.2001 | 0.4525 | 0.2353 | 0.5311 | 0.171 | 0.5165 | 0.0212 | 0.0795 | 0.0882 | 0.1923 | 0.0723 | 0.1573 |
- | 28.4929 | 11.0 | 1177 | 14.7075 | 0.1318 | 0.3045 | 0.1019 | 0.0269 | 0.0831 | 0.2289 | 0.1764 | 0.2815 | 0.3073 | 0.0619 | 0.1917 | 0.5061 | 0.2883 | 0.5766 | 0.1773 | 0.4861 | 0.0484 | 0.125 | 0.0653 | 0.2046 | 0.0795 | 0.144 |
- | 28.4929 | 12.0 | 1284 | 15.1734 | 0.12 | 0.2793 | 0.0902 | 0.0656 | 0.084 | 0.2082 | 0.1652 | 0.2679 | 0.2941 | 0.0692 | 0.1888 | 0.4528 | 0.2795 | 0.6005 | 0.1641 | 0.4924 | 0.0358 | 0.1004 | 0.062 | 0.1185 | 0.0586 | 0.1587 |
- | 28.4929 | 13.0 | 1391 | 15.8832 | 0.1302 | 0.2688 | 0.1197 | 0.0001 | 0.0961 | 0.191 | 0.1629 | 0.2505 | 0.2715 | 0.0012 | 0.1825 | 0.4264 | 0.3168 | 0.5752 | 0.2343 | 0.5177 | 0.0208 | 0.0732 | 0.0562 | 0.1092 | 0.0227 | 0.0822 |
- | 28.4929 | 14.0 | 1498 | 15.8669 | 0.1164 | 0.2412 | 0.0971 | 0.0002 | 0.0624 | 0.1909 | 0.1561 | 0.2399 | 0.2614 | 0.0012 | 0.177 | 0.4288 | 0.2794 | 0.5342 | 0.1948 | 0.5038 | 0.0215 | 0.071 | 0.0649 | 0.1292 | 0.0214 | 0.0689 |
- | 28.259 | 15.0 | 1605 | 16.1133 | 0.1075 | 0.2372 | 0.0877 | 0.0007 | 0.072 | 0.1903 | 0.1446 | 0.2326 | 0.2526 | 0.0017 | 0.1849 | 0.4213 | 0.261 | 0.4721 | 0.1704 | 0.5253 | 0.0191 | 0.0719 | 0.0611 | 0.1262 | 0.0257 | 0.0676 |
- | 28.259 | 16.0 | 1712 | 16.1798 | 0.106 | 0.2407 | 0.0809 | 0.0016 | 0.0549 | 0.2039 | 0.1454 | 0.2243 | 0.2445 | 0.0046 | 0.1853 | 0.4097 | 0.2231 | 0.4414 | 0.1687 | 0.4759 | 0.0399 | 0.0821 | 0.079 | 0.1738 | 0.0192 | 0.0493 |
- | 28.259 | 17.0 | 1819 | 16.0059 | 0.1075 | 0.2378 | 0.0793 | 0.0056 | 0.0596 | 0.1896 | 0.1585 | 0.2588 | 0.2855 | 0.0283 | 0.1918 | 0.4598 | 0.2349 | 0.5838 | 0.1818 | 0.5127 | 0.0278 | 0.0719 | 0.059 | 0.1969 | 0.0342 | 0.0622 |
- | 28.259 | 18.0 | 1926 | 14.8009 | 0.1411 | 0.3297 | 0.1189 | 0.0084 | 0.0892 | 0.2252 | 0.1852 | 0.2943 | 0.3245 | 0.0512 | 0.2204 | 0.52 | 0.3161 | 0.5401 | 0.1967 | 0.5316 | 0.0501 | 0.1179 | 0.0952 | 0.2785 | 0.0476 | 0.1547 |
- | 29.7207 | 19.0 | 2033 | 15.1573 | 0.1324 | 0.2861 | 0.1113 | 0.0018 | 0.0722 | 0.2445 | 0.1732 | 0.2672 | 0.2922 | 0.0046 | 0.2154 | 0.4968 | 0.3156 | 0.5189 | 0.1918 | 0.538 | 0.0464 | 0.1103 | 0.0722 | 0.1815 | 0.0363 | 0.1124 |
- | 29.7207 | 20.0 | 2140 | 14.2853 | 0.167 | 0.3784 | 0.1199 | 0.0119 | 0.115 | 0.2635 | 0.1809 | 0.2935 | 0.3198 | 0.0481 | 0.2312 | 0.5035 | 0.3428 | 0.5171 | 0.2675 | 0.5481 | 0.0952 | 0.2188 | 0.0721 | 0.1831 | 0.0574 | 0.132 |
- | 29.7207 | 21.0 | 2247 | 14.2290 | 0.1704 | 0.4079 | 0.1158 | 0.0292 | 0.1107 | 0.273 | 0.1759 | 0.3036 | 0.3375 | 0.0826 | 0.2659 | 0.5283 | 0.3023 | 0.4468 | 0.2462 | 0.5443 | 0.1488 | 0.2996 | 0.0885 | 0.2277 | 0.0664 | 0.1693 |
- | 29.7207 | 22.0 | 2354 | 14.4412 | 0.1609 | 0.3508 | 0.1199 | 0.0069 | 0.105 | 0.2566 | 0.1897 | 0.3089 | 0.3337 | 0.0763 | 0.2411 | 0.529 | 0.3429 | 0.5405 | 0.2429 | 0.538 | 0.0984 | 0.2121 | 0.0832 | 0.2492 | 0.0373 | 0.1289 |
- | 29.7207 | 23.0 | 2461 | 14.2410 | 0.163 | 0.3848 | 0.1197 | 0.0399 | 0.1132 | 0.2656 | 0.198 | 0.3201 | 0.352 | 0.1302 | 0.2632 | 0.5417 | 0.3221 | 0.5477 | 0.2456 | 0.5709 | 0.1039 | 0.2335 | 0.0773 | 0.2354 | 0.0662 | 0.1724 |
- | 26.4188 | 24.0 | 2568 | 14.1542 | 0.1744 | 0.4102 | 0.1127 | 0.0333 | 0.1274 | 0.2733 | 0.2099 | 0.3365 | 0.3703 | 0.1357 | 0.3008 | 0.5313 | 0.2857 | 0.5027 | 0.2457 | 0.5013 | 0.1495 | 0.3125 | 0.1131 | 0.3308 | 0.0778 | 0.2044 |
- | 26.4188 | 25.0 | 2675 | 13.7413 | 0.1976 | 0.4536 | 0.1429 | 0.0431 | 0.1301 | 0.3285 | 0.2192 | 0.3511 | 0.3836 | 0.1681 | 0.2982 | 0.5611 | 0.3277 | 0.5221 | 0.2586 | 0.5392 | 0.1431 | 0.2942 | 0.1074 | 0.3108 | 0.151 | 0.2516 |
- | 26.4188 | 26.0 | 2782 | 13.4434 | 0.227 | 0.5003 | 0.1767 | 0.0231 | 0.1527 | 0.3463 | 0.236 | 0.3677 | 0.3979 | 0.1054 | 0.3107 | 0.5781 | 0.4066 | 0.5896 | 0.3141 | 0.5709 | 0.1581 | 0.2893 | 0.1419 | 0.3092 | 0.1143 | 0.2302 |
- | 26.4188 | 27.0 | 2889 | 14.0435 | 0.1966 | 0.4275 | 0.153 | 0.0209 | 0.1474 | 0.3007 | 0.2187 | 0.3461 | 0.3728 | 0.0534 | 0.2805 | 0.5637 | 0.3925 | 0.5748 | 0.2415 | 0.5582 | 0.1376 | 0.2513 | 0.1225 | 0.2908 | 0.0889 | 0.1889 |
- | 26.4188 | 28.0 | 2996 | 14.0261 | 0.2069 | 0.4365 | 0.1794 | 0.0157 | 0.1315 | 0.3286 | 0.2091 | 0.3306 | 0.3624 | 0.0873 | 0.2732 | 0.5457 | 0.4281 | 0.5964 | 0.2773 | 0.562 | 0.124 | 0.2339 | 0.1217 | 0.26 | 0.0836 | 0.1596 |
- | 25.6327 | 29.0 | 3103 | 14.2025 | 0.1939 | 0.4264 | 0.1679 | 0.0111 | 0.1224 | 0.3067 | 0.1986 | 0.31 | 0.335 | 0.0571 | 0.2238 | 0.534 | 0.4047 | 0.577 | 0.2969 | 0.5595 | 0.1016 | 0.1866 | 0.1048 | 0.2108 | 0.0617 | 0.1409 |
- | 25.6327 | 30.0 | 3210 | 14.1767 | 0.1903 | 0.4305 | 0.1466 | 0.0065 | 0.1174 | 0.319 | 0.2022 | 0.3125 | 0.3319 | 0.05 | 0.2396 | 0.5334 | 0.3754 | 0.5459 | 0.3062 | 0.5354 | 0.0796 | 0.1652 | 0.124 | 0.2692 | 0.0664 | 0.1436 |
+ | No log | 1.0 | 107 | 138.4975 | 0.0441 | 0.0808 | 0.0353 | 0.0 | 0.0247 | 0.056 | 0.0547 | 0.1243 | 0.1461 | 0.0 | 0.082 | 0.2388 | 0.2204 | 0.5937 | 0.0001 | 0.0759 | 0.0 | 0.0201 | 0.0 | 0.02 | 0.0 | 0.0209 |
+ | No log | 2.0 | 214 | 23.3748 | 0.0916 | 0.1786 | 0.0747 | 0.0461 | 0.0467 | 0.0912 | 0.1138 | 0.269 | 0.3528 | 0.2138 | 0.2623 | 0.4998 | 0.3271 | 0.6284 | 0.0041 | 0.3076 | 0.0078 | 0.2701 | 0.0034 | 0.2246 | 0.1156 | 0.3333 |
+ | No log | 3.0 | 321 | 13.3702 | 0.2057 | 0.3793 | 0.196 | 0.1007 | 0.1548 | 0.3415 | 0.2296 | 0.4115 | 0.4959 | 0.2755 | 0.4268 | 0.7117 | 0.4253 | 0.6986 | 0.0393 | 0.5051 | 0.143 | 0.4183 | 0.1092 | 0.3938 | 0.3119 | 0.4636 |
+ | No log | 4.0 | 428 | 12.8750 | 0.2236 | 0.4139 | 0.218 | 0.12 | 0.1699 | 0.4095 | 0.225 | 0.4324 | 0.5089 | 0.296 | 0.4525 | 0.7051 | 0.3626 | 0.6342 | 0.0964 | 0.5253 | 0.117 | 0.3996 | 0.2042 | 0.4631 | 0.3377 | 0.5222 |
+ | 90.5185 | 5.0 | 535 | 11.9853 | 0.2701 | 0.4731 | 0.2752 | 0.192 | 0.1984 | 0.475 | 0.2573 | 0.4629 | 0.5406 | 0.357 | 0.4739 | 0.7304 | 0.4639 | 0.6973 | 0.1397 | 0.5443 | 0.2001 | 0.5134 | 0.2089 | 0.4354 | 0.3381 | 0.5124 |
+ | 90.5185 | 6.0 | 642 | 12.6566 | 0.2422 | 0.4501 | 0.2296 | 0.2014 | 0.1863 | 0.425 | 0.2339 | 0.4469 | 0.5379 | 0.3612 | 0.4893 | 0.7289 | 0.3361 | 0.5752 | 0.1231 | 0.5329 | 0.1813 | 0.5272 | 0.2314 | 0.5108 | 0.3393 | 0.5436 |
+ | 90.5185 | 7.0 | 749 | 12.7385 | 0.2411 | 0.432 | 0.2334 | 0.1769 | 0.1784 | 0.442 | 0.2291 | 0.4407 | 0.5321 | 0.3208 | 0.4863 | 0.7248 | 0.3662 | 0.6527 | 0.115 | 0.5114 | 0.1671 | 0.4969 | 0.2244 | 0.4677 | 0.3328 | 0.532 |
+ | 90.5185 | 8.0 | 856 | 12.8410 | 0.2614 | 0.4702 | 0.2516 | 0.1796 | 0.1916 | 0.4767 | 0.2389 | 0.451 | 0.5373 | 0.3511 | 0.4776 | 0.7404 | 0.3826 | 0.6739 | 0.1451 | 0.5456 | 0.2148 | 0.5022 | 0.2567 | 0.4646 | 0.3078 | 0.5 |
+ | 90.5185 | 9.0 | 963 | 13.1283 | 0.1857 | 0.3361 | 0.1772 | 0.1922 | 0.1448 | 0.3403 | 0.2197 | 0.4346 | 0.5488 | 0.368 | 0.5015 | 0.7352 | 0.2542 | 0.6599 | 0.0948 | 0.5392 | 0.0841 | 0.5022 | 0.211 | 0.5062 | 0.2846 | 0.5364 |
+ | 13.6999 | 10.0 | 1070 | 12.8353 | 0.2457 | 0.4365 | 0.2273 | 0.1837 | 0.1881 | 0.4385 | 0.2388 | 0.4518 | 0.5494 | 0.3529 | 0.4936 | 0.7493 | 0.3722 | 0.6748 | 0.1472 | 0.5671 | 0.1703 | 0.496 | 0.2429 | 0.4831 | 0.296 | 0.5262 |


  ### Framework versions
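For quick sanity-checking of a checkpoint produced by this training run, a minimal inference sketch with 🤗 Transformers is shown below. The repo id `qubvel-hf/rtdetr-r50-cppe5-finetune` and the image URL are assumptions for illustration, not part of this commit; substitute your own checkpoint path and image.

```python
# Minimal inference sketch; repo id and image URL are placeholders (assumptions).
import torch
import requests
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForObjectDetection

checkpoint = "qubvel-hf/rtdetr-r50-cppe5-finetune"  # assumed repo id
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = AutoModelForObjectDetection.from_pretrained(checkpoint)

url = "https://example.com/ppe_image.jpg"  # placeholder test image
image = Image.open(requests.get(url, stream=True).raw).convert("RGB")

inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Convert raw predictions into (score, label, box) above a confidence threshold.
target_sizes = torch.tensor([image.size[::-1]])  # (height, width)
results = processor.post_process_object_detection(outputs, target_sizes=target_sizes, threshold=0.5)[0]
for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
    print(model.config.id2label[label.item()], round(score.item(), 3), [round(v, 1) for v in box.tolist()])
```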
config.json CHANGED
@@ -1,26 +1,28 @@
  {
-   "_name_or_path": "sbchoi/rtdetr_r50vd_coco_o365",
+   "_name_or_path": "PekingU/rtdetr_r50vd_coco_o365",
    "activation_dropout": 0.0,
    "activation_function": "silu",
-   "anchor_image_size": [
-     640,
-     640
-   ],
+   "anchor_image_size": null,
    "architectures": [
      "RTDetrForObjectDetection"
    ],
    "attention_dropout": 0.0,
    "auxiliary_loss": true,
-   "backbone": "resnet50d",
-   "backbone_config": null,
-   "backbone_kwargs": {
-     "features_only": true,
+   "backbone": null,
+   "backbone_config": {
+     "model_type": "rt_detr_resnet",
+     "out_features": [
+       "stage2",
+       "stage3",
+       "stage4"
+     ],
      "out_indices": [
        2,
        3,
        4
      ]
    },
+   "backbone_kwargs": null,
    "batch_norm_eps": 1e-05,
    "box_noise_scale": 1.0,
    "d_model": 256,
@@ -92,8 +94,8 @@
    "torch_dtype": "float32",
    "transformers_version": "4.42.0.dev0",
    "use_focal_loss": true,
-   "use_pretrained_backbone": true,
-   "use_timm_backbone": true,
+   "use_pretrained_backbone": false,
+   "use_timm_backbone": false,
    "weight_loss_bbox": 5.0,
    "weight_loss_giou": 2.0,
    "weight_loss_vfl": 1.0,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d2c126c4f11e8f334749ec441b462057d344e170768c2a316bfd15b2c5a765a9
- size 171552884
+ oid sha256:67d7208b8a71d2f543e8e3a7487b2f624699517e4908446002ec5c67180b7e11
+ size 171559340
preprocessor_config.json CHANGED
@@ -14,6 +14,7 @@
    "image_mean",
    "image_std",
    "do_pad",
+   "pad_size",
    "format",
    "return_tensors",
    "data_format",
@@ -36,10 +37,11 @@
      0.224,
      0.225
    ],
+   "pad_size": null,
    "resample": 2,
    "rescale_factor": 0.00392156862745098,
    "size": {
-     "height": 640,
-     "width": 640
+     "height": 480,
+     "width": 480
    }
  }
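The preprocessor change resizes inputs to 480×480 (down from 640×640) and adds a `pad_size` entry with no fixed pad size. A small check of the saved settings after loading, assuming the same repo id as above:

```python
# Quick check of the updated preprocessing settings; the repo id is an assumption.
from transformers import RTDetrImageProcessor

processor = RTDetrImageProcessor.from_pretrained("qubvel-hf/rtdetr-r50-cppe5-finetune")
print(processor.size)      # expected: {'height': 480, 'width': 480}
print(processor.pad_size)  # expected: None (no fixed pad size configured)
```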
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e1862dddab38e8cd8584272d2e2fa768d2724237f5870b06c172e1e90656a1aa
+ oid sha256:6aed1e9e0a8f73b486fa4b5a13be28e92b91f11e88f8189ebda9afb8ef4148ad
  size 5112