TolgaBkm committed (verified)
Commit a0a7c9e · 1 Parent(s): ffac3c3

Upload 3 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ yolov4-plane_best.weights filter=lfs diff=lfs merge=lfs -text
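
The added rule routes yolov4-plane_best.weights through Git LFS, so the Git tree keeps only a small pointer file while the ~256 MB payload lives in LFS storage. As a minimal sketch (not part of this commit), the LFS-backed files can be fetched from the Hub with huggingface_hub; the repo_id below is a hypothetical placeholder:

# Sketch: fetch the cfg and the LFS-tracked weights added in this commit.
# Assumption: "TolgaBkm/yolov4-plane" is a placeholder repo id, not taken from the commit.
from huggingface_hub import hf_hub_download

repo_id = "TolgaBkm/yolov4-plane"  # hypothetical
cfg_path = hf_hub_download(repo_id=repo_id, filename="yolov4-plane.cfg")
weights_path = hf_hub_download(repo_id=repo_id, filename="yolov4-plane_best.weights")
print(cfg_path, weights_path)
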
faster_rcnn_inception_v2_coco_2018_01_28.7z ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecb1ffeba02f300dac6a5e0f06b75c7e8ae2a96902d0137a028760a75dc4aa63
+ size 86715535
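
The three added lines are the Git LFS pointer stored in place of the 7z archive: the pointer spec version, the SHA-256 of the real payload, and its size in bytes. A small verification sketch, assuming the archive has already been downloaded next to the script:

# Sketch: check a local copy of the archive against the LFS pointer above.
# The oid and size come from the pointer; the local path is an assumption.
import hashlib
import os

path = "faster_rcnn_inception_v2_coco_2018_01_28.7z"
expected_oid = "ecb1ffeba02f300dac6a5e0f06b75c7e8ae2a96902d0137a028760a75dc4aa63"
expected_size = 86715535

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("local file matches the LFS pointer")
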
yolov4-plane.cfg ADDED
@@ -0,0 +1,1160 @@
+ [net]
+ # Testing
+ #batch=1
+ #subdivisions=1
+ # Training
+ batch=64
+ subdivisions=32
+ width=416
+ height=416
+ channels=3
+ momentum=0.949
+ decay=0.0005
+ angle=0
+ saturation = 1.5
+ exposure = 1.5
+ hue=.1
+
+ learning_rate=0.001
+ burn_in=1000
+ max_batches = 6000
+ policy=steps
+ steps=4800,5400
+ scales=.1,.1
+
+ #cutmix=1
+ mosaic=1
+
+ #:104x104 54:52x52 85:26x26 104:13x13 for 416
+
+ [convolutional]
+ batch_normalize=1
+ filters=32
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ # Downsample
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=3
+ stride=2
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -2
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=32
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -1,-7
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ # Downsample
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=2
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -2
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=64
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -1,-10
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ # Downsample
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=2
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -2
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -1,-28
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ # Downsample
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=3
+ stride=2
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -2
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -1,-28
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ # Downsample
+
+ [convolutional]
+ batch_normalize=1
+ filters=1024
+ size=3
+ stride=2
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -2
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=3
+ stride=1
+ pad=1
+ activation=mish
+
+ [shortcut]
+ from=-3
+ activation=linear
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+
+ [route]
+ layers = -1,-16
+
+ [convolutional]
+ batch_normalize=1
+ filters=1024
+ size=1
+ stride=1
+ pad=1
+ activation=mish
+ stopbackward=800
+
+ ##########################
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=1024
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ ### SPP ###
+ [maxpool]
+ stride=1
+ size=5
+
+ [route]
+ layers=-2
+
+ [maxpool]
+ stride=1
+ size=9
+
+ [route]
+ layers=-4
+
+ [maxpool]
+ stride=1
+ size=13
+
+ [route]
+ layers=-1,-3,-5,-6
+ ### End SPP ###
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=1024
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [upsample]
+ stride=2
+
+ [route]
+ layers = 85
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [route]
+ layers = -1, -3
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=512
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=512
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [upsample]
+ stride=2
+
+ [route]
+ layers = 54
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [route]
+ layers = -1, -3
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=256
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=256
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=128
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ ##########################
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=256
+ activation=leaky
+
+ [convolutional]
+ size=1
+ stride=1
+ pad=1
+ filters=18
+ activation=linear
+
+
+ [yolo]
+ mask = 0,1,2
+ anchors = 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401
+ classes=1
+ num=9
+ jitter=.3
+ ignore_thresh = .7
+ truth_thresh = 1
+ scale_x_y = 1.2
+ iou_thresh=0.213
+ cls_normalizer=1.0
+ iou_normalizer=0.07
+ iou_loss=ciou
+ nms_kind=greedynms
+ beta_nms=0.6
+ max_delta=5
+
+
+ [route]
+ layers = -4
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=2
+ pad=1
+ filters=256
+ activation=leaky
+
+ [route]
+ layers = -1, -16
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=512
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=512
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=256
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=512
+ activation=leaky
+
+ [convolutional]
+ size=1
+ stride=1
+ pad=1
+ filters=18
+ activation=linear
+
+
+ [yolo]
+ mask = 3,4,5
+ anchors = 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401
+ classes=1
+ num=9
+ jitter=.3
+ ignore_thresh = .7
+ truth_thresh = 1
+ scale_x_y = 1.1
+ iou_thresh=0.213
+ cls_normalizer=1.0
+ iou_normalizer=0.07
+ iou_loss=ciou
+ nms_kind=greedynms
+ beta_nms=0.6
+ max_delta=5
+
+
+ [route]
+ layers = -4
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=2
+ pad=1
+ filters=512
+ activation=leaky
+
+ [route]
+ layers = -1, -37
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=1024
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=1024
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ filters=512
+ size=1
+ stride=1
+ pad=1
+ activation=leaky
+
+ [convolutional]
+ batch_normalize=1
+ size=3
+ stride=1
+ pad=1
+ filters=1024
+ activation=leaky
+
+ [convolutional]
+ size=1
+ stride=1
+ pad=1
+ filters=18
+ activation=linear
+
+
+ [yolo]
+ mask = 6,7,8
+ anchors = 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401
+ classes=1
+ num=9
+ jitter=.3
+ ignore_thresh = .7
+ truth_thresh = 1
+ random=1
+ scale_x_y = 1.05
+ iou_thresh=0.213
+ cls_normalizer=1.0
+ iou_normalizer=0.07
+ iou_loss=ciou
+ nms_kind=greedynms
+ beta_nms=0.6
+ max_delta=5
+
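
The cfg is the stock YOLOv4 layout (CSPDarknet53 backbone, SPP, PAN neck) adapted to a single class: each [yolo] head has classes=1, the convolutional layer immediately before each head has filters=18, and training runs max_batches=6000 with steps=4800,5400. These values follow the usual darknet single-class recipe, reproduced here as a quick sketch:

# Sketch: the usual darknet (AlexeyAB) sizing rules behind the values in yolov4-plane.cfg.
classes = 1
masks_per_head = 3                                   # mask = 0,1,2 / 3,4,5 / 6,7,8

filters = (classes + 5) * masks_per_head             # -> 18, matches filters=18 before each [yolo]
max_batches = max(6000, 2000 * classes)              # -> 6000, matches max_batches = 6000
steps = (int(0.8 * max_batches), int(0.9 * max_batches))  # -> (4800, 5400), matches steps=4800,5400

print(filters, max_batches, steps)
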
yolov4-plane_best.weights ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc2ffd7bfdcd62eef3aee3a96d185395360b1fd34eb1610ce7fe618157032311
+ size 256015980
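
With the cfg and the weights from this commit, the detector can be loaded directly by OpenCV's DNN module. A hedged sketch: the file names match this commit, while the test image, thresholds, and the "plane" label are assumptions.

# Sketch: run the uploaded single-class detector with OpenCV DNN.
import cv2

net = cv2.dnn.readNetFromDarknet("yolov4-plane.cfg", "yolov4-plane_best.weights")
model = cv2.dnn_DetectionModel(net)
model.setInputParams(size=(416, 416), scale=1 / 255.0, swapRB=True)  # matches width/height in [net]

image = cv2.imread("test_plane.jpg")  # placeholder input image
class_ids, scores, boxes = model.detect(image, confThreshold=0.4, nmsThreshold=0.5)

for score, box in zip(scores, boxes):
    x, y, w, h = box
    cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.putText(image, f"plane {float(score):.2f}", (x, y - 5),
                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)

cv2.imwrite("detections.jpg", image)
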