전원표 committed
Commit 7b516e0 · 1 Parent(s): 9ab228f

model commit
F1_curve.png ADDED
PR_curve.png ADDED
P_curve.png ADDED
R_curve.png ADDED
args.yaml ADDED
@@ -0,0 +1,108 @@
+ task: detect
+ mode: train
+ model: ./model/yolov8n.pt
+ data: ./doclaynet.yaml
+ epochs: 50
+ time: null
+ patience: 100
+ batch: 128
+ imgsz: 640
+ save: true
+ save_period: -1
+ cache: false
+ device:
+ - 0
+ - 1
+ workers: 2
+ project: null
+ name: train10
+ exist_ok: false
+ pretrained: true
+ optimizer: auto
+ verbose: true
+ seed: 0
+ deterministic: true
+ single_cls: false
+ rect: false
+ cos_lr: false
+ close_mosaic: 10
+ resume: false
+ amp: true
+ fraction: 1.0
+ profile: false
+ freeze: null
+ multi_scale: false
+ overlap_mask: true
+ mask_ratio: 4
+ dropout: 0.0
+ val: true
+ split: val
+ save_json: false
+ save_hybrid: false
+ conf: null
+ iou: 0.7
+ max_det: 300
+ half: false
+ dnn: false
+ plots: true
+ source: null
+ vid_stride: 1
+ stream_buffer: false
+ visualize: false
+ augment: false
+ agnostic_nms: false
+ classes: null
+ retina_masks: false
+ embed: null
+ show: false
+ save_frames: false
+ save_txt: false
+ save_conf: false
+ save_crop: false
+ show_labels: true
+ show_conf: true
+ show_boxes: true
+ line_width: null
+ format: torchscript
+ keras: false
+ optimize: false
+ int8: false
+ dynamic: false
+ simplify: false
+ opset: null
+ workspace: 4
+ nms: false
+ lr0: 0.01
+ lrf: 0.01
+ momentum: 0.937
+ weight_decay: 0.0005
+ warmup_epochs: 3.0
+ warmup_momentum: 0.8
+ warmup_bias_lr: 0.1
+ box: 7.5
+ cls: 0.5
+ dfl: 1.5
+ pose: 12.0
+ kobj: 1.0
+ label_smoothing: 0.0
+ nbs: 64
+ hsv_h: 0.015
+ hsv_s: 0.7
+ hsv_v: 0.4
+ degrees: 0.0
+ translate: 0.1
+ scale: 0.5
+ shear: 0.0
+ perspective: 0.0
+ flipud: 0.0
+ fliplr: 0.5
+ bgr: 0.0
+ mosaic: 1.0
+ mixup: 0.0
+ copy_paste: 0.0
+ auto_augment: randaugment
+ erasing: 0.4
+ crop_fraction: 1.0
+ cfg: null
+ tracker: botsort.yaml
+ save_dir: runs/detect/train10
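The arguments above describe a standard Ultralytics YOLOv8 detection run: yolov8n.pt fine-tuned on doclaynet.yaml for 50 epochs on two GPUs. A minimal sketch of reproducing the run with the Python API, assuming the ultralytics package is installed and the model/data paths recorded in args.yaml exist; any argument not passed explicitly falls back to the defaults listed above:

```python
from ultralytics import YOLO

# Load the pretrained nano detection checkpoint referenced in args.yaml.
model = YOLO("./model/yolov8n.pt")

# Reproduce the key training arguments recorded in this commit.
model.train(
    data="./doclaynet.yaml",
    epochs=50,
    batch=128,
    imgsz=640,
    device=[0, 1],   # two GPUs, as in the device list above
    workers=2,
    name="train10",  # writes results to runs/detect/train10
    seed=0,
)
```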
confusion_matrix.png ADDED
confusion_matrix_normalized.png ADDED
labels.jpg ADDED
labels_correlogram.jpg ADDED
results.csv ADDED
@@ -0,0 +1,51 @@
+ epoch, train/box_loss, train/cls_loss, train/dfl_loss, metrics/precision(B), metrics/recall(B), metrics/mAP50(B), metrics/mAP50-95(B), val/box_loss, val/cls_loss, val/dfl_loss, lr/pg0, lr/pg1, lr/pg2
+ 1, 1.483, 2.2701, 1.2925, 0.56516, 0.34005, 0.30014, 0.18733, 1.3558, 1.6627, 1.1334, 0.0033272, 0.0033272, 0.0033272
+ 2, 1.1529, 1.4433, 1.0658, 0.66109, 0.4472, 0.43733, 0.27855, 1.2296, 1.3608, 1.0589, 0.0065286, 0.0065286, 0.0065286
+ 3, 1.0471, 1.2508, 1.0112, 0.48298, 0.41587, 0.41077, 0.23928, 1.3097, 1.4539, 1.0248, 0.0095981, 0.0095981, 0.0095981
+ 4, 0.93596, 1.0893, 0.97555, 0.62996, 0.51039, 0.50624, 0.31374, 1.1873, 1.2342, 0.97382, 0.009406, 0.009406, 0.009406
+ 5, 0.84789, 0.96796, 0.95318, 0.63569, 0.57479, 0.5871, 0.38927, 1.0503, 1.0473, 0.93336, 0.009208, 0.009208, 0.009208
+ 6, 0.79689, 0.90037, 0.94148, 0.64779, 0.55927, 0.5856, 0.41355, 0.93014, 0.94925, 0.92989, 0.00901, 0.00901, 0.00901
+ 7, 0.76595, 0.86462, 0.93538, 0.57191, 0.4991, 0.51802, 0.34505, 1.1078, 1.1607, 0.97045, 0.008812, 0.008812, 0.008812
+ 8, 0.73832, 0.83086, 0.92866, 0.63032, 0.56722, 0.57993, 0.41795, 0.89129, 0.94212, 0.90876, 0.008614, 0.008614, 0.008614
+ 9, 0.71771, 0.80686, 0.92444, 0.69417, 0.63691, 0.65233, 0.45829, 0.97338, 0.90419, 0.91354, 0.008416, 0.008416, 0.008416
+ 10, 0.69945, 0.79207, 0.92035, 0.70663, 0.6354, 0.6535, 0.46606, 0.94147, 0.88182, 0.9055, 0.008218, 0.008218, 0.008218
+ 11, 0.68906, 0.7779, 0.91928, 0.7144, 0.62651, 0.65311, 0.47162, 0.91772, 0.84109, 0.90079, 0.00802, 0.00802, 0.00802
+ 12, 0.67671, 0.76459, 0.9154, 0.73492, 0.64679, 0.67784, 0.48652, 0.93361, 0.82215, 0.89809, 0.007822, 0.007822, 0.007822
+ 13, 0.6695, 0.75645, 0.91421, 0.74046, 0.67265, 0.68807, 0.50801, 0.87572, 0.78447, 0.89185, 0.007624, 0.007624, 0.007624
+ 14, 0.65772, 0.74686, 0.91276, 0.70831, 0.66406, 0.6756, 0.49598, 0.89099, 0.809, 0.89361, 0.007426, 0.007426, 0.007426
+ 15, 0.64838, 0.73921, 0.91048, 0.73901, 0.67272, 0.69414, 0.50384, 0.91268, 0.78883, 0.89314, 0.007228, 0.007228, 0.007228
+ 16, 0.64318, 0.73332, 0.90944, 0.73722, 0.66283, 0.68438, 0.50449, 0.88498, 0.78891, 0.89024, 0.00703, 0.00703, 0.00703
+ 17, 0.63517, 0.71968, 0.90762, 0.73674, 0.66026, 0.6846, 0.50563, 0.86233, 0.76849, 0.88728, 0.006832, 0.006832, 0.006832
+ 18, 0.62645, 0.71206, 0.90625, 0.73068, 0.67481, 0.6912, 0.50687, 0.89239, 0.77089, 0.88878, 0.006634, 0.006634, 0.006634
+ 19, 0.62225, 0.70597, 0.90488, 0.74263, 0.67654, 0.6955, 0.51037, 0.89019, 0.76911, 0.89037, 0.006436, 0.006436, 0.006436
+ 20, 0.61923, 0.70433, 0.90338, 0.73945, 0.67925, 0.69435, 0.51349, 0.88558, 0.77009, 0.88795, 0.006238, 0.006238, 0.006238
+ 21, 0.6127, 0.69905, 0.90324, 0.74363, 0.68653, 0.69837, 0.51584, 0.883, 0.76373, 0.88796, 0.00604, 0.00604, 0.00604
+ 22, 0.60804, 0.69447, 0.90127, 0.738, 0.69062, 0.70069, 0.51986, 0.87369, 0.7532, 0.88617, 0.005842, 0.005842, 0.005842
+ 23, 0.60248, 0.6903, 0.90117, 0.74448, 0.68515, 0.69952, 0.51987, 0.87129, 0.75178, 0.88652, 0.005644, 0.005644, 0.005644
+ 24, 0.59894, 0.68679, 0.90008, 0.7407, 0.69175, 0.70089, 0.52119, 0.87184, 0.75078, 0.88612, 0.005446, 0.005446, 0.005446
+ 25, 0.59195, 0.6832, 0.90016, 0.7392, 0.69505, 0.70268, 0.52213, 0.87137, 0.74718, 0.88534, 0.005248, 0.005248, 0.005248
+ 26, 0.58794, 0.67378, 0.89764, 0.74341, 0.69777, 0.70374, 0.52272, 0.86765, 0.74486, 0.88419, 0.00505, 0.00505, 0.00505
+ 27, 0.58058, 0.669, 0.89661, 0.74115, 0.69983, 0.70483, 0.52517, 0.86479, 0.7424, 0.88359, 0.004852, 0.004852, 0.004852
+ 28, 0.58037, 0.66923, 0.8968, 0.74459, 0.69919, 0.70514, 0.5269, 0.85679, 0.73945, 0.88255, 0.004654, 0.004654, 0.004654
+ 29, 0.5699, 0.65951, 0.89447, 0.74297, 0.69858, 0.7053, 0.52726, 0.85509, 0.73707, 0.88213, 0.004456, 0.004456, 0.004456
+ 30, 0.56856, 0.6609, 0.89444, 0.74558, 0.69784, 0.70539, 0.52774, 0.85336, 0.73423, 0.88135, 0.004258, 0.004258, 0.004258
+ 31, 0.56373, 0.65491, 0.89333, 0.74675, 0.6994, 0.7061, 0.52752, 0.85582, 0.73318, 0.88132, 0.00406, 0.00406, 0.00406
+ 32, 0.55739, 0.64846, 0.89159, 0.74465, 0.70046, 0.7066, 0.52743, 0.85626, 0.73147, 0.88089, 0.003862, 0.003862, 0.003862
+ 33, 0.55434, 0.64246, 0.89068, 0.74554, 0.69848, 0.7068, 0.52802, 0.85465, 0.73019, 0.88046, 0.003664, 0.003664, 0.003664
+ 34, 0.55257, 0.6423, 0.89011, 0.74671, 0.69966, 0.70714, 0.52878, 0.85496, 0.72917, 0.88024, 0.003466, 0.003466, 0.003466
+ 35, 0.54855, 0.64, 0.89089, 0.74688, 0.69971, 0.70764, 0.52892, 0.85486, 0.72808, 0.88, 0.003268, 0.003268, 0.003268
+ 36, 0.54273, 0.63532, 0.89023, 0.74882, 0.70045, 0.70824, 0.52967, 0.85486, 0.72762, 0.87995, 0.00307, 0.00307, 0.00307
+ 37, 0.5404, 0.63057, 0.88832, 0.74901, 0.70327, 0.70884, 0.53018, 0.85435, 0.72628, 0.87945, 0.002872, 0.002872, 0.002872
+ 38, 0.53537, 0.62986, 0.88727, 0.74921, 0.70367, 0.7094, 0.53072, 0.8544, 0.72512, 0.8792, 0.002674, 0.002674, 0.002674
+ 39, 0.53037, 0.62478, 0.88738, 0.75215, 0.70152, 0.70988, 0.53145, 0.8531, 0.72362, 0.87875, 0.002476, 0.002476, 0.002476
+ 40, 0.52702, 0.62092, 0.88644, 0.75409, 0.70166, 0.71023, 0.5322, 0.85273, 0.72239, 0.87845, 0.002278, 0.002278, 0.002278
+ 41, 0.54289, 0.5836, 0.85975, 0.75162, 0.70718, 0.71083, 0.53259, 0.85194, 0.72014, 0.87801, 0.00208, 0.00208, 0.00208
+ 42, 0.53222, 0.57036, 0.85669, 0.75109, 0.70796, 0.71152, 0.53364, 0.85019, 0.71774, 0.87752, 0.001882, 0.001882, 0.001882
+ 43, 0.52475, 0.56334, 0.85515, 0.75295, 0.70835, 0.71233, 0.53476, 0.84982, 0.71554, 0.8771, 0.001684, 0.001684, 0.001684
+ 44, 0.5159, 0.55631, 0.85303, 0.75584, 0.70873, 0.71302, 0.53549, 0.84999, 0.71385, 0.87674, 0.001486, 0.001486, 0.001486
+ 45, 0.51181, 0.54565, 0.85192, 0.75556, 0.70892, 0.71402, 0.53619, 0.84961, 0.71199, 0.87626, 0.001288, 0.001288, 0.001288
+ 46, 0.50433, 0.54366, 0.85179, 0.75658, 0.70938, 0.71504, 0.53698, 0.84973, 0.71064, 0.87589, 0.00109, 0.00109, 0.00109
+ 47, 0.49892, 0.53682, 0.85103, 0.75615, 0.71036, 0.71581, 0.5374, 0.84936, 0.70939, 0.87548, 0.000892, 0.000892, 0.000892
+ 48, 0.49287, 0.53122, 0.84835, 0.75681, 0.71052, 0.71664, 0.53801, 0.84932, 0.70832, 0.87519, 0.000694, 0.000694, 0.000694
+ 49, 0.48793, 0.52509, 0.84831, 0.75751, 0.71004, 0.71717, 0.53889, 0.84847, 0.7069, 0.8748, 0.000496, 0.000496, 0.000496
+ 50, 0.48096, 0.51687, 0.84705, 0.7559, 0.711, 0.71754, 0.53947, 0.84741, 0.70598, 0.87447, 0.000298, 0.000298, 0.000298
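results.csv logs one row per epoch with the train/val losses and detection metrics summarised in results.png; the final epoch reaches roughly 0.718 mAP50 and 0.539 mAP50-95. A small sketch for inspecting the curves yourself with pandas and matplotlib, assuming the file sits under the save_dir recorded in args.yaml (runs/detect/train10); Ultralytics pads the CSV header with spaces, so column names are stripped first:

```python
import pandas as pd
import matplotlib.pyplot as plt

# Load the per-epoch log written by the trainer and normalise column names.
df = pd.read_csv("runs/detect/train10/results.csv")
df.columns = df.columns.str.strip()

# Plot the validation mAP curves over the 50 epochs.
plt.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP50")
plt.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP50-95")
plt.xlabel("epoch")
plt.ylabel("mAP")
plt.legend()
plt.savefig("map_curve.png")
```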
results.png ADDED
train_batch0.jpg ADDED
train_batch1.jpg ADDED
train_batch2.jpg ADDED
train_batch21640.jpg ADDED
train_batch21641.jpg ADDED
train_batch21642.jpg ADDED
val_batch0_labels.jpg ADDED
val_batch0_pred.jpg ADDED
val_batch1_labels.jpg ADDED
val_batch1_pred.jpg ADDED
val_batch2_labels.jpg ADDED
val_batch2_pred.jpg ADDED
weights/best.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1dd302218b154d57bb4971bb9c5908d64484af51703a90bd427377841a481811
+ size 6258543
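weights/best.pt is committed as a Git LFS pointer, so only the text stub above lives in the repository; the actual ~6 MB checkpoint must be fetched (e.g. with git lfs pull) before use. A hedged sketch of running detection with the trained weights, where the input image path is purely illustrative:

```python
from ultralytics import YOLO

# Load the fine-tuned checkpoint (requires the real LFS object, not the pointer).
model = YOLO("weights/best.pt")

# Run inference on a sample document page; "page.png" is a placeholder path.
results = model.predict("page.png", imgsz=640, conf=0.25)
results[0].show()  # visualise predicted layout boxes
```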