arisha07 committed
Commit 3291c10 · 1 Parent(s): 49d0ad1

Upload 15 files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ INT8/unet_controlnet_int8_NPU.blob filter=lfs diff=lfs merge=lfs -text
FP16/unet_controlnet.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89dc7eb9c8733e3b0e53f7f01b01709712d9df1a3140c4f0134378e47286fb71
+ size 1719042636
FP16/unet_controlnet.xml ADDED
The diff for this file is too large to render. See raw diff
 
INT8/unet_controlnet_int8.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e76d6426134be952140fed5847fe45f3793ad88fb3df90bbf471b324c3de57cc
+ size 862161348
INT8/unet_controlnet_int8.xml ADDED
The diff for this file is too large to render. See raw diff
 
INT8/unet_controlnet_int8_NPU.blob ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:289973ffa71fd289a9ba150afb48fe6770ff1d7d92d218afd4603f203fcc37a1
+ size 928567160
INT8/unet_time_proj_sym.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:585fe71d7f1e1e2871d45528a95012ab00b495d7547d834f8043cb37f8b439da
+ size 420568
INT8/unet_time_proj_sym.xml ADDED
@@ -0,0 +1,525 @@
+ <?xml version="1.0"?>
+ <net name="Model2" version="11">
+ <layers>
+ <layer id="0" name="timestep" type="Parameter" version="opset1">
+ <data shape="" element_type="i64" />
+ <rt_info>
+ <attribute name="old_api_map_element_type" version="0" value="i32" />
+ </rt_info>
+ <output>
+ <port id="0" precision="I64" names="timestep" />
+ </output>
+ </layer>
+ <layer id="1" name="/Constant384272028" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="0" size="8" />
+ <output>
+ <port id="0" precision="I64" names="/Constant_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="2" name="/Unsqueeze" type="Unsqueeze" version="opset1">
+ <input>
+ <port id="0" precision="I64" />
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="I64" names="/Cast_output_0,/Unsqueeze_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="/Where3844" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="8" size="8" />
+ <output>
+ <port id="0" precision="I64" names="/Where_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="/Expand" type="Broadcast" version="opset3">
+ <data mode="bidirectional" />
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="I64" names="/Expand_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="/time_proj/Constant384670198" type="Const" version="opset1">
+ <data element_type="i64" shape="1" offset="8" size="8" />
+ <output>
+ <port id="0" precision="I64" names="/time_proj/Constant_output_0">
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="/time_proj/Unsqueeze" type="Unsqueeze" version="opset1">
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="I64" names="/time_proj/Unsqueeze_output_0">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="7" name="/time_proj/Cast" type="Convert" version="opset1">
+ <data destination_type="f32" />
+ <input>
+ <port id="0" precision="I64">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/time_proj/Cast_output_0">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="8" name="/time_proj/Constant_1384972730" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 160" offset="16" size="640" />
+ <output>
+ <port id="0" precision="FP32" names="/time_proj/Constant_1_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="9" name="/time_proj/Mul" type="Multiply" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/time_proj/Mul_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="10" name="/time_proj/Sin" type="Sin" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/time_proj/Sin_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="11" name="250762508072964" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="12" name="250772508175535" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="13" name="250782508270750" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="14" name="250792508378268" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="15" name="/time_proj/Concat/fq_input_0" type="FakeQuantize" version="opset1">
+ <data levels="256" auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ <port id="2" precision="FP32" />
+ <port id="3" precision="FP32" />
+ <port id="4" precision="FP32" />
+ </input>
+ <output>
+ <port id="5" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="16" name="/time_proj/Cos" type="Cos" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="/time_proj/Cos_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="17" name="250862509070744" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="18" name="250872509175832" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="19" name="250882509271665" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="656" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="20" name="250892509371395" type="Const" version="opset1">
+ <data element_type="f32" shape="" offset="660" size="4" />
+ <output>
+ <port id="0" precision="FP32" />
+ </output>
+ </layer>
+ <layer id="21" name="/time_proj/Concat/fq_input_1" type="FakeQuantize" version="opset1">
+ <data levels="256" auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32" />
+ <port id="2" precision="FP32" />
+ <port id="3" precision="FP32" />
+ <port id="4" precision="FP32" />
+ </input>
+ <output>
+ <port id="5" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="22" name="/time_proj/Concat" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/time_proj/Concat_output_0">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="23" name="Constant_182093854" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="664" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="24" name="Constant_182123855" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="680" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="25" name="Constant_182153856" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="696" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="26" name="/time_proj/Slice" type="StridedSlice" version="opset1">
+ <data begin_mask="1, 0" end_mask="1, 0" new_axis_mask="" shrink_axis_mask="" ellipsis_mask="" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="3" precision="I64">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="4" precision="FP32" names="/time_proj/Slice_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="27" name="Constant_182213858" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="712" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="28" name="Constant_182243859" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="664" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="29" name="Constant_182273860" type="Const" version="opset1">
+ <data element_type="i64" shape="2" offset="696" size="16" />
+ <output>
+ <port id="0" precision="I64">
+ <dim>2</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="30" name="/time_proj/Slice_1" type="StridedSlice" version="opset1">
+ <data begin_mask="1, 0" end_mask="1, 0" new_axis_mask="" shrink_axis_mask="" ellipsis_mask="" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>2</dim>
+ </port>
+ <port id="3" precision="I64">
+ <dim>2</dim>
+ </port>
+ </input>
+ <output>
+ <port id="4" precision="FP32" names="/time_proj/Slice_1_output_0">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="31" name="/time_proj/Concat_1" type="Concat" version="opset1">
+ <data axis="1" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>160</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/Cast_1_output_0,/time_proj/Concat_1_output_0">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="32" name="time_embedding.linear_1.weight386341283/quantized5001874713" type="Const" version="opset1">
+ <data element_type="i8" shape="1280, 320" offset="728" size="409600" />
+ <output>
+ <port id="0" precision="I8">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="33" name="time_embedding.linear_1.weight386341283/quantized/to_f32" type="Convert" version="opset1">
+ <data destination_type="f32" />
+ <input>
+ <port id="0" precision="I8">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="34" name="/time_embedding/linear_1/Gemm/WithoutBiases/fq_weights_1/scale5002673147" type="Const" version="opset1">
+ <data element_type="f32" shape="1280, 1" offset="410328" size="5120" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1280</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="35" name="/time_embedding/linear_1/Gemm/WithoutBiases/fq_weights_1/mulpiply_by_scale" type="Multiply" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1280</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="36" name="/time_embedding/linear_1/Gemm/WithoutBiases" type="MatMul" version="opset1">
+ <data transpose_a="false" transpose_b="true" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>320</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1280</dim>
+ <dim>320</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="37" name="Constant_22762386570027" type="Const" version="opset1">
+ <data element_type="f32" shape="1, 1280" offset="415448" size="5120" />
+ <output>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="38" name="/time_embedding/linear_1/Gemm" type="Add" version="opset1">
+ <data auto_broadcast="numpy" />
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="/time_embedding/linear_1/Gemm_output_0">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="39" name="/time_embedding/linear_1/Gemm0" type="Result" version="opset1">
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>1280</dim>
+ </port>
+ </input>
+ </layer>
+ </layers>
+ <edges>
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="1" />
+ <edge from-layer="2" from-port="2" to-layer="4" to-port="0" />
+ <edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
+ <edge from-layer="4" from-port="2" to-layer="6" to-port="0" />
+ <edge from-layer="5" from-port="0" to-layer="6" to-port="1" />
+ <edge from-layer="6" from-port="2" to-layer="7" to-port="0" />
+ <edge from-layer="7" from-port="1" to-layer="9" to-port="0" />
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="1" />
+ <edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
+ <edge from-layer="9" from-port="2" to-layer="16" to-port="0" />
+ <edge from-layer="10" from-port="1" to-layer="15" to-port="0" />
+ <edge from-layer="11" from-port="0" to-layer="15" to-port="1" />
+ <edge from-layer="12" from-port="0" to-layer="15" to-port="2" />
+ <edge from-layer="13" from-port="0" to-layer="15" to-port="3" />
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="4" />
+ <edge from-layer="15" from-port="5" to-layer="22" to-port="0" />
+ <edge from-layer="16" from-port="1" to-layer="21" to-port="0" />
+ <edge from-layer="17" from-port="0" to-layer="21" to-port="1" />
+ <edge from-layer="18" from-port="0" to-layer="21" to-port="2" />
+ <edge from-layer="19" from-port="0" to-layer="21" to-port="3" />
+ <edge from-layer="20" from-port="0" to-layer="21" to-port="4" />
+ <edge from-layer="21" from-port="5" to-layer="22" to-port="1" />
+ <edge from-layer="22" from-port="2" to-layer="26" to-port="0" />
+ <edge from-layer="22" from-port="2" to-layer="30" to-port="0" />
+ <edge from-layer="23" from-port="0" to-layer="26" to-port="1" />
+ <edge from-layer="24" from-port="0" to-layer="26" to-port="2" />
+ <edge from-layer="25" from-port="0" to-layer="26" to-port="3" />
+ <edge from-layer="26" from-port="4" to-layer="31" to-port="0" />
+ <edge from-layer="27" from-port="0" to-layer="30" to-port="1" />
+ <edge from-layer="28" from-port="0" to-layer="30" to-port="2" />
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="3" />
+ <edge from-layer="30" from-port="4" to-layer="31" to-port="1" />
+ <edge from-layer="31" from-port="2" to-layer="36" to-port="0" />
+ <edge from-layer="32" from-port="0" to-layer="33" to-port="0" />
+ <edge from-layer="33" from-port="1" to-layer="35" to-port="0" />
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
+ <edge from-layer="35" from-port="2" to-layer="36" to-port="1" />
+ <edge from-layer="36" from-port="2" to-layer="38" to-port="0" />
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="1" />
+ <edge from-layer="38" from-port="2" to-layer="39" to-port="0" />
+ </edges>
+ <rt_info />
+ </net>
controlnet-scribble.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef6ff6ddb4f86d8ede166ad1b92baad902439ae7a2e6e093e5d793b473e816ba
+ size 722537172
controlnet-scribble.xml ADDED
The diff for this file is too large to render. See raw diff
 
hed.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf526dcc88f1fdbaa2d3dc30e5608d8ec85eedde4925a728ca9e4edaf03cfd6b
+ size 29432336
hed.xml ADDED
@@ -0,0 +1,2928 @@
+ <?xml version="1.0" ?>
+ <net name="torch_jit" version="11">
+ <layers>
+ <layer id="0" name="onnx::Sub_0" type="Parameter" version="opset1">
+ <data shape="1,3,512,512" element_type="f32"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="onnx::Sub_0"/>
+ <attribute name="old_api_map_element_type" version="0" value="f16"/>
+ </rt_info>
+ <output>
+ <port id="0" precision="FP32" names="onnx::Sub_0">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="1" name="norm_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 3, 1, 1" offset="0" size="6"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="2" name="norm" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="norm"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="norm">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="3" name="Sub_0" type="Subtract" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Sub_0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="input">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="4" name="block1.convs.0.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="64, 3, 3, 3" offset="6" size="3456"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="5" name="block1.convs.0.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block1.convs.0.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block1.convs.0.weight">
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="6" name="convs.0/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.0/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>3</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="7" name="Reshape_59_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 64, 1, 1" offset="3462" size="128"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="8" name="Reshape_59" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="9" name="convs.0/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_58, Reshape_59, convs.0/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.0/Conv_output_0">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="10" name="Relu_2" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_2"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.3">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="11" name="block1.convs.1.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="64, 64, 3, 3" offset="3590" size="73728"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="12" name="block1.convs.1.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block1.convs.1.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block1.convs.1.weight">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="13" name="convs.1/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.1/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>64</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="14" name="Reshape_108_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 64, 1, 1" offset="77318" size="128"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="15" name="Reshape_108" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="16" name="convs.1/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_107, Reshape_108, convs.1/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.1/Conv_output_0">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="17" name="Relu_4" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_4"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.7">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="18" name="MaxPool_6" type="MaxPool" version="opset8">
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="MaxPool_6"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>512</dim>
+ <dim>512</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.11">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="19" name="block2.convs.0.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="128, 64, 3, 3" offset="77446" size="147456"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>128</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="20" name="block2.convs.0.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block2.convs.0.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>128</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block2.convs.0.weight">
+ <dim>128</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="21" name="convs.0_1/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.0_1/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>64</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>128</dim>
+ <dim>64</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="22" name="Reshape_206_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 128, 1, 1" offset="224902" size="256"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="23" name="Reshape_206" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="24" name="convs.0_1/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_205, Reshape_206, convs.0_1/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.0_1/Conv_output_0">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="25" name="Relu_8" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_8"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.15">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="26" name="block2.convs.1.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="128, 128, 3, 3" offset="225158" size="294912"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="27" name="block2.convs.1.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block2.convs.1.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block2.convs.1.weight">
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="28" name="convs.1_1/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.1_1/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="29" name="Reshape_255_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 128, 1, 1" offset="520070" size="256"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="30" name="Reshape_255" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="31" name="convs.1_1/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_254, Reshape_255, convs.1_1/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.1_1/Conv_output_0">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="32" name="Relu_10" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_10"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.19">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="33" name="MaxPool_12" type="MaxPool" version="opset8">
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="MaxPool_12"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>256</dim>
+ <dim>256</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.23">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="34" name="block3.convs.0.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="256, 128, 3, 3" offset="520326" size="589824"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="35" name="block3.convs.0.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block3.convs.0.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block3.convs.0.weight">
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="36" name="convs.0_2/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.0_2/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="37" name="Reshape_353_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 256, 1, 1" offset="1110150" size="512"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="38" name="Reshape_353" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="39" name="convs.0_2/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_352, Reshape_353, convs.0_2/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.0_2/Conv_output_0">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="40" name="Relu_14" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_14"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.27">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="41" name="block3.convs.1.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="256, 256, 3, 3" offset="1110662" size="1179648"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="42" name="block3.convs.1.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block3.convs.1.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block3.convs.1.weight">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="43" name="convs.1_2/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.1_2/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="44" name="Reshape_402_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 256, 1, 1" offset="2290310" size="512"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="45" name="Reshape_402" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="46" name="convs.1_2/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_401, Reshape_402, convs.1_2/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.1_2/Conv_output_0">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="47" name="Relu_16" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_16"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.31">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="48" name="block3.convs.2.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="256, 256, 3, 3" offset="2290822" size="1179648"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="49" name="block3.convs.2.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block3.convs.2.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block3.convs.2.weight">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="50" name="convs.2/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.2/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>256</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="51" name="Reshape_451_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 256, 1, 1" offset="3470470" size="512"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="52" name="Reshape_451" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="53" name="convs.2/Conv" type="Add" version="opset1">
+ <data auto_broadcast="numpy"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Concat_450, Reshape_451, convs.2/Conv"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>1</dim>
+ <dim>1</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32" names="convs.2/Conv_output_0">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="54" name="Relu_18" type="ReLU" version="opset1">
+ <rt_info>
+ <attribute name="fused_names" version="0" value="Relu_18"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.35">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="55" name="MaxPool_20" type="MaxPool" version="opset8">
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="MaxPool_20"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>128</dim>
+ <dim>128</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="input.39">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="2" precision="I64">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="56" name="block4.convs.0.weight_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="512, 256, 3, 3" offset="3470982" size="2359296"/>
+ <output>
+ <port id="0" precision="FP16">
+ <dim>512</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="57" name="block4.convs.0.weight" type="Convert" version="opset1">
+ <data destination_type="f32"/>
+ <rt_info>
+ <attribute name="decompression" version="0"/>
+ <attribute name="fused_names" version="0" value="block4.convs.0.weight"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP16">
+ <dim>512</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="1" precision="FP32" names="block4.convs.0.weight">
+ <dim>512</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="58" name="convs.0_3/Conv/WithoutBiases" type="Convolution" version="opset1">
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
+ <rt_info>
+ <attribute name="fused_names" version="0" value="convs.0_3/Conv/WithoutBiases"/>
+ </rt_info>
+ <input>
+ <port id="0" precision="FP32">
+ <dim>1</dim>
+ <dim>256</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ <port id="1" precision="FP32">
+ <dim>512</dim>
+ <dim>256</dim>
+ <dim>3</dim>
+ <dim>3</dim>
+ </port>
+ </input>
+ <output>
+ <port id="2" precision="FP32">
+ <dim>1</dim>
+ <dim>512</dim>
+ <dim>64</dim>
+ <dim>64</dim>
+ </port>
+ </output>
+ </layer>
+ <layer id="59" name="Reshape_549_compressed" type="Const" version="opset1">
+ <data element_type="f16" shape="1, 512, 1, 1" offset="5830278" size="1024"/>
1237
+ <output>
1238
+ <port id="0" precision="FP16">
1239
+ <dim>1</dim>
1240
+ <dim>512</dim>
1241
+ <dim>1</dim>
1242
+ <dim>1</dim>
1243
+ </port>
1244
+ </output>
1245
+ </layer>
1246
+ <layer id="60" name="Reshape_549" type="Convert" version="opset1">
1247
+ <data destination_type="f32"/>
1248
+ <rt_info>
1249
+ <attribute name="decompression" version="0"/>
1250
+ </rt_info>
1251
+ <input>
1252
+ <port id="0" precision="FP16">
1253
+ <dim>1</dim>
1254
+ <dim>512</dim>
1255
+ <dim>1</dim>
1256
+ <dim>1</dim>
1257
+ </port>
1258
+ </input>
1259
+ <output>
1260
+ <port id="1" precision="FP32">
1261
+ <dim>1</dim>
1262
+ <dim>512</dim>
1263
+ <dim>1</dim>
1264
+ <dim>1</dim>
1265
+ </port>
1266
+ </output>
1267
+ </layer>
1268
+ <layer id="61" name="convs.0_3/Conv" type="Add" version="opset1">
1269
+ <data auto_broadcast="numpy"/>
1270
+ <rt_info>
1271
+ <attribute name="fused_names" version="0" value="Concat_548, Reshape_549, convs.0_3/Conv"/>
1272
+ </rt_info>
1273
+ <input>
1274
+ <port id="0" precision="FP32">
1275
+ <dim>1</dim>
1276
+ <dim>512</dim>
1277
+ <dim>64</dim>
1278
+ <dim>64</dim>
1279
+ </port>
1280
+ <port id="1" precision="FP32">
1281
+ <dim>1</dim>
1282
+ <dim>512</dim>
1283
+ <dim>1</dim>
1284
+ <dim>1</dim>
1285
+ </port>
1286
+ </input>
1287
+ <output>
1288
+ <port id="2" precision="FP32" names="convs.0_3/Conv_output_0">
1289
+ <dim>1</dim>
1290
+ <dim>512</dim>
1291
+ <dim>64</dim>
1292
+ <dim>64</dim>
1293
+ </port>
1294
+ </output>
1295
+ </layer>
1296
+ <layer id="62" name="Relu_22" type="ReLU" version="opset1">
1297
+ <rt_info>
1298
+ <attribute name="fused_names" version="0" value="Relu_22"/>
1299
+ </rt_info>
1300
+ <input>
1301
+ <port id="0" precision="FP32">
1302
+ <dim>1</dim>
1303
+ <dim>512</dim>
1304
+ <dim>64</dim>
1305
+ <dim>64</dim>
1306
+ </port>
1307
+ </input>
1308
+ <output>
1309
+ <port id="1" precision="FP32" names="input.43">
1310
+ <dim>1</dim>
1311
+ <dim>512</dim>
1312
+ <dim>64</dim>
1313
+ <dim>64</dim>
1314
+ </port>
1315
+ </output>
1316
+ </layer>
1317
+ <layer id="63" name="block4.convs.1.weight_compressed" type="Const" version="opset1">
1318
+ <data element_type="f16" shape="512, 512, 3, 3" offset="5831302" size="4718592"/>
1319
+ <output>
1320
+ <port id="0" precision="FP16">
1321
+ <dim>512</dim>
1322
+ <dim>512</dim>
1323
+ <dim>3</dim>
1324
+ <dim>3</dim>
1325
+ </port>
1326
+ </output>
1327
+ </layer>
1328
+ <layer id="64" name="block4.convs.1.weight" type="Convert" version="opset1">
1329
+ <data destination_type="f32"/>
1330
+ <rt_info>
1331
+ <attribute name="decompression" version="0"/>
1332
+ <attribute name="fused_names" version="0" value="block4.convs.1.weight"/>
1333
+ </rt_info>
1334
+ <input>
1335
+ <port id="0" precision="FP16">
1336
+ <dim>512</dim>
1337
+ <dim>512</dim>
1338
+ <dim>3</dim>
1339
+ <dim>3</dim>
1340
+ </port>
1341
+ </input>
1342
+ <output>
1343
+ <port id="1" precision="FP32" names="block4.convs.1.weight">
1344
+ <dim>512</dim>
1345
+ <dim>512</dim>
1346
+ <dim>3</dim>
1347
+ <dim>3</dim>
1348
+ </port>
1349
+ </output>
1350
+ </layer>
1351
+ <layer id="65" name="convs.1_3/Conv/WithoutBiases" type="Convolution" version="opset1">
1352
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
1353
+ <rt_info>
1354
+ <attribute name="fused_names" version="0" value="convs.1_3/Conv/WithoutBiases"/>
1355
+ </rt_info>
1356
+ <input>
1357
+ <port id="0" precision="FP32">
1358
+ <dim>1</dim>
1359
+ <dim>512</dim>
1360
+ <dim>64</dim>
1361
+ <dim>64</dim>
1362
+ </port>
1363
+ <port id="1" precision="FP32">
1364
+ <dim>512</dim>
1365
+ <dim>512</dim>
1366
+ <dim>3</dim>
1367
+ <dim>3</dim>
1368
+ </port>
1369
+ </input>
1370
+ <output>
1371
+ <port id="2" precision="FP32">
1372
+ <dim>1</dim>
1373
+ <dim>512</dim>
1374
+ <dim>64</dim>
1375
+ <dim>64</dim>
1376
+ </port>
1377
+ </output>
1378
+ </layer>
1379
+ <layer id="66" name="Reshape_598_compressed" type="Const" version="opset1">
1380
+ <data element_type="f16" shape="1, 512, 1, 1" offset="10549894" size="1024"/>
1381
+ <output>
1382
+ <port id="0" precision="FP16">
1383
+ <dim>1</dim>
1384
+ <dim>512</dim>
1385
+ <dim>1</dim>
1386
+ <dim>1</dim>
1387
+ </port>
1388
+ </output>
1389
+ </layer>
1390
+ <layer id="67" name="Reshape_598" type="Convert" version="opset1">
1391
+ <data destination_type="f32"/>
1392
+ <rt_info>
1393
+ <attribute name="decompression" version="0"/>
1394
+ </rt_info>
1395
+ <input>
1396
+ <port id="0" precision="FP16">
1397
+ <dim>1</dim>
1398
+ <dim>512</dim>
1399
+ <dim>1</dim>
1400
+ <dim>1</dim>
1401
+ </port>
1402
+ </input>
1403
+ <output>
1404
+ <port id="1" precision="FP32">
1405
+ <dim>1</dim>
1406
+ <dim>512</dim>
1407
+ <dim>1</dim>
1408
+ <dim>1</dim>
1409
+ </port>
1410
+ </output>
1411
+ </layer>
1412
+ <layer id="68" name="convs.1_3/Conv" type="Add" version="opset1">
1413
+ <data auto_broadcast="numpy"/>
1414
+ <rt_info>
1415
+ <attribute name="fused_names" version="0" value="Concat_597, Reshape_598, convs.1_3/Conv"/>
1416
+ </rt_info>
1417
+ <input>
1418
+ <port id="0" precision="FP32">
1419
+ <dim>1</dim>
1420
+ <dim>512</dim>
1421
+ <dim>64</dim>
1422
+ <dim>64</dim>
1423
+ </port>
1424
+ <port id="1" precision="FP32">
1425
+ <dim>1</dim>
1426
+ <dim>512</dim>
1427
+ <dim>1</dim>
1428
+ <dim>1</dim>
1429
+ </port>
1430
+ </input>
1431
+ <output>
1432
+ <port id="2" precision="FP32" names="convs.1_3/Conv_output_0">
1433
+ <dim>1</dim>
1434
+ <dim>512</dim>
1435
+ <dim>64</dim>
1436
+ <dim>64</dim>
1437
+ </port>
1438
+ </output>
1439
+ </layer>
1440
+ <layer id="69" name="Relu_24" type="ReLU" version="opset1">
1441
+ <rt_info>
1442
+ <attribute name="fused_names" version="0" value="Relu_24"/>
1443
+ </rt_info>
1444
+ <input>
1445
+ <port id="0" precision="FP32">
1446
+ <dim>1</dim>
1447
+ <dim>512</dim>
1448
+ <dim>64</dim>
1449
+ <dim>64</dim>
1450
+ </port>
1451
+ </input>
1452
+ <output>
1453
+ <port id="1" precision="FP32" names="input.47">
1454
+ <dim>1</dim>
1455
+ <dim>512</dim>
1456
+ <dim>64</dim>
1457
+ <dim>64</dim>
1458
+ </port>
1459
+ </output>
1460
+ </layer>
1461
+ <layer id="70" name="block4.convs.2.weight_compressed" type="Const" version="opset1">
1462
+ <data element_type="f16" shape="512, 512, 3, 3" offset="10550918" size="4718592"/>
1463
+ <output>
1464
+ <port id="0" precision="FP16">
1465
+ <dim>512</dim>
1466
+ <dim>512</dim>
1467
+ <dim>3</dim>
1468
+ <dim>3</dim>
1469
+ </port>
1470
+ </output>
1471
+ </layer>
1472
+ <layer id="71" name="block4.convs.2.weight" type="Convert" version="opset1">
1473
+ <data destination_type="f32"/>
1474
+ <rt_info>
1475
+ <attribute name="decompression" version="0"/>
1476
+ <attribute name="fused_names" version="0" value="block4.convs.2.weight"/>
1477
+ </rt_info>
1478
+ <input>
1479
+ <port id="0" precision="FP16">
1480
+ <dim>512</dim>
1481
+ <dim>512</dim>
1482
+ <dim>3</dim>
1483
+ <dim>3</dim>
1484
+ </port>
1485
+ </input>
1486
+ <output>
1487
+ <port id="1" precision="FP32" names="block4.convs.2.weight">
1488
+ <dim>512</dim>
1489
+ <dim>512</dim>
1490
+ <dim>3</dim>
1491
+ <dim>3</dim>
1492
+ </port>
1493
+ </output>
1494
+ </layer>
1495
+ <layer id="72" name="convs.2_1/Conv/WithoutBiases" type="Convolution" version="opset1">
1496
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
1497
+ <rt_info>
1498
+ <attribute name="fused_names" version="0" value="convs.2_1/Conv/WithoutBiases"/>
1499
+ </rt_info>
1500
+ <input>
1501
+ <port id="0" precision="FP32">
1502
+ <dim>1</dim>
1503
+ <dim>512</dim>
1504
+ <dim>64</dim>
1505
+ <dim>64</dim>
1506
+ </port>
1507
+ <port id="1" precision="FP32">
1508
+ <dim>512</dim>
1509
+ <dim>512</dim>
1510
+ <dim>3</dim>
1511
+ <dim>3</dim>
1512
+ </port>
1513
+ </input>
1514
+ <output>
1515
+ <port id="2" precision="FP32">
1516
+ <dim>1</dim>
1517
+ <dim>512</dim>
1518
+ <dim>64</dim>
1519
+ <dim>64</dim>
1520
+ </port>
1521
+ </output>
1522
+ </layer>
1523
+ <layer id="73" name="Reshape_647_compressed" type="Const" version="opset1">
1524
+ <data element_type="f16" shape="1, 512, 1, 1" offset="15269510" size="1024"/>
1525
+ <output>
1526
+ <port id="0" precision="FP16">
1527
+ <dim>1</dim>
1528
+ <dim>512</dim>
1529
+ <dim>1</dim>
1530
+ <dim>1</dim>
1531
+ </port>
1532
+ </output>
1533
+ </layer>
1534
+ <layer id="74" name="Reshape_647" type="Convert" version="opset1">
1535
+ <data destination_type="f32"/>
1536
+ <rt_info>
1537
+ <attribute name="decompression" version="0"/>
1538
+ </rt_info>
1539
+ <input>
1540
+ <port id="0" precision="FP16">
1541
+ <dim>1</dim>
1542
+ <dim>512</dim>
1543
+ <dim>1</dim>
1544
+ <dim>1</dim>
1545
+ </port>
1546
+ </input>
1547
+ <output>
1548
+ <port id="1" precision="FP32">
1549
+ <dim>1</dim>
1550
+ <dim>512</dim>
1551
+ <dim>1</dim>
1552
+ <dim>1</dim>
1553
+ </port>
1554
+ </output>
1555
+ </layer>
1556
+ <layer id="75" name="convs.2_1/Conv" type="Add" version="opset1">
1557
+ <data auto_broadcast="numpy"/>
1558
+ <rt_info>
1559
+ <attribute name="fused_names" version="0" value="Concat_646, Reshape_647, convs.2_1/Conv"/>
1560
+ </rt_info>
1561
+ <input>
1562
+ <port id="0" precision="FP32">
1563
+ <dim>1</dim>
1564
+ <dim>512</dim>
1565
+ <dim>64</dim>
1566
+ <dim>64</dim>
1567
+ </port>
1568
+ <port id="1" precision="FP32">
1569
+ <dim>1</dim>
1570
+ <dim>512</dim>
1571
+ <dim>1</dim>
1572
+ <dim>1</dim>
1573
+ </port>
1574
+ </input>
1575
+ <output>
1576
+ <port id="2" precision="FP32" names="convs.2_1/Conv_output_0">
1577
+ <dim>1</dim>
1578
+ <dim>512</dim>
1579
+ <dim>64</dim>
1580
+ <dim>64</dim>
1581
+ </port>
1582
+ </output>
1583
+ </layer>
1584
+ <layer id="76" name="Relu_26" type="ReLU" version="opset1">
1585
+ <rt_info>
1586
+ <attribute name="fused_names" version="0" value="Relu_26"/>
1587
+ </rt_info>
1588
+ <input>
1589
+ <port id="0" precision="FP32">
1590
+ <dim>1</dim>
1591
+ <dim>512</dim>
1592
+ <dim>64</dim>
1593
+ <dim>64</dim>
1594
+ </port>
1595
+ </input>
1596
+ <output>
1597
+ <port id="1" precision="FP32" names="input.51">
1598
+ <dim>1</dim>
1599
+ <dim>512</dim>
1600
+ <dim>64</dim>
1601
+ <dim>64</dim>
1602
+ </port>
1603
+ </output>
1604
+ </layer>
1605
+ <layer id="77" name="MaxPool_28" type="MaxPool" version="opset8">
1606
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0"/>
1607
+ <rt_info>
1608
+ <attribute name="fused_names" version="0" value="MaxPool_28"/>
1609
+ </rt_info>
1610
+ <input>
1611
+ <port id="0" precision="FP32">
1612
+ <dim>1</dim>
1613
+ <dim>512</dim>
1614
+ <dim>64</dim>
1615
+ <dim>64</dim>
1616
+ </port>
1617
+ </input>
1618
+ <output>
1619
+ <port id="1" precision="FP32" names="input.55">
1620
+ <dim>1</dim>
1621
+ <dim>512</dim>
1622
+ <dim>32</dim>
1623
+ <dim>32</dim>
1624
+ </port>
1625
+ <port id="2" precision="I64">
1626
+ <dim>1</dim>
1627
+ <dim>512</dim>
1628
+ <dim>32</dim>
1629
+ <dim>32</dim>
1630
+ </port>
1631
+ </output>
1632
+ </layer>
1633
+ <layer id="78" name="block5.convs.0.weight_compressed" type="Const" version="opset1">
1634
+ <data element_type="f16" shape="512, 512, 3, 3" offset="15270534" size="4718592"/>
1635
+ <output>
1636
+ <port id="0" precision="FP16">
1637
+ <dim>512</dim>
1638
+ <dim>512</dim>
1639
+ <dim>3</dim>
1640
+ <dim>3</dim>
1641
+ </port>
1642
+ </output>
1643
+ </layer>
1644
+ <layer id="79" name="block5.convs.0.weight" type="Convert" version="opset1">
1645
+ <data destination_type="f32"/>
1646
+ <rt_info>
1647
+ <attribute name="decompression" version="0"/>
1648
+ <attribute name="fused_names" version="0" value="block5.convs.0.weight"/>
1649
+ </rt_info>
1650
+ <input>
1651
+ <port id="0" precision="FP16">
1652
+ <dim>512</dim>
1653
+ <dim>512</dim>
1654
+ <dim>3</dim>
1655
+ <dim>3</dim>
1656
+ </port>
1657
+ </input>
1658
+ <output>
1659
+ <port id="1" precision="FP32" names="block5.convs.0.weight">
1660
+ <dim>512</dim>
1661
+ <dim>512</dim>
1662
+ <dim>3</dim>
1663
+ <dim>3</dim>
1664
+ </port>
1665
+ </output>
1666
+ </layer>
1667
+ <layer id="80" name="convs.0_4/Conv/WithoutBiases" type="Convolution" version="opset1">
1668
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
1669
+ <rt_info>
1670
+ <attribute name="fused_names" version="0" value="convs.0_4/Conv/WithoutBiases"/>
1671
+ </rt_info>
1672
+ <input>
1673
+ <port id="0" precision="FP32">
1674
+ <dim>1</dim>
1675
+ <dim>512</dim>
1676
+ <dim>32</dim>
1677
+ <dim>32</dim>
1678
+ </port>
1679
+ <port id="1" precision="FP32">
1680
+ <dim>512</dim>
1681
+ <dim>512</dim>
1682
+ <dim>3</dim>
1683
+ <dim>3</dim>
1684
+ </port>
1685
+ </input>
1686
+ <output>
1687
+ <port id="2" precision="FP32">
1688
+ <dim>1</dim>
1689
+ <dim>512</dim>
1690
+ <dim>32</dim>
1691
+ <dim>32</dim>
1692
+ </port>
1693
+ </output>
1694
+ </layer>
1695
+ <layer id="81" name="Reshape_745_compressed" type="Const" version="opset1">
1696
+ <data element_type="f16" shape="1, 512, 1, 1" offset="19989126" size="1024"/>
1697
+ <output>
1698
+ <port id="0" precision="FP16">
1699
+ <dim>1</dim>
1700
+ <dim>512</dim>
1701
+ <dim>1</dim>
1702
+ <dim>1</dim>
1703
+ </port>
1704
+ </output>
1705
+ </layer>
1706
+ <layer id="82" name="Reshape_745" type="Convert" version="opset1">
1707
+ <data destination_type="f32"/>
1708
+ <rt_info>
1709
+ <attribute name="decompression" version="0"/>
1710
+ </rt_info>
1711
+ <input>
1712
+ <port id="0" precision="FP16">
1713
+ <dim>1</dim>
1714
+ <dim>512</dim>
1715
+ <dim>1</dim>
1716
+ <dim>1</dim>
1717
+ </port>
1718
+ </input>
1719
+ <output>
1720
+ <port id="1" precision="FP32">
1721
+ <dim>1</dim>
1722
+ <dim>512</dim>
1723
+ <dim>1</dim>
1724
+ <dim>1</dim>
1725
+ </port>
1726
+ </output>
1727
+ </layer>
1728
+ <layer id="83" name="convs.0_4/Conv" type="Add" version="opset1">
1729
+ <data auto_broadcast="numpy"/>
1730
+ <rt_info>
1731
+ <attribute name="fused_names" version="0" value="Concat_744, Reshape_745, convs.0_4/Conv"/>
1732
+ </rt_info>
1733
+ <input>
1734
+ <port id="0" precision="FP32">
1735
+ <dim>1</dim>
1736
+ <dim>512</dim>
1737
+ <dim>32</dim>
1738
+ <dim>32</dim>
1739
+ </port>
1740
+ <port id="1" precision="FP32">
1741
+ <dim>1</dim>
1742
+ <dim>512</dim>
1743
+ <dim>1</dim>
1744
+ <dim>1</dim>
1745
+ </port>
1746
+ </input>
1747
+ <output>
1748
+ <port id="2" precision="FP32" names="convs.0_4/Conv_output_0">
1749
+ <dim>1</dim>
1750
+ <dim>512</dim>
1751
+ <dim>32</dim>
1752
+ <dim>32</dim>
1753
+ </port>
1754
+ </output>
1755
+ </layer>
1756
+ <layer id="84" name="Relu_30" type="ReLU" version="opset1">
1757
+ <rt_info>
1758
+ <attribute name="fused_names" version="0" value="Relu_30"/>
1759
+ </rt_info>
1760
+ <input>
1761
+ <port id="0" precision="FP32">
1762
+ <dim>1</dim>
1763
+ <dim>512</dim>
1764
+ <dim>32</dim>
1765
+ <dim>32</dim>
1766
+ </port>
1767
+ </input>
1768
+ <output>
1769
+ <port id="1" precision="FP32" names="input.59">
1770
+ <dim>1</dim>
1771
+ <dim>512</dim>
1772
+ <dim>32</dim>
1773
+ <dim>32</dim>
1774
+ </port>
1775
+ </output>
1776
+ </layer>
1777
+ <layer id="85" name="block5.convs.1.weight_compressed" type="Const" version="opset1">
1778
+ <data element_type="f16" shape="512, 512, 3, 3" offset="19990150" size="4718592"/>
1779
+ <output>
1780
+ <port id="0" precision="FP16">
1781
+ <dim>512</dim>
1782
+ <dim>512</dim>
1783
+ <dim>3</dim>
1784
+ <dim>3</dim>
1785
+ </port>
1786
+ </output>
1787
+ </layer>
1788
+ <layer id="86" name="block5.convs.1.weight" type="Convert" version="opset1">
1789
+ <data destination_type="f32"/>
1790
+ <rt_info>
1791
+ <attribute name="decompression" version="0"/>
1792
+ <attribute name="fused_names" version="0" value="block5.convs.1.weight"/>
1793
+ </rt_info>
1794
+ <input>
1795
+ <port id="0" precision="FP16">
1796
+ <dim>512</dim>
1797
+ <dim>512</dim>
1798
+ <dim>3</dim>
1799
+ <dim>3</dim>
1800
+ </port>
1801
+ </input>
1802
+ <output>
1803
+ <port id="1" precision="FP32" names="block5.convs.1.weight">
1804
+ <dim>512</dim>
1805
+ <dim>512</dim>
1806
+ <dim>3</dim>
1807
+ <dim>3</dim>
1808
+ </port>
1809
+ </output>
1810
+ </layer>
1811
+ <layer id="87" name="convs.1_4/Conv/WithoutBiases" type="Convolution" version="opset1">
1812
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
1813
+ <rt_info>
1814
+ <attribute name="fused_names" version="0" value="convs.1_4/Conv/WithoutBiases"/>
1815
+ </rt_info>
1816
+ <input>
1817
+ <port id="0" precision="FP32">
1818
+ <dim>1</dim>
1819
+ <dim>512</dim>
1820
+ <dim>32</dim>
1821
+ <dim>32</dim>
1822
+ </port>
1823
+ <port id="1" precision="FP32">
1824
+ <dim>512</dim>
1825
+ <dim>512</dim>
1826
+ <dim>3</dim>
1827
+ <dim>3</dim>
1828
+ </port>
1829
+ </input>
1830
+ <output>
1831
+ <port id="2" precision="FP32">
1832
+ <dim>1</dim>
1833
+ <dim>512</dim>
1834
+ <dim>32</dim>
1835
+ <dim>32</dim>
1836
+ </port>
1837
+ </output>
1838
+ </layer>
1839
+ <layer id="88" name="Reshape_794_compressed" type="Const" version="opset1">
1840
+ <data element_type="f16" shape="1, 512, 1, 1" offset="24708742" size="1024"/>
1841
+ <output>
1842
+ <port id="0" precision="FP16">
1843
+ <dim>1</dim>
1844
+ <dim>512</dim>
1845
+ <dim>1</dim>
1846
+ <dim>1</dim>
1847
+ </port>
1848
+ </output>
1849
+ </layer>
1850
+ <layer id="89" name="Reshape_794" type="Convert" version="opset1">
1851
+ <data destination_type="f32"/>
1852
+ <rt_info>
1853
+ <attribute name="decompression" version="0"/>
1854
+ </rt_info>
1855
+ <input>
1856
+ <port id="0" precision="FP16">
1857
+ <dim>1</dim>
1858
+ <dim>512</dim>
1859
+ <dim>1</dim>
1860
+ <dim>1</dim>
1861
+ </port>
1862
+ </input>
1863
+ <output>
1864
+ <port id="1" precision="FP32">
1865
+ <dim>1</dim>
1866
+ <dim>512</dim>
1867
+ <dim>1</dim>
1868
+ <dim>1</dim>
1869
+ </port>
1870
+ </output>
1871
+ </layer>
1872
+ <layer id="90" name="convs.1_4/Conv" type="Add" version="opset1">
1873
+ <data auto_broadcast="numpy"/>
1874
+ <rt_info>
1875
+ <attribute name="fused_names" version="0" value="Concat_793, Reshape_794, convs.1_4/Conv"/>
1876
+ </rt_info>
1877
+ <input>
1878
+ <port id="0" precision="FP32">
1879
+ <dim>1</dim>
1880
+ <dim>512</dim>
1881
+ <dim>32</dim>
1882
+ <dim>32</dim>
1883
+ </port>
1884
+ <port id="1" precision="FP32">
1885
+ <dim>1</dim>
1886
+ <dim>512</dim>
1887
+ <dim>1</dim>
1888
+ <dim>1</dim>
1889
+ </port>
1890
+ </input>
1891
+ <output>
1892
+ <port id="2" precision="FP32" names="convs.1_4/Conv_output_0">
1893
+ <dim>1</dim>
1894
+ <dim>512</dim>
1895
+ <dim>32</dim>
1896
+ <dim>32</dim>
1897
+ </port>
1898
+ </output>
1899
+ </layer>
1900
+ <layer id="91" name="Relu_32" type="ReLU" version="opset1">
1901
+ <rt_info>
1902
+ <attribute name="fused_names" version="0" value="Relu_32"/>
1903
+ </rt_info>
1904
+ <input>
1905
+ <port id="0" precision="FP32">
1906
+ <dim>1</dim>
1907
+ <dim>512</dim>
1908
+ <dim>32</dim>
1909
+ <dim>32</dim>
1910
+ </port>
1911
+ </input>
1912
+ <output>
1913
+ <port id="1" precision="FP32" names="input.63">
1914
+ <dim>1</dim>
1915
+ <dim>512</dim>
1916
+ <dim>32</dim>
1917
+ <dim>32</dim>
1918
+ </port>
1919
+ </output>
1920
+ </layer>
1921
+ <layer id="92" name="block5.convs.2.weight_compressed" type="Const" version="opset1">
1922
+ <data element_type="f16" shape="512, 512, 3, 3" offset="24709766" size="4718592"/>
1923
+ <output>
1924
+ <port id="0" precision="FP16">
1925
+ <dim>512</dim>
1926
+ <dim>512</dim>
1927
+ <dim>3</dim>
1928
+ <dim>3</dim>
1929
+ </port>
1930
+ </output>
1931
+ </layer>
1932
+ <layer id="93" name="block5.convs.2.weight" type="Convert" version="opset1">
1933
+ <data destination_type="f32"/>
1934
+ <rt_info>
1935
+ <attribute name="decompression" version="0"/>
1936
+ <attribute name="fused_names" version="0" value="block5.convs.2.weight"/>
1937
+ </rt_info>
1938
+ <input>
1939
+ <port id="0" precision="FP16">
1940
+ <dim>512</dim>
1941
+ <dim>512</dim>
1942
+ <dim>3</dim>
1943
+ <dim>3</dim>
1944
+ </port>
1945
+ </input>
1946
+ <output>
1947
+ <port id="1" precision="FP32" names="block5.convs.2.weight">
1948
+ <dim>512</dim>
1949
+ <dim>512</dim>
1950
+ <dim>3</dim>
1951
+ <dim>3</dim>
1952
+ </port>
1953
+ </output>
1954
+ </layer>
1955
+ <layer id="94" name="convs.2_2/Conv/WithoutBiases" type="Convolution" version="opset1">
1956
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit"/>
1957
+ <rt_info>
1958
+ <attribute name="fused_names" version="0" value="convs.2_2/Conv/WithoutBiases"/>
1959
+ </rt_info>
1960
+ <input>
1961
+ <port id="0" precision="FP32">
1962
+ <dim>1</dim>
1963
+ <dim>512</dim>
1964
+ <dim>32</dim>
1965
+ <dim>32</dim>
1966
+ </port>
1967
+ <port id="1" precision="FP32">
1968
+ <dim>512</dim>
1969
+ <dim>512</dim>
1970
+ <dim>3</dim>
1971
+ <dim>3</dim>
1972
+ </port>
1973
+ </input>
1974
+ <output>
1975
+ <port id="2" precision="FP32">
1976
+ <dim>1</dim>
1977
+ <dim>512</dim>
1978
+ <dim>32</dim>
1979
+ <dim>32</dim>
1980
+ </port>
1981
+ </output>
1982
+ </layer>
1983
+ <layer id="95" name="Reshape_843_compressed" type="Const" version="opset1">
1984
+ <data element_type="f16" shape="1, 512, 1, 1" offset="29428358" size="1024"/>
1985
+ <output>
1986
+ <port id="0" precision="FP16">
1987
+ <dim>1</dim>
1988
+ <dim>512</dim>
1989
+ <dim>1</dim>
1990
+ <dim>1</dim>
1991
+ </port>
1992
+ </output>
1993
+ </layer>
1994
+ <layer id="96" name="Reshape_843" type="Convert" version="opset1">
1995
+ <data destination_type="f32"/>
1996
+ <rt_info>
1997
+ <attribute name="decompression" version="0"/>
1998
+ </rt_info>
1999
+ <input>
2000
+ <port id="0" precision="FP16">
2001
+ <dim>1</dim>
2002
+ <dim>512</dim>
2003
+ <dim>1</dim>
2004
+ <dim>1</dim>
2005
+ </port>
2006
+ </input>
2007
+ <output>
2008
+ <port id="1" precision="FP32">
2009
+ <dim>1</dim>
2010
+ <dim>512</dim>
2011
+ <dim>1</dim>
2012
+ <dim>1</dim>
2013
+ </port>
2014
+ </output>
2015
+ </layer>
2016
+ <layer id="97" name="convs.2_2/Conv" type="Add" version="opset1">
2017
+ <data auto_broadcast="numpy"/>
2018
+ <rt_info>
2019
+ <attribute name="fused_names" version="0" value="Concat_842, Reshape_843, convs.2_2/Conv"/>
2020
+ </rt_info>
2021
+ <input>
2022
+ <port id="0" precision="FP32">
2023
+ <dim>1</dim>
2024
+ <dim>512</dim>
2025
+ <dim>32</dim>
2026
+ <dim>32</dim>
2027
+ </port>
2028
+ <port id="1" precision="FP32">
2029
+ <dim>1</dim>
2030
+ <dim>512</dim>
2031
+ <dim>1</dim>
2032
+ <dim>1</dim>
2033
+ </port>
2034
+ </input>
2035
+ <output>
2036
+ <port id="2" precision="FP32" names="convs.2_2/Conv_output_0">
2037
+ <dim>1</dim>
2038
+ <dim>512</dim>
2039
+ <dim>32</dim>
2040
+ <dim>32</dim>
2041
+ </port>
2042
+ </output>
2043
+ </layer>
2044
+ <layer id="98" name="Relu_34" type="ReLU" version="opset1">
2045
+ <rt_info>
2046
+ <attribute name="fused_names" version="0" value="Relu_34"/>
2047
+ </rt_info>
2048
+ <input>
2049
+ <port id="0" precision="FP32">
2050
+ <dim>1</dim>
2051
+ <dim>512</dim>
2052
+ <dim>32</dim>
2053
+ <dim>32</dim>
2054
+ </port>
2055
+ </input>
2056
+ <output>
2057
+ <port id="1" precision="FP32" names="input.67">
2058
+ <dim>1</dim>
2059
+ <dim>512</dim>
2060
+ <dim>32</dim>
2061
+ <dim>32</dim>
2062
+ </port>
2063
+ </output>
2064
+ </layer>
2065
+ <layer id="99" name="block5.projection.weight_compressed" type="Const" version="opset1">
2066
+ <data element_type="f16" shape="1, 512, 1, 1" offset="29429382" size="1024"/>
2067
+ <output>
2068
+ <port id="0" precision="FP16">
2069
+ <dim>1</dim>
2070
+ <dim>512</dim>
2071
+ <dim>1</dim>
2072
+ <dim>1</dim>
2073
+ </port>
2074
+ </output>
2075
+ </layer>
2076
+ <layer id="100" name="block5.projection.weight" type="Convert" version="opset1">
2077
+ <data destination_type="f32"/>
2078
+ <rt_info>
2079
+ <attribute name="decompression" version="0"/>
2080
+ <attribute name="fused_names" version="0" value="block5.projection.weight"/>
2081
+ </rt_info>
2082
+ <input>
2083
+ <port id="0" precision="FP16">
2084
+ <dim>1</dim>
2085
+ <dim>512</dim>
2086
+ <dim>1</dim>
2087
+ <dim>1</dim>
2088
+ </port>
2089
+ </input>
2090
+ <output>
2091
+ <port id="1" precision="FP32" names="block5.projection.weight">
2092
+ <dim>1</dim>
2093
+ <dim>512</dim>
2094
+ <dim>1</dim>
2095
+ <dim>1</dim>
2096
+ </port>
2097
+ </output>
2098
+ </layer>
2099
+ <layer id="101" name="projection_4/Conv/WithoutBiases" type="Convolution" version="opset1">
2100
+ <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit"/>
2101
+ <rt_info>
2102
+ <attribute name="fused_names" version="0" value="projection_4/Conv/WithoutBiases"/>
2103
+ </rt_info>
2104
+ <input>
2105
+ <port id="0" precision="FP32">
2106
+ <dim>1</dim>
2107
+ <dim>512</dim>
2108
+ <dim>32</dim>
2109
+ <dim>32</dim>
2110
+ </port>
2111
+ <port id="1" precision="FP32">
2112
+ <dim>1</dim>
2113
+ <dim>512</dim>
2114
+ <dim>1</dim>
2115
+ <dim>1</dim>
2116
+ </port>
2117
+ </input>
2118
+ <output>
2119
+ <port id="2" precision="FP32">
2120
+ <dim>1</dim>
2121
+ <dim>1</dim>
2122
+ <dim>32</dim>
2123
+ <dim>32</dim>
2124
+ </port>
2125
+ </output>
2126
+ </layer>
2127
+ <layer id="102" name="Reshape_892_compressed" type="Const" version="opset1">
2128
+ <data element_type="f16" shape="1, 1, 1, 1" offset="29430406" size="2"/>
2129
+ <output>
2130
+ <port id="0" precision="FP16">
2131
+ <dim>1</dim>
2132
+ <dim>1</dim>
2133
+ <dim>1</dim>
2134
+ <dim>1</dim>
2135
+ </port>
2136
+ </output>
2137
+ </layer>
2138
+ <layer id="103" name="Reshape_892" type="Convert" version="opset1">
2139
+ <data destination_type="f32"/>
2140
+ <rt_info>
2141
+ <attribute name="decompression" version="0"/>
2142
+ </rt_info>
2143
+ <input>
2144
+ <port id="0" precision="FP16">
2145
+ <dim>1</dim>
2146
+ <dim>1</dim>
2147
+ <dim>1</dim>
2148
+ <dim>1</dim>
2149
+ </port>
2150
+ </input>
2151
+ <output>
2152
+ <port id="1" precision="FP32">
2153
+ <dim>1</dim>
2154
+ <dim>1</dim>
2155
+ <dim>1</dim>
2156
+ <dim>1</dim>
2157
+ </port>
2158
+ </output>
2159
+ </layer>
2160
+ <layer id="104" name="73" type="Add" version="opset1">
2161
+ <data auto_broadcast="numpy"/>
2162
+ <rt_info>
2163
+ <attribute name="fused_names" version="0" value="73, Concat_891, Reshape_892"/>
2164
+ </rt_info>
2165
+ <input>
2166
+ <port id="0" precision="FP32">
2167
+ <dim>1</dim>
2168
+ <dim>1</dim>
2169
+ <dim>32</dim>
2170
+ <dim>32</dim>
2171
+ </port>
2172
+ <port id="1" precision="FP32">
2173
+ <dim>1</dim>
2174
+ <dim>1</dim>
2175
+ <dim>1</dim>
2176
+ <dim>1</dim>
2177
+ </port>
2178
+ </input>
2179
+ <output>
2180
+ <port id="2" precision="FP32" names="73">
2181
+ <dim>1</dim>
2182
+ <dim>1</dim>
2183
+ <dim>32</dim>
2184
+ <dim>32</dim>
2185
+ </port>
2186
+ </output>
2187
+ </layer>
2188
+ <layer id="106" name="block4.projection.weight_compressed" type="Const" version="opset1">
2189
+ <data element_type="f16" shape="1, 512, 1, 1" offset="29430408" size="1024"/>
2190
+ <output>
2191
+ <port id="0" precision="FP16">
2192
+ <dim>1</dim>
2193
+ <dim>512</dim>
2194
+ <dim>1</dim>
2195
+ <dim>1</dim>
2196
+ </port>
2197
+ </output>
2198
+ </layer>
2199
+ <layer id="107" name="block4.projection.weight" type="Convert" version="opset1">
2200
+ <data destination_type="f32"/>
2201
+ <rt_info>
2202
+ <attribute name="decompression" version="0"/>
2203
+ <attribute name="fused_names" version="0" value="block4.projection.weight"/>
2204
+ </rt_info>
2205
+ <input>
2206
+ <port id="0" precision="FP16">
2207
+ <dim>1</dim>
2208
+ <dim>512</dim>
2209
+ <dim>1</dim>
2210
+ <dim>1</dim>
2211
+ </port>
2212
+ </input>
2213
+ <output>
2214
+ <port id="1" precision="FP32" names="block4.projection.weight">
2215
+ <dim>1</dim>
2216
+ <dim>512</dim>
2217
+ <dim>1</dim>
2218
+ <dim>1</dim>
2219
+ </port>
2220
+ </output>
2221
+ </layer>
2222
+ <layer id="108" name="projection_3/Conv/WithoutBiases" type="Convolution" version="opset1">
2223
+ <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit"/>
2224
+ <rt_info>
2225
+ <attribute name="fused_names" version="0" value="projection_3/Conv/WithoutBiases"/>
2226
+ </rt_info>
2227
+ <input>
2228
+ <port id="0" precision="FP32">
2229
+ <dim>1</dim>
2230
+ <dim>512</dim>
2231
+ <dim>64</dim>
2232
+ <dim>64</dim>
2233
+ </port>
2234
+ <port id="1" precision="FP32">
2235
+ <dim>1</dim>
2236
+ <dim>512</dim>
2237
+ <dim>1</dim>
2238
+ <dim>1</dim>
2239
+ </port>
2240
+ </input>
2241
+ <output>
2242
+ <port id="2" precision="FP32">
2243
+ <dim>1</dim>
2244
+ <dim>1</dim>
2245
+ <dim>64</dim>
2246
+ <dim>64</dim>
2247
+ </port>
2248
+ </output>
2249
+ </layer>
2250
+ <layer id="109" name="Reshape_696_compressed" type="Const" version="opset1">
2251
+ <data element_type="f16" shape="1, 1, 1, 1" offset="29431432" size="2"/>
2252
+ <output>
2253
+ <port id="0" precision="FP16">
2254
+ <dim>1</dim>
2255
+ <dim>1</dim>
2256
+ <dim>1</dim>
2257
+ <dim>1</dim>
2258
+ </port>
2259
+ </output>
2260
+ </layer>
2261
+ <layer id="110" name="Reshape_696" type="Convert" version="opset1">
2262
+ <data destination_type="f32"/>
2263
+ <rt_info>
2264
+ <attribute name="decompression" version="0"/>
2265
+ </rt_info>
2266
+ <input>
2267
+ <port id="0" precision="FP16">
2268
+ <dim>1</dim>
2269
+ <dim>1</dim>
2270
+ <dim>1</dim>
2271
+ <dim>1</dim>
2272
+ </port>
2273
+ </input>
2274
+ <output>
2275
+ <port id="1" precision="FP32">
2276
+ <dim>1</dim>
2277
+ <dim>1</dim>
2278
+ <dim>1</dim>
2279
+ <dim>1</dim>
2280
+ </port>
2281
+ </output>
2282
+ </layer>
2283
+ <layer id="111" name="65" type="Add" version="opset1">
2284
+ <data auto_broadcast="numpy"/>
2285
+ <rt_info>
2286
+ <attribute name="fused_names" version="0" value="65, Concat_695, Reshape_696"/>
2287
+ </rt_info>
2288
+ <input>
2289
+ <port id="0" precision="FP32">
2290
+ <dim>1</dim>
2291
+ <dim>1</dim>
2292
+ <dim>64</dim>
2293
+ <dim>64</dim>
2294
+ </port>
2295
+ <port id="1" precision="FP32">
2296
+ <dim>1</dim>
2297
+ <dim>1</dim>
2298
+ <dim>1</dim>
2299
+ <dim>1</dim>
2300
+ </port>
2301
+ </input>
2302
+ <output>
2303
+ <port id="2" precision="FP32" names="65">
2304
+ <dim>1</dim>
2305
+ <dim>1</dim>
2306
+ <dim>64</dim>
2307
+ <dim>64</dim>
2308
+ </port>
2309
+ </output>
2310
+ </layer>
2311
+ <layer id="113" name="block3.projection.weight_compressed" type="Const" version="opset1">
2312
+ <data element_type="f16" shape="1, 256, 1, 1" offset="29431434" size="512"/>
2313
+ <output>
2314
+ <port id="0" precision="FP16">
2315
+ <dim>1</dim>
2316
+ <dim>256</dim>
2317
+ <dim>1</dim>
2318
+ <dim>1</dim>
2319
+ </port>
2320
+ </output>
2321
+ </layer>
2322
+ <layer id="114" name="block3.projection.weight" type="Convert" version="opset1">
2323
+ <data destination_type="f32"/>
2324
+ <rt_info>
2325
+ <attribute name="decompression" version="0"/>
2326
+ <attribute name="fused_names" version="0" value="block3.projection.weight"/>
2327
+ </rt_info>
2328
+ <input>
2329
+ <port id="0" precision="FP16">
2330
+ <dim>1</dim>
2331
+ <dim>256</dim>
2332
+ <dim>1</dim>
2333
+ <dim>1</dim>
2334
+ </port>
2335
+ </input>
2336
+ <output>
2337
+ <port id="1" precision="FP32" names="block3.projection.weight">
2338
+ <dim>1</dim>
2339
+ <dim>256</dim>
2340
+ <dim>1</dim>
2341
+ <dim>1</dim>
2342
+ </port>
2343
+ </output>
2344
+ </layer>
2345
+ <layer id="115" name="projection_2/Conv/WithoutBiases" type="Convolution" version="opset1">
2346
+ <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit"/>
2347
+ <rt_info>
2348
+ <attribute name="fused_names" version="0" value="projection_2/Conv/WithoutBiases"/>
2349
+ </rt_info>
2350
+ <input>
2351
+ <port id="0" precision="FP32">
2352
+ <dim>1</dim>
2353
+ <dim>256</dim>
2354
+ <dim>128</dim>
2355
+ <dim>128</dim>
2356
+ </port>
2357
+ <port id="1" precision="FP32">
2358
+ <dim>1</dim>
2359
+ <dim>256</dim>
2360
+ <dim>1</dim>
2361
+ <dim>1</dim>
2362
+ </port>
2363
+ </input>
2364
+ <output>
2365
+ <port id="2" precision="FP32">
2366
+ <dim>1</dim>
2367
+ <dim>1</dim>
2368
+ <dim>128</dim>
2369
+ <dim>128</dim>
2370
+ </port>
2371
+ </output>
2372
+ </layer>
2373
+ <layer id="116" name="Reshape_500_compressed" type="Const" version="opset1">
2374
+ <data element_type="f16" shape="1, 1, 1, 1" offset="29431946" size="2"/>
2375
+ <output>
2376
+ <port id="0" precision="FP16">
2377
+ <dim>1</dim>
2378
+ <dim>1</dim>
2379
+ <dim>1</dim>
2380
+ <dim>1</dim>
2381
+ </port>
2382
+ </output>
2383
+ </layer>
2384
+ <layer id="117" name="Reshape_500" type="Convert" version="opset1">
2385
+ <data destination_type="f32"/>
2386
+ <rt_info>
2387
+ <attribute name="decompression" version="0"/>
2388
+ </rt_info>
2389
+ <input>
2390
+ <port id="0" precision="FP16">
2391
+ <dim>1</dim>
2392
+ <dim>1</dim>
2393
+ <dim>1</dim>
2394
+ <dim>1</dim>
2395
+ </port>
2396
+ </input>
2397
+ <output>
2398
+ <port id="1" precision="FP32">
2399
+ <dim>1</dim>
2400
+ <dim>1</dim>
2401
+ <dim>1</dim>
2402
+ <dim>1</dim>
2403
+ </port>
2404
+ </output>
2405
+ </layer>
2406
+ <layer id="118" name="57" type="Add" version="opset1">
2407
+ <data auto_broadcast="numpy"/>
2408
+ <rt_info>
2409
+ <attribute name="fused_names" version="0" value="57, Concat_499, Reshape_500"/>
2410
+ </rt_info>
2411
+ <input>
2412
+ <port id="0" precision="FP32">
2413
+ <dim>1</dim>
2414
+ <dim>1</dim>
2415
+ <dim>128</dim>
2416
+ <dim>128</dim>
2417
+ </port>
2418
+ <port id="1" precision="FP32">
2419
+ <dim>1</dim>
2420
+ <dim>1</dim>
2421
+ <dim>1</dim>
2422
+ <dim>1</dim>
2423
+ </port>
2424
+ </input>
2425
+ <output>
2426
+ <port id="2" precision="FP32" names="57">
2427
+ <dim>1</dim>
2428
+ <dim>1</dim>
2429
+ <dim>128</dim>
2430
+ <dim>128</dim>
2431
+ </port>
2432
+ </output>
2433
+ </layer>
2434
+ <layer id="120" name="block2.projection.weight_compressed" type="Const" version="opset1">
2435
+ <data element_type="f16" shape="1, 128, 1, 1" offset="29431948" size="256"/>
2436
+ <output>
2437
+ <port id="0" precision="FP16">
2438
+ <dim>1</dim>
2439
+ <dim>128</dim>
2440
+ <dim>1</dim>
2441
+ <dim>1</dim>
2442
+ </port>
2443
+ </output>
2444
+ </layer>
2445
+ <layer id="121" name="block2.projection.weight" type="Convert" version="opset1">
2446
+ <data destination_type="f32"/>
2447
+ <rt_info>
2448
+ <attribute name="decompression" version="0"/>
2449
+ <attribute name="fused_names" version="0" value="block2.projection.weight"/>
2450
+ </rt_info>
2451
+ <input>
2452
+ <port id="0" precision="FP16">
2453
+ <dim>1</dim>
2454
+ <dim>128</dim>
2455
+ <dim>1</dim>
2456
+ <dim>1</dim>
2457
+ </port>
2458
+ </input>
2459
+ <output>
2460
+ <port id="1" precision="FP32" names="block2.projection.weight">
2461
+ <dim>1</dim>
2462
+ <dim>128</dim>
2463
+ <dim>1</dim>
2464
+ <dim>1</dim>
2465
+ </port>
2466
+ </output>
2467
+ </layer>
2468
+ <layer id="122" name="projection_1/Conv/WithoutBiases" type="Convolution" version="opset1">
2469
+ <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit"/>
2470
+ <rt_info>
2471
+ <attribute name="fused_names" version="0" value="projection_1/Conv/WithoutBiases"/>
2472
+ </rt_info>
2473
+ <input>
2474
+ <port id="0" precision="FP32">
2475
+ <dim>1</dim>
2476
+ <dim>128</dim>
2477
+ <dim>256</dim>
2478
+ <dim>256</dim>
2479
+ </port>
2480
+ <port id="1" precision="FP32">
2481
+ <dim>1</dim>
2482
+ <dim>128</dim>
2483
+ <dim>1</dim>
2484
+ <dim>1</dim>
2485
+ </port>
2486
+ </input>
2487
+ <output>
2488
+ <port id="2" precision="FP32">
2489
+ <dim>1</dim>
2490
+ <dim>1</dim>
2491
+ <dim>256</dim>
2492
+ <dim>256</dim>
2493
+ </port>
2494
+ </output>
2495
+ </layer>
2496
+ <layer id="123" name="Reshape_304_compressed" type="Const" version="opset1">
2497
+ <data element_type="f16" shape="1, 1, 1, 1" offset="29432204" size="2"/>
2498
+ <output>
2499
+ <port id="0" precision="FP16">
2500
+ <dim>1</dim>
2501
+ <dim>1</dim>
2502
+ <dim>1</dim>
2503
+ <dim>1</dim>
2504
+ </port>
2505
+ </output>
2506
+ </layer>
2507
+ <layer id="124" name="Reshape_304" type="Convert" version="opset1">
2508
+ <data destination_type="f32"/>
2509
+ <rt_info>
2510
+ <attribute name="decompression" version="0"/>
2511
+ </rt_info>
2512
+ <input>
2513
+ <port id="0" precision="FP16">
2514
+ <dim>1</dim>
2515
+ <dim>1</dim>
2516
+ <dim>1</dim>
2517
+ <dim>1</dim>
2518
+ </port>
2519
+ </input>
2520
+ <output>
2521
+ <port id="1" precision="FP32">
2522
+ <dim>1</dim>
2523
+ <dim>1</dim>
2524
+ <dim>1</dim>
2525
+ <dim>1</dim>
2526
+ </port>
2527
+ </output>
2528
+ </layer>
2529
+ <layer id="125" name="49" type="Add" version="opset1">
2530
+ <data auto_broadcast="numpy"/>
2531
+ <rt_info>
2532
+ <attribute name="fused_names" version="0" value="49, Concat_303, Reshape_304"/>
2533
+ </rt_info>
2534
+ <input>
2535
+ <port id="0" precision="FP32">
2536
+ <dim>1</dim>
2537
+ <dim>1</dim>
2538
+ <dim>256</dim>
2539
+ <dim>256</dim>
2540
+ </port>
2541
+ <port id="1" precision="FP32">
2542
+ <dim>1</dim>
2543
+ <dim>1</dim>
2544
+ <dim>1</dim>
2545
+ <dim>1</dim>
2546
+ </port>
2547
+ </input>
2548
+ <output>
2549
+ <port id="2" precision="FP32" names="49">
2550
+ <dim>1</dim>
2551
+ <dim>1</dim>
2552
+ <dim>256</dim>
2553
+ <dim>256</dim>
2554
+ </port>
2555
+ </output>
2556
+ </layer>
2557
+ <layer id="127" name="block1.projection.weight_compressed" type="Const" version="opset1">
2558
+ <data element_type="f16" shape="1, 64, 1, 1" offset="29432206" size="128"/>
2559
+ <output>
2560
+ <port id="0" precision="FP16">
2561
+ <dim>1</dim>
2562
+ <dim>64</dim>
2563
+ <dim>1</dim>
2564
+ <dim>1</dim>
2565
+ </port>
2566
+ </output>
2567
+ </layer>
2568
+ <layer id="128" name="block1.projection.weight" type="Convert" version="opset1">
2569
+ <data destination_type="f32"/>
2570
+ <rt_info>
2571
+ <attribute name="decompression" version="0"/>
2572
+ <attribute name="fused_names" version="0" value="block1.projection.weight"/>
2573
+ </rt_info>
2574
+ <input>
2575
+ <port id="0" precision="FP16">
2576
+ <dim>1</dim>
2577
+ <dim>64</dim>
2578
+ <dim>1</dim>
2579
+ <dim>1</dim>
2580
+ </port>
2581
+ </input>
2582
+ <output>
2583
+ <port id="1" precision="FP32" names="block1.projection.weight">
2584
+ <dim>1</dim>
2585
+ <dim>64</dim>
2586
+ <dim>1</dim>
2587
+ <dim>1</dim>
2588
+ </port>
2589
+ </output>
2590
+ </layer>
2591
+ <layer id="129" name="projection/Conv/WithoutBiases" type="Convolution" version="opset1">
2592
+ <data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit"/>
2593
+ <rt_info>
2594
+ <attribute name="fused_names" version="0" value="projection/Conv/WithoutBiases"/>
2595
+ </rt_info>
2596
+ <input>
2597
+ <port id="0" precision="FP32">
2598
+ <dim>1</dim>
2599
+ <dim>64</dim>
2600
+ <dim>512</dim>
2601
+ <dim>512</dim>
2602
+ </port>
2603
+ <port id="1" precision="FP32">
2604
+ <dim>1</dim>
2605
+ <dim>64</dim>
2606
+ <dim>1</dim>
2607
+ <dim>1</dim>
2608
+ </port>
2609
+ </input>
2610
+ <output>
2611
+ <port id="2" precision="FP32">
2612
+ <dim>1</dim>
2613
+ <dim>1</dim>
2614
+ <dim>512</dim>
2615
+ <dim>512</dim>
2616
+ </port>
2617
+ </output>
2618
+ </layer>
2619
+ <layer id="130" name="Reshape_157_compressed" type="Const" version="opset1">
2620
+ <data element_type="f16" shape="1, 1, 1, 1" offset="29432334" size="2"/>
2621
+ <output>
2622
+ <port id="0" precision="FP16">
2623
+ <dim>1</dim>
2624
+ <dim>1</dim>
2625
+ <dim>1</dim>
2626
+ <dim>1</dim>
2627
+ </port>
2628
+ </output>
2629
+ </layer>
2630
+ <layer id="131" name="Reshape_157" type="Convert" version="opset1">
2631
+ <data destination_type="f32"/>
2632
+ <rt_info>
2633
+ <attribute name="decompression" version="0"/>
2634
+ </rt_info>
2635
+ <input>
2636
+ <port id="0" precision="FP16">
2637
+ <dim>1</dim>
2638
+ <dim>1</dim>
2639
+ <dim>1</dim>
2640
+ <dim>1</dim>
2641
+ </port>
2642
+ </input>
2643
+ <output>
2644
+ <port id="1" precision="FP32">
2645
+ <dim>1</dim>
2646
+ <dim>1</dim>
2647
+ <dim>1</dim>
2648
+ <dim>1</dim>
2649
+ </port>
2650
+ </output>
2651
+ </layer>
2652
+ <layer id="132" name="43" type="Add" version="opset1">
2653
+ <data auto_broadcast="numpy"/>
2654
+ <rt_info>
2655
+ <attribute name="fused_names" version="0" value="43, Concat_156, Reshape_157"/>
2656
+ </rt_info>
2657
+ <input>
2658
+ <port id="0" precision="FP32">
2659
+ <dim>1</dim>
2660
+ <dim>1</dim>
2661
+ <dim>512</dim>
2662
+ <dim>512</dim>
2663
+ </port>
2664
+ <port id="1" precision="FP32">
2665
+ <dim>1</dim>
2666
+ <dim>1</dim>
2667
+ <dim>1</dim>
2668
+ <dim>1</dim>
2669
+ </port>
2670
+ </input>
2671
+ <output>
2672
+ <port id="2" precision="FP32" names="43">
2673
+ <dim>1</dim>
2674
+ <dim>1</dim>
2675
+ <dim>512</dim>
2676
+ <dim>512</dim>
2677
+ </port>
2678
+ </output>
2679
+ </layer>
2680
+ <layer id="133" name="43/sink_port_0" type="Result" version="opset1">
2681
+ <rt_info>
2682
+ <attribute name="fused_names" version="0" value="43/sink_port_0"/>
2683
+ </rt_info>
2684
+ <input>
2685
+ <port id="0" precision="FP32">
2686
+ <dim>1</dim>
2687
+ <dim>1</dim>
2688
+ <dim>512</dim>
2689
+ <dim>512</dim>
2690
+ </port>
2691
+ </input>
2692
+ </layer>
2693
+ <layer id="126" name="49/sink_port_0" type="Result" version="opset1">
2694
+ <rt_info>
2695
+ <attribute name="fused_names" version="0" value="49/sink_port_0"/>
2696
+ </rt_info>
2697
+ <input>
2698
+ <port id="0" precision="FP32">
2699
+ <dim>1</dim>
2700
+ <dim>1</dim>
2701
+ <dim>256</dim>
2702
+ <dim>256</dim>
2703
+ </port>
2704
+ </input>
2705
+ </layer>
2706
+ <layer id="119" name="57/sink_port_0" type="Result" version="opset1">
2707
+ <rt_info>
2708
+ <attribute name="fused_names" version="0" value="57/sink_port_0"/>
2709
+ </rt_info>
2710
+ <input>
2711
+ <port id="0" precision="FP32">
2712
+ <dim>1</dim>
2713
+ <dim>1</dim>
2714
+ <dim>128</dim>
2715
+ <dim>128</dim>
2716
+ </port>
2717
+ </input>
2718
+ </layer>
2719
+ <layer id="112" name="65/sink_port_0" type="Result" version="opset1">
2720
+ <rt_info>
2721
+ <attribute name="fused_names" version="0" value="65/sink_port_0"/>
2722
+ </rt_info>
2723
+ <input>
2724
+ <port id="0" precision="FP32">
2725
+ <dim>1</dim>
2726
+ <dim>1</dim>
2727
+ <dim>64</dim>
2728
+ <dim>64</dim>
2729
+ </port>
2730
+ </input>
2731
+ </layer>
2732
+ <layer id="105" name="73/sink_port_0" type="Result" version="opset1">
2733
+ <rt_info>
2734
+ <attribute name="fused_names" version="0" value="73/sink_port_0"/>
2735
+ </rt_info>
2736
+ <input>
2737
+ <port id="0" precision="FP32">
2738
+ <dim>1</dim>
2739
+ <dim>1</dim>
2740
+ <dim>32</dim>
2741
+ <dim>32</dim>
2742
+ </port>
2743
+ </input>
2744
+ </layer>
2745
+ </layers>
2746
+ <edges>
2747
+ <edge from-layer="0" from-port="0" to-layer="3" to-port="0"/>
2748
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="0"/>
2749
+ <edge from-layer="2" from-port="1" to-layer="3" to-port="1"/>
2750
+ <edge from-layer="3" from-port="2" to-layer="6" to-port="0"/>
2751
+ <edge from-layer="4" from-port="0" to-layer="5" to-port="0"/>
2752
+ <edge from-layer="5" from-port="1" to-layer="6" to-port="1"/>
2753
+ <edge from-layer="6" from-port="2" to-layer="9" to-port="0"/>
2754
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="0"/>
2755
+ <edge from-layer="8" from-port="1" to-layer="9" to-port="1"/>
2756
+ <edge from-layer="9" from-port="2" to-layer="10" to-port="0"/>
2757
+ <edge from-layer="10" from-port="1" to-layer="13" to-port="0"/>
2758
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="0"/>
2759
+ <edge from-layer="12" from-port="1" to-layer="13" to-port="1"/>
2760
+ <edge from-layer="13" from-port="2" to-layer="16" to-port="0"/>
2761
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="0"/>
2762
+ <edge from-layer="15" from-port="1" to-layer="16" to-port="1"/>
2763
+ <edge from-layer="16" from-port="2" to-layer="17" to-port="0"/>
2764
+ <edge from-layer="17" from-port="1" to-layer="18" to-port="0"/>
2765
+ <edge from-layer="17" from-port="1" to-layer="129" to-port="0"/>
2766
+ <edge from-layer="18" from-port="1" to-layer="21" to-port="0"/>
2767
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="0"/>
2768
+ <edge from-layer="20" from-port="1" to-layer="21" to-port="1"/>
2769
+ <edge from-layer="21" from-port="2" to-layer="24" to-port="0"/>
2770
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="0"/>
2771
+ <edge from-layer="23" from-port="1" to-layer="24" to-port="1"/>
2772
+ <edge from-layer="24" from-port="2" to-layer="25" to-port="0"/>
2773
+ <edge from-layer="25" from-port="1" to-layer="28" to-port="0"/>
2774
+ <edge from-layer="26" from-port="0" to-layer="27" to-port="0"/>
2775
+ <edge from-layer="27" from-port="1" to-layer="28" to-port="1"/>
2776
+ <edge from-layer="28" from-port="2" to-layer="31" to-port="0"/>
2777
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="0"/>
2778
+ <edge from-layer="30" from-port="1" to-layer="31" to-port="1"/>
2779
+ <edge from-layer="31" from-port="2" to-layer="32" to-port="0"/>
2780
+ <edge from-layer="32" from-port="1" to-layer="33" to-port="0"/>
2781
+ <edge from-layer="32" from-port="1" to-layer="122" to-port="0"/>
2782
+ <edge from-layer="33" from-port="1" to-layer="36" to-port="0"/>
2783
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="0"/>
2784
+ <edge from-layer="35" from-port="1" to-layer="36" to-port="1"/>
2785
+ <edge from-layer="36" from-port="2" to-layer="39" to-port="0"/>
2786
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="0"/>
2787
+ <edge from-layer="38" from-port="1" to-layer="39" to-port="1"/>
2788
+ <edge from-layer="39" from-port="2" to-layer="40" to-port="0"/>
2789
+ <edge from-layer="40" from-port="1" to-layer="43" to-port="0"/>
2790
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="0"/>
2791
+ <edge from-layer="42" from-port="1" to-layer="43" to-port="1"/>
2792
+ <edge from-layer="43" from-port="2" to-layer="46" to-port="0"/>
2793
+ <edge from-layer="44" from-port="0" to-layer="45" to-port="0"/>
2794
+ <edge from-layer="45" from-port="1" to-layer="46" to-port="1"/>
2795
+ <edge from-layer="46" from-port="2" to-layer="47" to-port="0"/>
2796
+ <edge from-layer="47" from-port="1" to-layer="50" to-port="0"/>
2797
+ <edge from-layer="48" from-port="0" to-layer="49" to-port="0"/>
2798
+ <edge from-layer="49" from-port="1" to-layer="50" to-port="1"/>
2799
+ <edge from-layer="50" from-port="2" to-layer="53" to-port="0"/>
2800
+ <edge from-layer="51" from-port="0" to-layer="52" to-port="0"/>
2801
+ <edge from-layer="52" from-port="1" to-layer="53" to-port="1"/>
2802
+ <edge from-layer="53" from-port="2" to-layer="54" to-port="0"/>
2803
+ <edge from-layer="54" from-port="1" to-layer="55" to-port="0"/>
2804
+ <edge from-layer="54" from-port="1" to-layer="115" to-port="0"/>
2805
+ <edge from-layer="55" from-port="1" to-layer="58" to-port="0"/>
2806
+ <edge from-layer="56" from-port="0" to-layer="57" to-port="0"/>
2807
+ <edge from-layer="57" from-port="1" to-layer="58" to-port="1"/>
2808
+ <edge from-layer="58" from-port="2" to-layer="61" to-port="0"/>
2809
+ <edge from-layer="59" from-port="0" to-layer="60" to-port="0"/>
2810
+ <edge from-layer="60" from-port="1" to-layer="61" to-port="1"/>
2811
+ <edge from-layer="61" from-port="2" to-layer="62" to-port="0"/>
2812
+ <edge from-layer="62" from-port="1" to-layer="65" to-port="0"/>
2813
+ <edge from-layer="63" from-port="0" to-layer="64" to-port="0"/>
2814
+ <edge from-layer="64" from-port="1" to-layer="65" to-port="1"/>
2815
+ <edge from-layer="65" from-port="2" to-layer="68" to-port="0"/>
2816
+ <edge from-layer="66" from-port="0" to-layer="67" to-port="0"/>
2817
+ <edge from-layer="67" from-port="1" to-layer="68" to-port="1"/>
2818
+ <edge from-layer="68" from-port="2" to-layer="69" to-port="0"/>
2819
+ <edge from-layer="69" from-port="1" to-layer="72" to-port="0"/>
2820
+ <edge from-layer="70" from-port="0" to-layer="71" to-port="0"/>
2821
+ <edge from-layer="71" from-port="1" to-layer="72" to-port="1"/>
2822
+ <edge from-layer="72" from-port="2" to-layer="75" to-port="0"/>
2823
+ <edge from-layer="73" from-port="0" to-layer="74" to-port="0"/>
2824
+ <edge from-layer="74" from-port="1" to-layer="75" to-port="1"/>
2825
+ <edge from-layer="75" from-port="2" to-layer="76" to-port="0"/>
2826
+ <edge from-layer="76" from-port="1" to-layer="77" to-port="0"/>
2827
+ <edge from-layer="76" from-port="1" to-layer="108" to-port="0"/>
2828
+ <edge from-layer="77" from-port="1" to-layer="80" to-port="0"/>
2829
+ <edge from-layer="78" from-port="0" to-layer="79" to-port="0"/>
2830
+ <edge from-layer="79" from-port="1" to-layer="80" to-port="1"/>
2831
+ <edge from-layer="80" from-port="2" to-layer="83" to-port="0"/>
2832
+ <edge from-layer="81" from-port="0" to-layer="82" to-port="0"/>
2833
+ <edge from-layer="82" from-port="1" to-layer="83" to-port="1"/>
2834
+ <edge from-layer="83" from-port="2" to-layer="84" to-port="0"/>
2835
+ <edge from-layer="84" from-port="1" to-layer="87" to-port="0"/>
2836
+ <edge from-layer="85" from-port="0" to-layer="86" to-port="0"/>
2837
+ <edge from-layer="86" from-port="1" to-layer="87" to-port="1"/>
2838
+ <edge from-layer="87" from-port="2" to-layer="90" to-port="0"/>
2839
+ <edge from-layer="88" from-port="0" to-layer="89" to-port="0"/>
2840
+ <edge from-layer="89" from-port="1" to-layer="90" to-port="1"/>
2841
+ <edge from-layer="90" from-port="2" to-layer="91" to-port="0"/>
2842
+ <edge from-layer="91" from-port="1" to-layer="94" to-port="0"/>
2843
+ <edge from-layer="92" from-port="0" to-layer="93" to-port="0"/>
2844
+ <edge from-layer="93" from-port="1" to-layer="94" to-port="1"/>
2845
+ <edge from-layer="94" from-port="2" to-layer="97" to-port="0"/>
2846
+ <edge from-layer="95" from-port="0" to-layer="96" to-port="0"/>
2847
+ <edge from-layer="96" from-port="1" to-layer="97" to-port="1"/>
2848
+ <edge from-layer="97" from-port="2" to-layer="98" to-port="0"/>
2849
+ <edge from-layer="98" from-port="1" to-layer="101" to-port="0"/>
2850
+ <edge from-layer="99" from-port="0" to-layer="100" to-port="0"/>
2851
+ <edge from-layer="100" from-port="1" to-layer="101" to-port="1"/>
2852
+ <edge from-layer="101" from-port="2" to-layer="104" to-port="0"/>
2853
+ <edge from-layer="102" from-port="0" to-layer="103" to-port="0"/>
2854
+ <edge from-layer="103" from-port="1" to-layer="104" to-port="1"/>
2855
+ <edge from-layer="104" from-port="2" to-layer="105" to-port="0"/>
2856
+ <edge from-layer="106" from-port="0" to-layer="107" to-port="0"/>
2857
+ <edge from-layer="107" from-port="1" to-layer="108" to-port="1"/>
2858
+ <edge from-layer="108" from-port="2" to-layer="111" to-port="0"/>
2859
+ <edge from-layer="109" from-port="0" to-layer="110" to-port="0"/>
2860
+ <edge from-layer="110" from-port="1" to-layer="111" to-port="1"/>
2861
+ <edge from-layer="111" from-port="2" to-layer="112" to-port="0"/>
2862
+ <edge from-layer="113" from-port="0" to-layer="114" to-port="0"/>
2863
+ <edge from-layer="114" from-port="1" to-layer="115" to-port="1"/>
2864
+ <edge from-layer="115" from-port="2" to-layer="118" to-port="0"/>
2865
+ <edge from-layer="116" from-port="0" to-layer="117" to-port="0"/>
2866
+ <edge from-layer="117" from-port="1" to-layer="118" to-port="1"/>
2867
+ <edge from-layer="118" from-port="2" to-layer="119" to-port="0"/>
2868
+ <edge from-layer="120" from-port="0" to-layer="121" to-port="0"/>
2869
+ <edge from-layer="121" from-port="1" to-layer="122" to-port="1"/>
2870
+ <edge from-layer="122" from-port="2" to-layer="125" to-port="0"/>
2871
+ <edge from-layer="123" from-port="0" to-layer="124" to-port="0"/>
2872
+ <edge from-layer="124" from-port="1" to-layer="125" to-port="1"/>
2873
+ <edge from-layer="125" from-port="2" to-layer="126" to-port="0"/>
2874
+ <edge from-layer="127" from-port="0" to-layer="128" to-port="0"/>
2875
+ <edge from-layer="128" from-port="1" to-layer="129" to-port="1"/>
2876
+ <edge from-layer="129" from-port="2" to-layer="132" to-port="0"/>
2877
+ <edge from-layer="130" from-port="0" to-layer="131" to-port="0"/>
2878
+ <edge from-layer="131" from-port="1" to-layer="132" to-port="1"/>
2879
+ <edge from-layer="132" from-port="2" to-layer="133" to-port="0"/>
2880
+ </edges>
2881
+ <meta_data>
+ <MO_version value="2022.2.0-7713-af16ea1d79a-releases/2022/2"/>
+ <Runtime_version value="2022.2.0-7713-af16ea1d79a-releases/2022/2"/>
+ <legacy_path value="False"/>
+ <cli_parameters>
+ <caffe_parser_path value="DIR"/>
+ <compress_fp16 value="True"/>
+ <data_type value="FP32"/>
+ <disable_nhwc_to_nchw value="False"/>
+ <disable_omitting_optional value="False"/>
+ <disable_resnet_optimization value="False"/>
+ <disable_weights_compression value="False"/>
+ <enable_concat_optimization value="False"/>
+ <enable_flattening_nested_params value="False"/>
+ <enable_ssd_gluoncv value="False"/>
+ <extensions value="DIR"/>
+ <framework value="onnx"/>
+ <freeze_placeholder_with_value value="{}"/>
+ <input_model value="DIR\hed.onnx"/>
+ <input_model_is_text value="False"/>
+ <k value="DIR\CustomLayersMapping.xml"/>
+ <layout value="()"/>
+ <layout_values value="{}"/>
+ <legacy_mxnet_model value="False"/>
+ <log_level value="ERROR"/>
+ <mean_scale_values value="{}"/>
+ <mean_values value="()"/>
+ <model_name value="hed"/>
+ <output_dir value="DIR"/>
+ <placeholder_data_types value="{}"/>
+ <progress value="False"/>
+ <remove_memory value="False"/>
+ <remove_output_softmax value="False"/>
+ <reverse_input_channels value="False"/>
+ <save_params_from_nd value="False"/>
+ <scale_values value="()"/>
+ <silent value="False"/>
+ <source_layout value="()"/>
+ <static_shape value="False"/>
+ <stream_output value="False"/>
+ <target_layout value="()"/>
+ <transform value=""/>
+ <use_legacy_frontend value="False"/>
+ <use_new_frontend value="False"/>
+ <unset unset_cli_parameters="batch, counts, disable_fusing, finegrain_fusing, input, input_checkpoint, input_meta_graph, input_proto, input_shape, input_symbol, mean_file, mean_file_offsets, nd_prefix_name, output, placeholder_shapes, pretrained_model_name, saved_model_dir, saved_model_tags, scale, tensorboard_logdir, tensorflow_custom_layer_libraries, tensorflow_custom_operations_config_update, tensorflow_object_detection_api_pipeline_config, tensorflow_use_custom_operations_config, transformations_config"/>
+ </cli_parameters>
+ </meta_data>
+ </net>
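The <meta_data> block above records the provenance of this IR: an ONNX export (hed.onnx) converted with Model Optimizer 2022.2.0 with weights compressed to FP16. A minimal sketch of sanity-checking the converted files — assuming the `openvino` Python package matching the Runtime_version above is installed and this repository is checked out locally; input and output shapes are queried from the IR rather than assumed:

    from openvino.runtime import Core  # OpenVINO 2022.x Python API

    core = Core()
    # The .bin with the weights sits next to the .xml and is found automatically.
    model = core.read_model("INT8/unet_time_proj_sym.xml")
    compiled = core.compile_model(model, "CPU")

    # Report what the graph actually declares instead of hard-coding shapes.
    for port in compiled.inputs:
        print("input:", port.any_name, port.partial_shape)
    for port in compiled.outputs:
        print("output:", port.any_name, port.partial_shape)

The same pattern applies to the other FP16 and INT8 IRs in this upload; the precompiled NPU .blob is a device-specific compiled artifact and is not loaded with read_model.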
text_encoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a88d312a68aebcb8d39779b0e8e7dd939556585adc5b082e6aebff27248bc478
+ size 246121826
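These three lines are a Git LFS pointer stub, not the encoder weights themselves: `oid` is the SHA-256 digest of the real file and `size` its length in bytes (~246 MB here). Assuming git-lfs is installed, `git clone` resolves the pointer automatically, and `git lfs pull` fetches the binary in an existing checkout.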
text_encoder.xml ADDED
The diff for this file is too large to render. See raw diff
 
vae_decoder.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ece0c4b3fa7dd0acbd7bc586205ca7aa0b8f74b7790448466f36d892efcb79c
+ size 98980680
vae_decoder.xml ADDED
The diff for this file is too large to render. See raw diff