{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "model": "FacebookAI/roberta-base",
            "library": "transformers",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "hub_kwargs": {
                "revision": "main",
                "force_download": false,
                "local_files_only": false,
                "trust_remote_code": false
            },
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.29792,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.214-202.855.amzn2.x86_64-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.14",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.2.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.40.2",
            "transformers_commit": null,
            "accelerate_version": "0.30.0",
            "accelerate_commit": null,
            "diffusers_version": "0.27.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 890.138624,
                "max_global_vram": 1195.900928,
                "max_process_vram": 0.0,
                "max_reserved": 555.74528,
                "max_allocated": 508.993536
            },
            "latency": {
                "unit": "s",
                "count": 154,
                "total": 1.0014327363967899,
                "mean": 0.00650280997660253,
                "stdev": 0.00021388913393038193,
                "p50": 0.006459903955459595,
                "p90": 0.006831820774078369,
                "p95": 0.006857471919059753,
                "p99": 0.0069436825037002565,
                "values": [
                    0.007262207984924316,
                    0.0067983360290527345,
                    0.006879231929779053,
                    0.006853631973266602,
                    0.006860799789428711,
                    0.006831103801727295,
                    0.00682700777053833,
                    0.00685152006149292,
                    0.00694271993637085,
                    0.006814720153808594,
                    0.006944767951965332,
                    0.006825984001159668,
                    0.006788095951080322,
                    0.006792191982269287,
                    0.0067041277885437015,
                    0.006729728221893311,
                    0.006793216228485107,
                    0.006850560188293457,
                    0.006896639823913574,
                    0.006820864200592041,
                    0.006834176063537598,
                    0.006816768169403077,
                    0.006791168212890625,
                    0.0068321280479431154,
                    0.006855679988861084,
                    0.006842368125915528,
                    0.006790143966674805,
                    0.00682700777053833,
                    0.006816768169403077,
                    0.006427648067474365,
                    0.006453248023986816,
                    0.006552576065063476,
                    0.006458367824554443,
                    0.006496223926544189,
                    0.006458367824554443,
                    0.00657203197479248,
                    0.006557695865631104,
                    0.006412288188934326,
                    0.0064204797744750975,
                    0.0063907837867736815,
                    0.006481919765472412,
                    0.006502431869506836,
                    0.006575104236602783,
                    0.006882304191589355,
                    0.0068618240356445315,
                    0.006852608203887939,
                    0.006523903846740723,
                    0.0064767999649047855,
                    0.006418432235717773,
                    0.0064440321922302245,
                    0.006808576107025147,
                    0.006523903846740723,
                    0.006467584133148193,
                    0.006665184020996094,
                    0.006419456005096436,
                    0.006407167911529541,
                    0.006486015796661377,
                    0.006436863899230957,
                    0.006506559848785401,
                    0.0065781760215759275,
                    0.006404096126556396,
                    0.006455296039581298,
                    0.006433792114257812,
                    0.006498303890228272,
                    0.006457344055175781,
                    0.006512639999389648,
                    0.006477759838104248,
                    0.006490111827850342,
                    0.006460415840148926,
                    0.00658739185333252,
                    0.006543360233306885,
                    0.006508480072021485,
                    0.006599679946899414,
                    0.006478847980499268,
                    0.006461440086364746,
                    0.006533120155334473,
                    0.006404096126556396,
                    0.006509568214416504,
                    0.006450175762176514,
                    0.006375423908233643,
                    0.00636518383026123,
                    0.006194176197052002,
                    0.006404096126556396,
                    0.006467584133148193,
                    0.006201312065124511,
                    0.006201344013214111,
                    0.006171648025512695,
                    0.006261760234832763,
                    0.006228991985321045,
                    0.006301695823669433,
                    0.006254591941833496,
                    0.0063303041458129886,
                    0.006481919765472412,
                    0.006253568172454834,
                    0.00618393611907959,
                    0.006127615928649902,
                    0.006198272228240966,
                    0.006131711959838867,
                    0.006128640174865723,
                    0.006179840087890625,
                    0.006153215885162353,
                    0.00618393611907959,
                    0.006137792110443115,
                    0.006278143882751465,
                    0.00611737585067749,
                    0.006278143882751465,
                    0.006145023822784424,
                    0.00614192008972168,
                    0.0062494721412658695,
                    0.006147039890289307,
                    0.0066826238632202144,
                    0.006419456005096436,
                    0.006433792114257812,
                    0.006445055961608887,
                    0.006451200008392334,
                    0.00652288007736206,
                    0.006462463855743408,
                    0.006412288188934326,
                    0.00643071985244751,
                    0.006379519939422608,
                    0.00643174409866333,
                    0.0064174079895019534,
                    0.00638976001739502,
                    0.006446080207824707,
                    0.00637440013885498,
                    0.006553599834442139,
                    0.006446080207824707,
                    0.006403071880340576,
                    0.006481919765472412,
                    0.00643174409866333,
                    0.006437888145446777,
                    0.006496255874633789,
                    0.006459392070770263,
                    0.00648089599609375,
                    0.006452223777770996,
                    0.006419424057006836,
                    0.006425663948059082,
                    0.006435840129852295,
                    0.006437888145446777,
                    0.006432767868041992,
                    0.006435840129852295,
                    0.006478847980499268,
                    0.0064287037849426265,
                    0.0064430079460144046,
                    0.0064778242111206055,
                    0.006423552036285401,
                    0.006436863899230957,
                    0.006426623821258545,
                    0.0064471039772033695,
                    0.006494175910949707,
                    0.00648089599609375,
                    0.006475776195526123,
                    0.006486015796661377,
                    0.006469632148742676
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 153.7796742635961
            },
            "energy": {
                "unit": "kWh",
                "cpu": 7.514877954783488e-08,
                "ram": 4.1082792785910214e-08,
                "gpu": 1.3599166997468262e-07,
                "total": 2.5222324230842775e-07
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 3964741.674271095
            }
        }
    }
}