IlyasMoutawwakil (HF staff) committed
Commit 5b72015 · verified · 1 Parent(s): 9917b32

Upload cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub

cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json CHANGED
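The file was uploaded with huggingface_hub, and it can be pulled back the same way for inspection. Below is a minimal sketch, not part of the commit itself: the repository id is a placeholder (the commit page does not name the repo), and the `repo_type="dataset"` choice is an assumption.

```python
# Sketch: download this benchmark.json from the Hub and inspect it.
# REPO_ID is a placeholder -- replace it with the actual repository.
import json

from huggingface_hub import hf_hub_download

REPO_ID = "<namespace>/<benchmark-repo>"  # placeholder, not given on this page
FILENAME = (
    "cuda_inference_transformers_fill-mask_google-bert/"
    "bert-base-uncased/benchmark.json"
)

path = hf_hub_download(
    repo_id=REPO_ID,
    filename=FILENAME,
    repo_type="dataset",   # assumption: benchmark results stored in a dataset repo
    # revision="5b72015",  # optionally pin to this commit
)

with open(path) as f:
    report = json.load(f)

# The diff only shows part of the file, so just list the top-level keys first.
print(sorted(report))
```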
@@ -3,7 +3,7 @@
3
  "name": "cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.4.1+cu124",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "fill-mask",
9
  "library": "transformers",
@@ -44,9 +44,8 @@
44
  "duration": 1,
45
  "warmup_runs": 1,
46
  "input_shapes": {
47
- "batch_size": 1,
48
- "num_choices": 2,
49
- "sequence_length": 2
50
  },
51
  "new_tokens": null,
52
  "memory": true,
@@ -73,10 +72,10 @@
73
  "environment": {
74
  "cpu": " AMD EPYC 7R32",
75
  "cpu_count": 16,
76
- "cpu_ram_mb": 66697.261056,
77
  "system": "Linux",
78
  "machine": "x86_64",
79
- "platform": "Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35",
80
  "processor": "x86_64",
81
  "python_version": "3.10.12",
82
  "gpu": [
@@ -86,15 +85,15 @@
86
  "gpu_vram_mb": 24146608128,
87
  "optimum_benchmark_version": "0.5.0.dev0",
88
  "optimum_benchmark_commit": null,
89
- "transformers_version": "4.45.2",
90
  "transformers_commit": null,
91
- "accelerate_version": "1.0.1",
92
  "accelerate_commit": null,
93
- "diffusers_version": "0.30.3",
94
  "diffusers_commit": null,
95
  "optimum_version": null,
96
  "optimum_commit": null,
97
- "timm_version": "1.0.9",
98
  "timm_commit": null,
99
  "peft_version": "0.13.2",
100
  "peft_commit": null
@@ -106,7 +105,7 @@
106
  "load": {
107
  "memory": {
108
  "unit": "MB",
109
- "max_ram": 798.101504,
110
  "max_global_vram": 1226.309632,
111
  "max_process_vram": 0.0,
112
  "max_reserved": 589.299712,
@@ -115,15 +114,15 @@
115
  "latency": {
116
  "unit": "s",
117
  "values": [
118
- 0.07608115386962891
119
  ],
120
  "count": 1,
121
- "total": 0.07608115386962891,
122
- "mean": 0.07608115386962891,
123
- "p50": 0.07608115386962891,
124
- "p90": 0.07608115386962891,
125
- "p95": 0.07608115386962891,
126
- "p99": 0.07608115386962891,
127
  "stdev": 0,
128
  "stdev_": 0
129
  },
@@ -134,224 +133,203 @@
134
  "forward": {
135
  "memory": {
136
  "unit": "MB",
137
- "max_ram": 1053.732864,
138
  "max_global_vram": 1238.892544,
139
  "max_process_vram": 0.0,
140
  "max_reserved": 591.396864,
141
- "max_allocated": 449.025024
142
  },
143
  "latency": {
144
  "unit": "s",
145
  "values": [
146
- 0.005637119770050049,
147
- 0.006034431934356689,
148
- 0.005616640090942383,
149
- 0.005601280212402344,
150
- 0.005536767959594726,
151
- 0.005569536209106446,
152
- 0.005519360065460205,
153
- 0.005493760108947754,
154
- 0.005587967872619629,
155
- 0.00547430419921875,
156
- 0.005815296173095703,
157
- 0.0055101442337036136,
158
- 0.005525504112243652,
159
- 0.005571584224700928,
160
- 0.0055214080810546875,
161
- 0.00552243185043335,
162
- 0.005527552127838135,
163
- 0.005486591815948487,
164
- 0.005512191772460938,
165
- 0.005468160152435303,
166
- 0.005483520030975342,
167
- 0.005526527881622314,
168
- 0.0054876160621643065,
169
- 0.005482495784759522,
170
- 0.005484543800354004,
171
- 0.005469183921813964,
172
- 0.005527552127838135,
173
- 0.005500927925109863,
174
- 0.005493824005126953,
175
- 0.005497856140136719,
176
- 0.005469183921813964,
177
- 0.005458943843841553,
178
- 0.005480447769165039,
179
- 0.005463039875030518,
180
- 0.005500927925109863,
181
- 0.0054579200744628905,
182
- 0.005486591815948487,
183
- 0.0056217598915100095,
184
- 0.005509119987487793,
185
- 0.005508096218109131,
186
- 0.005501952171325684,
187
- 0.005291007995605469,
188
- 0.005198847770690918,
189
- 0.005182464122772217,
190
- 0.005241856098175048,
191
- 0.005370880126953125,
192
- 0.005315584182739258,
193
- 0.005248000144958496,
194
- 0.005259263992309571,
195
- 0.0052930560111999515,
196
- 0.005312511920928955,
197
- 0.00525929594039917,
198
- 0.005336063861846924,
199
- 0.005275648117065429,
200
- 0.005258240222930908,
201
- 0.0052899842262268066,
202
- 0.0052899842262268066,
203
- 0.005291007995605469,
204
- 0.0053043198585510255,
205
- 0.005253119945526123,
206
- 0.005249023914337158,
207
- 0.005281792163848877,
208
- 0.005274623870849609,
209
- 0.005261312007904053,
210
- 0.005282815933227539,
211
- 0.0052633600234985355,
212
- 0.0052633600234985355,
213
- 0.0052705278396606445,
214
- 0.005265408039093018,
215
- 0.0052408318519592285,
216
- 0.005275648117065429,
217
- 0.005282815933227539,
218
- 0.005259263992309571,
219
- 0.0052408318519592285,
220
- 0.0052899842262268066,
221
- 0.005237760066986084,
222
- 0.005284863948822022,
223
- 0.0052674560546875,
224
- 0.005281792163848877,
225
- 0.005248000144958496,
226
- 0.0053012480735778805,
227
- 0.005268479824066162,
228
- 0.005262335777282715,
229
- 0.005269504070281982,
230
- 0.005276671886444092,
231
- 0.005246975898742676,
232
- 0.005288959980010987,
233
- 0.005251071929931641,
234
- 0.005255167961120606,
235
- 0.005252096176147461,
236
- 0.005294079780578613,
237
- 0.005252096176147461,
238
- 0.005253151893615722,
239
- 0.005269504070281982,
240
- 0.005250048160552978,
241
- 0.005239808082580567,
242
- 0.005291007995605469,
243
- 0.005243904113769531,
244
- 0.005254144191741943,
245
- 0.005328896045684814,
246
- 0.005305344104766845,
247
- 0.005311488151550293,
248
- 0.005336063861846924,
249
- 0.005264383792877197,
250
- 0.005256192207336426,
251
- 0.005258240222930908,
252
- 0.005295104026794434,
253
- 0.005249023914337158,
254
- 0.00531763219833374,
255
- 0.005252096176147461,
256
- 0.006014976024627685,
257
- 0.006589439868927002,
258
- 0.006595583915710449,
259
- 0.006597631931304931,
260
- 0.006660096168518067,
261
  0.0061931519508361815,
262
- 0.005755904197692871,
263
- 0.005661695957183838,
264
- 0.005728256225585937,
265
- 0.005796864032745362,
266
- 0.005591040134429932,
267
- 0.0055214080810546875,
268
- 0.005548031806945801,
269
- 0.005583871841430664,
270
- 0.005526527881622314,
271
- 0.005587967872619629,
272
- 0.0055101442337036136,
273
- 0.005554175853729248,
274
- 0.005570559978485107,
275
- 0.005549056053161621,
276
- 0.005550079822540284,
277
- 0.005501952171325684,
278
- 0.005499904155731201,
279
- 0.005485568046569824,
280
- 0.005489664077758789,
281
- 0.005477375984191894,
282
- 0.005533696174621582,
283
- 0.005473279953002929,
284
- 0.00552243185043335,
285
- 0.005518335819244385,
286
- 0.0054988799095153805,
287
- 0.00552243185043335,
288
- 0.005517312049865723,
289
- 0.0054568958282470706,
290
- 0.0055316481590271,
291
- 0.005481472015380859,
292
- 0.005516287803649903,
293
- 0.0054988799095153805,
294
- 0.005492735862731934,
295
- 0.005478400230407715,
296
- 0.005497856140136719,
297
- 0.005488639831542969,
298
- 0.005561344146728516,
299
- 0.005475327968597412,
300
- 0.005516287803649903,
301
- 0.005470208168029785,
302
- 0.005489664077758789,
303
- 0.005516287803649903,
304
- 0.005511168003082275,
305
- 0.0054609918594360355,
306
- 0.00555622386932373,
307
- 0.005512191772460938,
308
- 0.00550707197189331,
309
- 0.0054876160621643065,
310
- 0.005472256183624268,
311
- 0.005505023956298828,
312
- 0.005486591815948487,
313
- 0.005495808124542236,
314
- 0.005541888236999512,
315
- 0.005499904155731201,
316
- 0.005500927925109863,
317
- 0.005469183921813964,
318
- 0.005489664077758789,
319
- 0.005499904155731201,
320
- 0.005492735862731934,
321
- 0.005472256183624268,
322
- 0.005526527881622314,
323
- 0.005459968090057373,
324
- 0.005519360065460205,
325
- 0.00552243185043335,
326
- 0.0054906878471374515,
327
- 0.0054988799095153805,
328
- 0.005488639831542969,
329
- 0.005464064121246338
330
  ],
331
- "count": 184,
332
- "total": 1.0048625931739799,
333
- "mean": 0.005461209745510765,
334
- "p50": 0.005484031915664673,
335
- "p90": 0.0055867390632629394,
336
- "p95": 0.005751757001876831,
337
- "p99": 0.0065959320783615105,
338
- "stdev": 0.00023106751478452694,
339
- "stdev_": 4.231068308161383
340
  },
341
  "throughput": {
342
  "unit": "samples/s",
343
- "value": 183.10961244841806
344
  },
345
  "energy": {
346
  "unit": "kWh",
347
- "cpu": 6.546062232581018e-08,
348
- "ram": 3.5767918808479316e-08,
349
- "gpu": 1.275967871602217e-07,
350
- "total": 2.288253282945112e-07
351
  },
352
  "efficiency": {
353
  "unit": "samples/kWh",
354
- "value": 4370145.5929430295
355
  }
356
  }
357
  }
 
3
  "name": "cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.5.1+cu124",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "fill-mask",
9
  "library": "transformers",
 
44
  "duration": 1,
45
  "warmup_runs": 1,
46
  "input_shapes": {
47
+ "batch_size": 2,
48
+ "sequence_length": 16
 
49
  },
50
  "new_tokens": null,
51
  "memory": true,
 
72
  "environment": {
73
  "cpu": " AMD EPYC 7R32",
74
  "cpu_count": 16,
75
+ "cpu_ram_mb": 66697.248768,
76
  "system": "Linux",
77
  "machine": "x86_64",
78
+ "platform": "Linux-5.10.227-219.884.amzn2.x86_64-x86_64-with-glibc2.35",
79
  "processor": "x86_64",
80
  "python_version": "3.10.12",
81
  "gpu": [
 
85
  "gpu_vram_mb": 24146608128,
86
  "optimum_benchmark_version": "0.5.0.dev0",
87
  "optimum_benchmark_commit": null,
88
+ "transformers_version": "4.46.3",
89
  "transformers_commit": null,
90
+ "accelerate_version": "1.1.1",
91
  "accelerate_commit": null,
92
+ "diffusers_version": "0.31.0",
93
  "diffusers_commit": null,
94
  "optimum_version": null,
95
  "optimum_commit": null,
96
+ "timm_version": "1.0.11",
97
  "timm_commit": null,
98
  "peft_version": "0.13.2",
99
  "peft_commit": null
 
105
  "load": {
106
  "memory": {
107
  "unit": "MB",
108
+ "max_ram": 812.716032,
109
  "max_global_vram": 1226.309632,
110
  "max_process_vram": 0.0,
111
  "max_reserved": 589.299712,
 
114
  "latency": {
115
  "unit": "s",
116
  "values": [
117
+ 0.07904969787597656
118
  ],
119
  "count": 1,
120
+ "total": 0.07904969787597656,
121
+ "mean": 0.07904969787597656,
122
+ "p50": 0.07904969787597656,
123
+ "p90": 0.07904969787597656,
124
+ "p95": 0.07904969787597656,
125
+ "p99": 0.07904969787597656,
126
  "stdev": 0,
127
  "stdev_": 0
128
  },
 
133
  "forward": {
134
  "memory": {
135
  "unit": "MB",
136
+ "max_ram": 1058.435072,
137
  "max_global_vram": 1238.892544,
138
  "max_process_vram": 0.0,
139
  "max_reserved": 591.396864,
140
+ "max_allocated": 452.872192
141
  },
142
  "latency": {
143
  "unit": "s",
144
  "values": [
145
+ 0.006323200225830078,
146
+ 0.006179840087890625,
147
+ 0.006223872184753418,
148
+ 0.006214655876159668,
149
+ 0.006227968215942382,
150
+ 0.006159359931945801,
151
+ 0.006173696041107178,
152
+ 0.00636518383026123,
153
+ 0.006120448112487793,
154
+ 0.006044672012329101,
155
+ 0.0061562881469726565,
156
+ 0.006106112003326416,
157
+ 0.0062679038047790524,
158
+ 0.0061521921157836916,
159
+ 0.0061562881469726565,
160
+ 0.006136832237243653,
161
+ 0.006155263900756836,
162
+ 0.006106112003326416,
163
+ 0.006180863857269287,
164
+ 0.006211584091186524,
165
+ 0.006049791812896729,
166
+ 0.006089727878570556,
167
+ 0.006209536075592041,
168
+ 0.0061224961280822755,
169
+ 0.00597811222076416,
170
+ 0.006067200183868408,
171
+ 0.006110208034515381,
172
+ 0.006118400096893311,
173
+ 0.006187071800231933,
174
+ 0.005911551952362061,
175
+ 0.005984255790710449,
176
+ 0.006168575763702393,
177
+ 0.006086656093597412,
178
+ 0.006098944187164307,
179
+ 0.00602623987197876,
180
+ 0.006065152168273926,
181
+ 0.006096896171569824,
182
+ 0.006142975807189942,
183
+ 0.006102015972137451,
184
+ 0.006140927791595459,
185
+ 0.006209536075592041,
186
+ 0.006137856006622314,
187
+ 0.006190080165863037,
188
+ 0.006217728137969971,
189
+ 0.006201344013214111,
190
+ 0.006115327835083008,
191
+ 0.006136832237243653,
192
+ 0.006339583873748779,
193
+ 0.00653926420211792,
194
+ 0.0063272957801818845,
195
+ 0.006456319808959961,
196
+ 0.0060702719688415525,
197
+ 0.006123519897460937,
198
+ 0.006171648025512695,
199
+ 0.006180863857269287,
200
+ 0.006137856006622314,
201
+ 0.006118400096893311,
202
+ 0.006217823982238769,
203
+ 0.0061634559631347655,
204
+ 0.006004735946655273,
205
+ 0.005992447853088379,
206
+ 0.005942272186279297,
207
+ 0.006048768043518066,
208
+ 0.006053887844085694,
209
+ 0.006042623996734619,
210
+ 0.006037504196166992,
211
+ 0.006271135807037353,
212
+ 0.00613478422164917,
213
+ 0.006141952037811279,
214
+ 0.0062362561225891115,
215
+ 0.0062044157981872555,
216
+ 0.005963776111602783,
217
+ 0.006097919940948486,
218
+ 0.006049791812896729,
219
+ 0.0064737281799316405,
220
+ 0.00632422399520874,
221
+ 0.006294528007507324,
222
+ 0.007713791847229004,
223
+ 0.006845439910888672,
224
+ 0.00742195177078247,
225
+ 0.006456319808959961,
226
+ 0.007463935852050781,
227
+ 0.006456319808959961,
228
+ 0.006409215927124024,
229
+ 0.006415359973907471,
230
+ 0.0063508481979370115,
231
+ 0.00760319995880127,
232
+ 0.006593535900115967,
233
+ 0.0063508481979370115,
234
+ 0.006230016231536865,
235
+ 0.006342656135559082,
236
+ 0.006331520080566406,
237
+ 0.006346752166748047,
238
+ 0.006291456222534179,
239
+ 0.006770688056945801,
240
+ 0.00637440013885498,
241
+ 0.006306816101074219,
242
+ 0.006243328094482422,
243
+ 0.006231040000915527,
244
+ 0.006262784004211426,
245
+ 0.006166528224945069,
246
+ 0.006078464031219482,
247
+ 0.00628326416015625,
248
+ 0.005925888061523437,
249
+ 0.006047743797302246,
250
+ 0.006113279819488526,
251
+ 0.00622489595413208,
252
+ 0.006270976066589356,
253
  0.0061931519508361815,
254
+ 0.0061521921157836916,
255
+ 0.006329343795776367,
256
+ 0.006281216144561768,
257
+ 0.006367231845855713,
258
+ 0.00679423999786377,
259
+ 0.006273024082183838,
260
+ 0.006048768043518066,
261
+ 0.005971968173980713,
262
+ 0.005984320163726807,
263
+ 0.006171648025512695,
264
+ 0.006013951778411865,
265
+ 0.006000639915466309,
266
+ 0.0062228479385375976,
267
+ 0.00602726411819458,
268
+ 0.005959680080413818,
269
+ 0.005979135990142822,
270
+ 0.0059699201583862304,
271
+ 0.005944320201873779,
272
+ 0.005822463989257813,
273
+ 0.005908480167388916,
274
+ 0.006004735946655273,
275
+ 0.005981184005737304,
276
+ 0.006120448112487793,
277
+ 0.006009856224060059,
278
+ 0.005953536033630371,
279
+ 0.005952511787414551,
280
+ 0.005946368217468262,
281
+ 0.0059955201148986816,
282
+ 0.005949440002441406,
283
+ 0.006097919940948486,
284
+ 0.005941247940063477,
285
+ 0.005856256008148194,
286
+ 0.005893119812011719,
287
+ 0.005864448070526123,
288
+ 0.0058429441452026365,
289
+ 0.005851136207580566,
290
+ 0.005844992160797119,
291
+ 0.00586240005493164,
292
+ 0.0059023361206054685,
293
+ 0.005816319942474365,
294
+ 0.005860352039337159,
295
+ 0.006302720069885254,
296
+ 0.006090752124786377,
297
+ 0.005993472099304199,
298
+ 0.00592793607711792,
299
+ 0.005909503936767578,
300
+ 0.006037504196166992,
301
+ 0.005946368217468262,
302
+ 0.00573747205734253,
303
+ 0.005646336078643799,
304
+ 0.0056442880630493165,
305
+ 0.005658624172210694,
306
+ 0.005645311832427978,
307
+ 0.005700607776641845
308
  ],
309
+ "count": 163,
310
+ "total": 1.0039158754348754,
311
+ "mean": 0.006158993100827456,
312
+ "p50": 0.006123519897460937,
313
+ "p90": 0.006366822242736816,
314
+ "p95": 0.006532710599899292,
315
+ "p99": 0.007516856212615966,
316
+ "stdev": 0.00029740025778856146,
317
+ "stdev_": 4.828715553336242
318
  },
319
  "throughput": {
320
  "unit": "samples/s",
321
+ "value": 324.728404019693
322
  },
323
  "energy": {
324
  "unit": "kWh",
325
+ "cpu": 7.075395165344066e-08,
326
+ "ram": 3.855912672807783e-08,
327
+ "gpu": 1.641651445595253e-07,
328
+ "total": 2.734782229410438e-07
329
  },
330
  "efficiency": {
331
  "unit": "samples/kWh",
332
+ "value": 7313196.562752122
333
  }
334
  }
335
  }
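For reference, the summary statistics on both sides of the diff follow directly from the raw samples: mean = total / count, throughput = batch_size × count / total latency, and efficiency = batch_size / recorded energy (which suggests the energy figure is per forward pass). A minimal sketch re-deriving the post-update numbers under those assumptions; the batch_size factor is inferred from the values, not stated in the file.

```python
# Sketch: re-derive the updated run's summary statistics from its raw figures.
batch_size = 2                        # new "input_shapes" in this commit
count = 163                           # forward latency samples
total_latency_s = 1.0039158754348754  # "total"
total_energy_kwh = 2.734782229410438e-07

mean_latency_s = total_latency_s / count           # ~0.006158993 s ("mean")
throughput = batch_size * count / total_latency_s  # ~324.73 samples/s ("throughput.value")
efficiency = batch_size / total_energy_kwh         # ~7313196.6 samples/kWh ("efficiency.value")

print(f"{mean_latency_s:.9f} s, {throughput:.2f} samples/s, {efficiency:.1f} samples/kWh")
```

The pre-change numbers check out the same way with batch_size = 1, count = 184, total latency 1.0048625931739799 s, and total energy 2.288253282945112e-07 kWh.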