IlyasMoutawwakil committed
Commit b8b91da · verified · 1 Parent(s): b3aa7fd

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

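The commit message above says the file was pushed "with huggingface_hub". A minimal sketch of how such an upload is typically done with HfApi.upload_file is shown below; the repo_id and local file path are placeholders for illustration and are not taken from this commit.

    # Sketch: upload a benchmark.json to a Hub repo with huggingface_hub.
    # repo_id and the local path are hypothetical; only the commit message and
    # path_in_repo are taken from this page.
    from huggingface_hub import HfApi

    api = HfApi()  # uses the locally stored HF token by default
    api.upload_file(
        path_or_fileobj="benchmark.json",
        path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
        repo_id="<namespace>/<benchmark-results-repo>",  # hypothetical target repo
        repo_type="dataset",
        commit_message=(
            "Upload cuda_inference_transformers_text-classification_"
            "FacebookAI/roberta-base/benchmark.json with huggingface_hub"
        ),
    )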
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -6,19 +6,17 @@
  "version": "2.3.0+cu121",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "text-classification",
- "model": "FacebookAI/roberta-base",
  "library": "transformers",
+ "model": "FacebookAI/roberta-base",
+ "processor": "FacebookAI/roberta-base",
  "device": "cuda",
  "device_ids": "0",
  "seed": 42,
  "inter_op_num_threads": null,
  "intra_op_num_threads": null,
- "hub_kwargs": {
- "revision": "main",
- "force_download": false,
- "local_files_only": false,
- "trust_remote_code": false
- },
+ "model_kwargs": {},
+ "processor_kwargs": {},
+ "hub_kwargs": {},
  "no_weights": true,
  "device_map": null,
  "torch_dtype": null,
@@ -104,7 +102,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 904.118272,
+ "max_ram": 903.745536,
  "max_global_vram": 1195.900928,
  "max_process_vram": 0.0,
  "max_reserved": 555.74528,
@@ -112,180 +110,179 @@
  },
  "latency": {
  "unit": "s",
- "count": 149,
- "total": 0.9998194217681889,
- "mean": 0.006710197461531467,
- "stdev": 0.00015346916109906233,
- "p50": 0.006713344097137451,
- "p90": 0.006843392086029053,
- "p95": 0.006929651069641113,
- "p99": 0.0070813696289062505,
+ "count": 148,
+ "total": 1.0018918728828432,
+ "mean": 0.006769539681640831,
+ "stdev": 0.0001744093525350522,
+ "p50": 0.006763520002365112,
+ "p90": 0.006992588710784912,
+ "p95": 0.007051212763786316,
+ "p99": 0.007346770057678222,
  "values": [
- 0.007044095993041993,
- 0.006750175952911377,
- 0.006914048194885254,
- 0.006882304191589355,
- 0.006825984001159668,
- 0.006831103801727295,
- 0.006782976150512696,
+ 0.007473152160644531,
+ 0.006854656219482422,
+ 0.006985727787017822,
+ 0.006949888229370117,
+ 0.006946815967559815,
+ 0.006904831886291504,
+ 0.006929408073425293,
+ 0.006896607875823975,
+ 0.0068351998329162595,
+ 0.006811647891998291,
+ 0.006848512172698974,
+ 0.006781951904296875,
  0.006821887969970703,
- 0.006767615795135498,
- 0.0067420158386230465,
- 0.006672383785247803,
- 0.006617087841033936,
- 0.006654975891113281,
- 0.006648831844329834,
- 0.00657203197479248,
- 0.006540287971496582,
- 0.006632448196411133,
- 0.006603775978088379,
- 0.0066406397819519045,
+ 0.006792191982269287,
+ 0.006700032234191895,
+ 0.006665215969085693,
+ 0.006723584175109864,
+ 0.006773759841918945,
+ 0.0067738242149353025,
+ 0.00684441614151001,
+ 0.006814720153808594,
+ 0.006803455829620361,
+ 0.00675328016281128,
+ 0.006762495994567871,
+ 0.006778880119323731,
+ 0.006811647891998291,
+ 0.006820864200592041,
+ 0.006819839954376221,
+ 0.0068055038452148435,
+ 0.00676966381072998,
+ 0.0067717118263244626,
+ 0.006782976150512696,
+ 0.00690176010131836,
+ 0.006836160182952881,
+ 0.006788095951080322,
+ 0.006693920135498047,
+ 0.006657023906707763,
+ 0.006722527980804444,
+ 0.006747136116027832,
+ 0.006719488143920899,
+ 0.006806528091430664,
+ 0.006781951904296875,
+ 0.0067645440101623535,
+ 0.007300096035003662,
+ 0.0071198720932006835,
+ 0.0070522880554199216,
+ 0.007134208202362061,
+ 0.007017471790313721,
+ 0.006989823818206787,
+ 0.006961152076721191,
+ 0.0068249602317810056,
+ 0.006804480075836182,
+ 0.006816768169403077,
+ 0.006810624122619629,
+ 0.006831103801727295,
  0.00672051191329956,
- 0.0066744318008422855,
- 0.0067010560035705566,
- 0.00669593620300293,
- 0.006625279903411865,
- 0.006594560146331787,
- 0.0067051520347595215,
- 0.0067573761940002445,
  0.006713344097137451,
- 0.006699007987976074,
- 0.0066744318008422855,
- 0.006662144184112549,
- 0.006643712043762207,
- 0.0066447358131408694,
- 0.006665215969085693,
- 0.006672383785247803,
- 0.006615039825439453,
- 0.0065689277648925785,
- 0.0065710082054138185,
- 0.006589439868927002,
- 0.0066304001808166506,
- 0.006586368083953857,
- 0.006606912136077881,
- 0.006685696125030518,
- 0.006676544189453125,
+ 0.006715392112731934,
+ 0.006732800006866455,
+ 0.006807551860809326,
+ 0.006795263767242431,
+ 0.006711296081542969,
+ 0.0071157760620117185,
+ 0.0070266880989074704,
+ 0.007010303974151612,
+ 0.007039999961853028,
+ 0.007013376235961914,
+ 0.006999040126800537,
+ 0.007049215793609619,
+ 0.006831103801727295,
+ 0.006948863983154297,
+ 0.006884352207183838,
+ 0.006862847805023193,
+ 0.006821887969970703,
+ 0.006837247848510742,
+ 0.006814720153808594,
+ 0.0067686400413513184,
+ 0.006760447978973389,
+ 0.006825984001159668,
+ 0.006660096168518067,
+ 0.006620160102844238,
+ 0.006519807815551758,
+ 0.006575104236602783,
+ 0.006752255916595459,
  0.006643712043762207,
- 0.00673689603805542,
- 0.006707200050354004,
+ 0.006392799854278565,
+ 0.0064102401733398436,
+ 0.00642464017868042,
+ 0.006437888145446777,
+ 0.006487040042877197,
+ 0.006688767910003662,
+ 0.006830080032348633,
+ 0.006897664070129395,
+ 0.006937600135803222,
+ 0.007103487968444824,
+ 0.006973440170288086,
+ 0.006881279945373535,
+ 0.006785024166107178,
+ 0.006767615795135498,
+ 0.006731776237487793,
  0.0067348480224609375,
- 0.006665215969085693,
- 0.006648831844329834,
- 0.006618112087249756,
- 0.006618112087249756,
- 0.0066754879951477055,
- 0.0066468482017517086,
- 0.0066119680404663084,
- 0.006657023906707763,
- 0.006590464115142822,
- 0.006568960189819336,
- 0.006625279903411865,
- 0.006615039825439453,
- 0.006619135856628418,
- 0.006757343769073486,
- 0.006660096168518067,
- 0.006551551818847656,
- 0.006602752208709717,
- 0.006633471965789795,
- 0.006677504062652588,
- 0.006617055892944336,
- 0.00662937593460083,
- 0.006621183872222901,
+ 0.006595583915710449,
+ 0.006816768169403077,
+ 0.006680575847625733,
+ 0.006692863941192627,
+ 0.006694911956787109,
+ 0.0066375679969787596,
+ 0.00657919979095459,
+ 0.007388160228729248,
+ 0.006978559970855713,
+ 0.006722559928894043,
  0.006614016056060791,
- 0.0066826238632202144,
+ 0.006685696125030518,
+ 0.006717440128326416,
+ 0.006617087841033936,
+ 0.006631423950195312,
+ 0.006749184131622315,
+ 0.006625279903411865,
+ 0.006597631931304931,
+ 0.0066119680404663084,
+ 0.006583295822143555,
+ 0.006612991809844971,
+ 0.006617087841033936,
+ 0.0065771517753601075,
+ 0.0066416640281677245,
  0.006708223819732666,
- 0.006724607944488525,
- 0.0067041277885437015,
- 0.006668288230895996,
- 0.006635519981384277,
- 0.006662144184112549,
+ 0.006398975849151611,
+ 0.006576128005981445,
+ 0.006651904106140137,
+ 0.006591487884521485,
+ 0.006654975891113281,
+ 0.006614016056060791,
  0.006605823993682861,
- 0.006583295822143555,
- 0.006681600093841553,
- 0.006493184089660644,
- 0.006428671836853027,
- 0.00637440013885498,
- 0.00638156795501709,
- 0.006459392070770263,
- 0.0064245758056640625,
- 0.006354944229125976,
- 0.006260735988616943,
- 0.006281216144561768,
- 0.006346752166748047,
- 0.007601151943206787,
- 0.006936639785766602,
- 0.006966271877288818,
- 0.006987775802612305,
- 0.007005184173583984,
- 0.0071157760620117185,
- 0.006987775802612305,
- 0.006860799789428711,
- 0.0068853759765625,
- 0.006847487926483154,
- 0.006890495777130127,
- 0.006919167995452881,
- 0.006833151817321777,
- 0.006830080032348633,
- 0.006811647891998291,
- 0.006821887969970703,
- 0.006820864200592041,
- 0.006779903888702392,
- 0.006780928134918213,
- 0.006766592025756836,
- 0.006842368125915528,
- 0.0067717118263244626,
- 0.006809599876403808,
- 0.0067276802062988285,
- 0.006751232147216797,
- 0.0067573761940002445,
- 0.00673689603805542,
- 0.006767615795135498,
- 0.006773759841918945,
- 0.00679423999786377,
- 0.006780928134918213,
- 0.006760447978973389,
- 0.006788095951080322,
- 0.006737919807434082,
- 0.006776832103729248,
- 0.006754303932189941,
- 0.006756351947784424,
- 0.006761472225189209,
- 0.006765567779541016,
- 0.006759424209594727,
- 0.006773759841918945,
- 0.0067645440101623535,
- 0.006765567779541016,
- 0.006758399963378906,
- 0.006768608093261718,
- 0.006780928134918213,
- 0.006776832103729248,
- 0.0067573761940002445,
- 0.0067758078575134275,
- 0.0067645440101623535,
- 0.00677785587310791,
- 0.006763519763946534,
- 0.006781951904296875,
- 0.006760479927062989,
- 0.006781951904296875,
- 0.006790143966674805,
- 0.006751232147216797,
- 0.00676966381072998
+ 0.006617087841033936,
+ 0.006625311851501465,
+ 0.0066078720092773435,
+ 0.0066447358131408694,
+ 0.006616064071655273,
+ 0.0066078720092773435,
+ 0.006625279903411865,
+ 0.006643712043762207,
+ 0.006602752208709717,
+ 0.006649856090545654,
+ 0.006658048152923584,
+ 0.006612991809844971,
+ 0.0067010560035705566,
+ 0.006659071922302246,
+ 0.006597631931304931
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 149.0269110160836
+ "value": 147.72053153215515
  },
  "energy": {
  "unit": "kWh",
- "cpu": 7.823070957821016e-08,
- "ram": 4.2766240117567866e-08,
- "gpu": 1.390351989473783e-07,
- "total": 2.6003214864315634e-07
+ "cpu": 7.713026814646536e-08,
+ "ram": 4.215088005626707e-08,
+ "gpu": 1.5341823240259891e-07,
+ "total": 2.7269938060533133e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 3845678.3333060327
+ "value": 3667041.6991055305
  }
  }
  }
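The summary fields in the diff follow directly from the raw "values" array: the new report's total of 1.0018918728828432 s over 148 samples gives a mean of ~0.0067695 s and a throughput of 148 / 1.0018918728828432 ≈ 147.72 samples/s, matching the values above. The sketch below recomputes those quantities from the file; the JSON path to the latency values and the standard-deviation convention are assumptions about the report layout, not guaranteed by this page.

    # Sketch: recompute count/total/mean/throughput from the "values" list in benchmark.json.
    # The nesting ["forward"]["latency"] is assumed from the diff context; the stdev
    # convention (sample vs. population) used by the report is also an assumption.
    import json
    import statistics

    with open("benchmark.json") as f:
        report = json.load(f)

    latency = report["forward"]["latency"]
    values = latency["values"]

    count = len(values)            # 148 in the new file
    total = sum(values)            # ~1.0018918728828432 s
    mean = total / count           # ~0.006769539681640831 s
    stdev = statistics.stdev(values)
    throughput = count / total     # ~147.72 samples/s, matching "throughput.value"

    print(count, total, mean, stdev, throughput)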