IlyasMoutawwakil committed (verified)
Commit c8a1773 · Parent: 6a31fb9

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

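The commit message notes the file was uploaded with huggingface_hub. A minimal sketch of such an upload via HfApi.upload_file, assuming a placeholder repo_id and a dataset-type results repository (neither is shown on this page):

    from huggingface_hub import HfApi

    api = HfApi()  # picks up the token from `huggingface-cli login` by default
    api.upload_file(
        path_or_fileobj="benchmark.json",  # local result file
        path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
        repo_id="<namespace>/<benchmark-results-repo>",  # placeholder, not taken from this page
        repo_type="dataset",  # assumption: results are stored in a dataset repo
        commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )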
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 910.036992,
+ "max_ram": 908.791808,
  "max_global_vram": 1195.900928,
  "max_process_vram": 0.0,
  "max_reserved": 555.74528,
@@ -112,169 +112,165 @@
  },
  "latency": {
  "unit": "s",
- "count": 138,
- "total": 0.9998775987625124,
- "mean": 0.0072454898461051616,
- "stdev": 0.00023112556359064607,
- "p50": 0.0072386240959167476,
- "p90": 0.007369216012954712,
- "p95": 0.007695462274551392,
- "p99": 0.008186562156677245,
+ "count": 134,
+ "total": 0.9972783656120301,
+ "mean": 0.007442375862776344,
+ "stdev": 0.0006729317768407098,
+ "p50": 0.007189503908157348,
+ "p90": 0.007636889410018921,
+ "p95": 0.00880588812828064,
+ "p99": 0.010332610874176024,
  "values": [
- 0.00808140754699707,
- 0.007606272220611572,
- 0.007759871959686279,
- 0.007662591934204102,
- 0.007656447887420655,
- 0.007911424160003662,
- 0.007995391845703125,
- 0.0076267518997192385,
- 0.007865344047546387,
- 0.00830668830871582,
- 0.008248319625854492,
- 0.007328767776489258,
- 0.0071198720932006835,
- 0.007127039909362793,
- 0.007132160186767578,
- 0.007088128089904785,
- 0.0070594558715820314,
- 0.007116799831390381,
- 0.007094272136688232,
+ 0.01092300796508789,
+ 0.010377216339111327,
+ 0.01022156810760498,
+ 0.010242048263549805,
+ 0.01021132755279541,
+ 0.009382911682128906,
+ 0.0077669119834899904,
+ 0.008253439903259278,
+ 0.0073359360694885255,
+ 0.0071905279159545895,
+ 0.007132224082946777,
+ 0.007144447803497315,
+ 0.007146495819091797,
+ 0.007188479900360107,
+ 0.007157760143280029,
+ 0.007154687881469727,
+ 0.007113728046417236,
+ 0.007173120021820068,
+ 0.007154687881469727,
+ 0.007200767993927002,
+ 0.0071905279159545895,
+ 0.00714137601852417,
+ 0.0071393918991088864,
+ 0.007151616096496582,
  0.007156735897064209,
- 0.007090176105499267,
- 0.007080959796905518,
- 0.0072724480628967286,
  0.007104512214660645,
- 0.007111680030822754,
- 0.007051263809204102,
- 0.007073791980743408,
- 0.007118783950805664,
- 0.007095295906066895,
- 0.0070594558715820314,
- 0.007072768211364746,
- 0.0070830078125,
- 0.007096320152282715,
- 0.007079936027526855,
- 0.0070553598403930665,
- 0.007071743965148926,
- 0.00707583999633789,
- 0.0070891518592834475,
+ 0.007121920108795166,
+ 0.007174143791198731,
+ 0.007159808158874512,
+ 0.00719974422454834,
+ 0.007131135940551757,
+ 0.007205887794494629,
+ 0.007391232013702393,
+ 0.007404543876647949,
+ 0.007153664112091064,
+ 0.007108607769012451,
+ 0.007121984004974365,
+ 0.007168000221252442,
+ 0.007130112171173096,
+ 0.00722441577911377,
+ 0.007134208202362061,
+ 0.007175168037414551,
+ 0.007122047901153564,
+ 0.007152480125427246,
+ 0.007126016139984131,
+ 0.007049215793609619,
+ 0.007098368167877197,
  0.007074816226959229,
- 0.007050240039825439,
- 0.007070720195770264,
- 0.0070860800743103025,
- 0.007071743965148926,
- 0.007051263809204102,
- 0.007073791980743408,
- 0.007096320152282715,
- 0.007073791980743408,
- 0.007039040088653564,
- 0.00708403205871582,
- 0.007080959796905518,
- 0.0070830078125,
- 0.007067647933959961,
- 0.007019519805908203,
+ 0.007251967906951904,
+ 0.0070553598403930665,
+ 0.007047167778015137,
+ 0.00710041618347168,
+ 0.007122943878173828,
+ 0.007139328002929687,
+ 0.007117824077606201,
+ 0.007104479789733887,
+ 0.00714035177230835,
+ 0.007173120021820068,
+ 0.00703385591506958,
+ 0.007105535984039306,
+ 0.007085055828094483,
+ 0.007169023990631103,
+ 0.007156735897064209,
+ 0.007105535984039306,
+ 0.007124032020568848,
  0.007070720195770264,
- 0.007088128089904785,
- 0.00707583999633789,
- 0.007041024208068848,
- 0.007051263809204102,
- 0.007065599918365479,
- 0.0070829758644104,
- 0.007071712017059326,
- 0.007048192024230957,
- 0.007076863765716553,
- 0.007069695949554444,
- 0.007060480117797851,
- 0.0070594558715820314,
- 0.007684095859527588,
- 0.007074816226959229,
+ 0.007713791847229004,
  0.00708403205871582,
- 0.007094272136688232,
- 0.007054336071014404,
- 0.0070891518592834475,
- 0.00710041618347168,
- 0.007101439952850342,
- 0.007066624164581299,
- 0.007307263851165771,
- 0.007304192066192627,
- 0.007304192066192627,
- 0.0072979841232299805,
- 0.007301119804382325,
- 0.007251967906951904,
- 0.0072837119102478025,
- 0.007274496078491211,
- 0.0073359360694885255,
- 0.007282688140869141,
- 0.007277567863464355,
- 0.007270400047302246,
- 0.007263232231140137,
- 0.007277632236480713,
- 0.007238592147827148,
- 0.00722431993484497,
- 0.0071823358535766605,
+ 0.007058432102203369,
+ 0.007126016139984131,
+ 0.007151616096496582,
+ 0.007157760143280029,
+ 0.007153664112091064,
+ 0.007154687881469727,
+ 0.007124991893768311,
+ 0.007150591850280762,
+ 0.007095295906066895,
+ 0.007156735897064209,
  0.007142399787902832,
- 0.007177216053009033,
- 0.007473184108734131,
- 0.007361536026000977,
- 0.0073359360694885255,
- 0.0073359360694885255,
- 0.007327744007110596,
- 0.007387135982513428,
- 0.007361536026000977,
- 0.007345151901245117,
- 0.00733081579208374,
- 0.007355391979217529,
- 0.007290880203247071,
- 0.007326784133911132,
- 0.00733081579208374,
- 0.007325695991516113,
- 0.0073359360694885255,
- 0.007279615879058838,
- 0.0072837119102478025,
- 0.0072765440940856935,
- 0.0072837119102478025,
- 0.007336959838867187,
- 0.007296000003814697,
- 0.007279615879058838,
- 0.007265279769897461,
- 0.007302144050598145,
- 0.007354368209838867,
- 0.007282688140869141,
- 0.007267327785491944,
- 0.007263264179229736,
- 0.00724070405960083,
- 0.007358463764190673,
- 0.007266304016113281,
- 0.007279615879058838,
- 0.007323647975921631,
- 0.007252992153167725,
- 0.007251967906951904,
- 0.007284671783447266,
- 0.007290847778320312,
- 0.007296000003814697,
- 0.007278592109680176,
- 0.007282688140869141,
- 0.007238656044006348,
- 0.00722431993484497,
- 0.007065599918365479,
- 0.007089216232299805
+ 0.007138304233551026,
+ 0.007151616096496582,
+ 0.007192575931549072,
+ 0.007114751815795899,
+ 0.007131135940551757,
+ 0.0070563840866088865,
+ 0.007063551902770996,
+ 0.007054336071014404,
+ 0.007464799880981445,
+ 0.007526400089263916,
+ 0.0073851199150085445,
+ 0.007414783954620361,
+ 0.007463935852050781,
+ 0.007476287841796875,
+ 0.007456768035888672,
+ 0.007624703884124756,
+ 0.00759603214263916,
+ 0.008812543869018554,
+ 0.008802304267883301,
+ 0.008266752243041992,
+ 0.00759500789642334,
+ 0.007642111778259277,
+ 0.007684095859527588,
+ 0.007610367774963379,
+ 0.0075939841270446775,
+ 0.007520256042480469,
+ 0.007414783954620361,
+ 0.007298975944519043,
+ 0.007364416122436523,
+ 0.0074967041015625,
+ 0.007482367992401123,
+ 0.007448575973510742,
+ 0.007517183780670166,
+ 0.007412735939025879,
+ 0.007415775775909424,
+ 0.007488512039184571,
+ 0.007492640018463135,
+ 0.007547904014587403,
+ 0.007535615921020508,
+ 0.007515103816986084,
+ 0.00753766393661499,
+ 0.007512063980102539,
+ 0.007469056129455566,
+ 0.007472127914428711,
+ 0.007472095966339111,
+ 0.007507967948913574,
+ 0.007557119846343994,
+ 0.007510015964508057,
+ 0.007444384098052978,
+ 0.0074291200637817386,
+ 0.007434239864349365,
+ 0.0072724480628967286,
+ 0.007353343963623047,
+ 0.007312384128570557,
+ 0.007367680072784424
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 138.01689343855108
+ "value": 134.36569429415442
  },
  "energy": {
  "unit": "kWh",
- "cpu": 8.642965473011077e-08,
- "ram": 4.724945486413806e-08,
- "gpu": 1.596027229781017e-07,
- "total": 2.932818325723505e-07
+ "cpu": 8.774418276523858e-08,
+ "ram": 4.7966484885869755e-08,
+ "gpu": 1.5749806838518408e-07,
+ "total": 2.9320873603629243e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 3409689.5509315506
+ "value": 3410539.581863697
  }
  }
  }
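The derived fields in the updated report follow directly from the latency block: mean = total / count (0.9972783656 / 134 ≈ 0.0074424 s) and throughput = count / total (≈ 134.366 samples/s), matching the committed values. A small sketch that recomputes them from benchmark.json; the find_key helper is only there because the nesting above the "forward" section is not shown in this diff:

    import json

    def find_key(obj, key):
        # Depth-first search for the first value stored under `key`.
        if isinstance(obj, dict):
            if key in obj:
                return obj[key]
            children = obj.values()
        elif isinstance(obj, list):
            children = obj
        else:
            return None
        for child in children:
            found = find_key(child, key)
            if found is not None:
                return found
        return None

    with open("benchmark.json") as f:
        report = json.load(f)

    forward = find_key(report, "forward")
    latency = forward["latency"]
    mean = latency["total"] / latency["count"]        # 0.9972783656 / 134 ≈ 0.007442375862776 s
    throughput = latency["count"] / latency["total"]  # ≈ 134.36569 samples/s
    print(f"mean={mean:.15f} s  throughput={throughput:.8f} samples/s")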