IlyasMoutawwakil (HF staff) committed
Commit e06fe9f · verified · 1 Parent(s): c3a7476

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub
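The commit message indicates the report was pushed with the huggingface_hub Python client. A minimal sketch of such an upload, assuming an HfApi-based push (the local path and repo_id below are placeholders, not taken from this commit):

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # placeholder local path
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json",
    repo_id="<namespace>/<repo>",  # placeholder: the target repo is not shown on this page
    repo_type="dataset",           # assumption: benchmark results are typically stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub",
)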

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 959.647744,
+            "max_ram": 959.885312,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -10,106 +10,103 @@
         },
         "latency": {
             "unit": "s",
-            "count": 75,
-            "total": 1.002491840362549,
-            "mean": 0.013366557871500649,
-            "stdev": 0.0008227278964229968,
-            "p50": 0.0132741117477417,
-            "p90": 0.013433651351928711,
-            "p95": 0.01408081922531128,
-            "p99": 0.017026191139221207,
+            "count": 72,
+            "total": 1.0025584964752199,
+            "mean": 0.013924423562155829,
+            "stdev": 0.00048675710230325304,
+            "p50": 0.01393612813949585,
+            "p90": 0.014347980403900147,
+            "p95": 0.014643711805343629,
+            "p99": 0.015418695716857918,
             "values": [
-                0.019366912841796875,
-                0.01620377540588379,
-                0.014156800270080566,
-                0.014048255920410157,
-                0.01457151985168457,
-                0.013172736167907715,
-                0.013194239616394043,
-                0.013156352043151855,
+                0.01416806411743164,
+                0.014784511566162109,
+                0.014528512001037597,
+                0.015007712364196778,
+                0.014243840217590332,
+                0.014041152000427246,
+                0.01415167999267578,
+                0.013958144187927245,
+                0.014018560409545898,
+                0.013723648071289063,
+                0.014250975608825683,
+                0.014004223823547364,
+                0.014238816261291504,
+                0.014345215797424317,
+                0.013989888191223144,
+                0.014053376197814941,
+                0.014063648223876953,
+                0.014000127792358399,
+                0.013897727966308594,
+                0.013801471710205078,
+                0.014261247634887696,
+                0.014424063682556153,
+                0.014286848068237304,
+                0.0140697603225708,
+                0.01423971176147461,
+                0.014016511917114258,
+                0.013916159629821777,
+                0.014004223823547364,
+                0.014132224082946777,
+                0.013883392333984374,
+                0.013894656181335448,
+                0.013944831848144532,
+                0.01408614444732666,
+                0.014446592330932618,
+                0.014348287582397461,
+                0.014152704238891601,
+                0.014036992073059081,
+                0.013924351692199707,
+                0.014114815711975098,
+                0.013927424430847168,
+                0.014320575714111328,
+                0.016151552200317384,
+                0.015119359970092774,
+                0.014119935989379882,
+                0.013737983703613281,
+                0.013697024345397948,
+                0.013517824172973633,
+                0.01355059242248535,
+                0.013536255836486816,
+                0.013575167655944824,
+                0.013433856010437012,
+                0.013421567916870117,
+                0.013439999580383302,
+                0.013533184051513672,
+                0.013427712440490723,
+                0.013421567916870117,
+                0.013828096389770507,
+                0.013633536338806153,
+                0.013431808471679688,
+                0.013399040222167969,
+                0.013709312438964843,
+                0.013415424346923829,
+                0.013543423652648925,
+                0.013690879821777344,
+                0.013325311660766602,
+                0.01325055980682373,
                 0.013207551956176757,
-                0.013339679718017578,
-                0.013401087760925292,
-                0.013302783966064453,
-                0.013305855751037597,
-                0.013332480430603028,
-                0.013360128402709961,
-                0.013196288108825683,
-                0.013156352043151855,
-                0.013200384140014648,
-                0.013269023895263672,
-                0.013164544105529785,
-                0.013138943672180176,
-                0.013159423828125,
-                0.01323519992828369,
-                0.013295616149902344,
-                0.0132741117477417,
-                0.013332480430603028,
-                0.013293567657470704,
-                0.013475839614868163,
-                0.013637632369995116,
-                0.013455360412597657,
-                0.01334169578552246,
-                0.013234175682067872,
-                0.01316659164428711,
-                0.013387776374816895,
-                0.013316096305847168,
-                0.013296640396118165,
-                0.013282303810119628,
-                0.013297663688659669,
-                0.013254688262939454,
-                0.0132741117477417,
-                0.013175807952880859,
-                0.013119487762451172,
-                0.013099007606506348,
-                0.013097984313964844,
-                0.0132259521484375,
-                0.013283328056335449,
-                0.013299712181091309,
-                0.013360128402709961,
-                0.013326335906982421,
-                0.013277183532714844,
-                0.013288448333740235,
-                0.013318143844604492,
-                0.013007871627807617,
-                0.012816384315490722,
-                0.01286348819732666,
-                0.012795904159545898,
-                0.012805120468139648,
-                0.012816384315490722,
-                0.012896256446838379,
-                0.012797951698303223,
-                0.012823552131652831,
-                0.012819392204284668,
-                0.012859392166137695,
-                0.012997632026672363,
-                0.013349823951721192,
-                0.013234175682067872,
-                0.013318143844604492,
-                0.013297663688659669,
-                0.013244416236877441,
-                0.013275135993957519,
-                0.013277183532714844,
-                0.013283328056335449,
-                0.013263872146606445,
-                0.013285375595092774,
-                0.01323519992828369
+                0.013238271713256837,
+                0.013398015975952148,
+                0.013467647552490235,
+                0.013437952041625977,
+                0.013195263862609862
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 74.8135765103848
+            "value": 71.81625835613237
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5209295556076575e-07,
-            "ram": 8.311950711616262e-08,
-            "gpu": 3.271655038977323e-07,
-            "total": 5.623779665746606e-07
+            "cpu": 1.5815010335710316e-07,
+            "ram": 8.645012903434691e-08,
+            "gpu": 3.2874100373332195e-07,
+            "total": 5.73341236124772e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 1778163.5473573292
+            "value": 1744162.0050897181
         }
     }
 }
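For reference, the throughput reported in the new version is consistent with count / total from the latency block (72 / 1.0025584964752199 ≈ 71.816 samples/s). A minimal sketch that recomputes it from the uploaded report (the local file path is a placeholder):

import json

# Load the benchmark report and recompute the derived throughput.
with open("benchmark_report.json") as f:
    report = json.load(f)

latency = report["forward"]["latency"]
throughput = latency["count"] / latency["total"]  # samples per second
print(round(throughput, 6))  # ~71.816258 with the values in this commit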