{
  "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
  "evaluation_time": 3.188656806945801,
  "kg_co2_emissions": null,
  "mteb_version": "1.14.5",
  "scores": {
    "test": [
      {
        "accuracy": 0.8599529253530598,
        "f1": 0.8341737507780422,
        "f1_weighted": 0.8468706523062558,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ],
        "main_score": 0.8599529253530598,
        "scores_per_experiment": [
          {
            "accuracy": 0.8628110289172831,
            "f1": 0.840327456348215,
            "f1_weighted": 0.8527085032183853
          },
          {
            "accuracy": 0.8691997310020175,
            "f1": 0.8434112706570907,
            "f1_weighted": 0.8539697720132201
          },
          {
            "accuracy": 0.8564223268325487,
            "f1": 0.8298460874416576,
            "f1_weighted": 0.8396552390600999
          },
          {
            "accuracy": 0.8668459986550101,
            "f1": 0.8413340840209095,
            "f1_weighted": 0.8530342211034073
          },
          {
            "accuracy": 0.8537323470073974,
            "f1": 0.821452993783254,
            "f1_weighted": 0.8379094448929613
          },
          {
            "accuracy": 0.8540685944855414,
            "f1": 0.8335631165394927,
            "f1_weighted": 0.8451133995248513
          },
          {
            "accuracy": 0.8433086751849361,
            "f1": 0.8143331643851046,
            "f1_weighted": 0.8273110347470976
          },
          {
            "accuracy": 0.8715534633490248,
            "f1": 0.8484125803429824,
            "f1_weighted": 0.8625693698048951
          },
          {
            "accuracy": 0.8658372562205784,
            "f1": 0.839216062746407,
            "f1_weighted": 0.8537159107404352
          },
          {
            "accuracy": 0.855749831876261,
            "f1": 0.8298406915153087,
            "f1_weighted": 0.842719627957205
          }
        ]
      }
    ]
  },
  "task_name": "MassiveScenarioClassification"
}