KaLM-embedding-multilingual-mini-instruct-v2/eval/mteb_eng/MassiveScenarioClassification.json
{
  "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
  "evaluation_time": 3.188656806945801,
  "kg_co2_emissions": null,
  "mteb_version": "1.14.5",
  "scores": {
    "test": [
      {
        "accuracy": 0.8599529253530598,
        "f1": 0.8341737507780422,
        "f1_weighted": 0.8468706523062558,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ],
        "main_score": 0.8599529253530598,
        "scores_per_experiment": [
          {
            "accuracy": 0.8628110289172831,
            "f1": 0.840327456348215,
            "f1_weighted": 0.8527085032183853
          },
          {
            "accuracy": 0.8691997310020175,
            "f1": 0.8434112706570907,
            "f1_weighted": 0.8539697720132201
          },
          {
            "accuracy": 0.8564223268325487,
            "f1": 0.8298460874416576,
            "f1_weighted": 0.8396552390600999
          },
          {
            "accuracy": 0.8668459986550101,
            "f1": 0.8413340840209095,
            "f1_weighted": 0.8530342211034073
          },
          {
            "accuracy": 0.8537323470073974,
            "f1": 0.821452993783254,
            "f1_weighted": 0.8379094448929613
          },
          {
            "accuracy": 0.8540685944855414,
            "f1": 0.8335631165394927,
            "f1_weighted": 0.8451133995248513
          },
          {
            "accuracy": 0.8433086751849361,
            "f1": 0.8143331643851046,
            "f1_weighted": 0.8273110347470976
          },
          {
            "accuracy": 0.8715534633490248,
            "f1": 0.8484125803429824,
            "f1_weighted": 0.8625693698048951
          },
          {
            "accuracy": 0.8658372562205784,
            "f1": 0.839216062746407,
            "f1_weighted": 0.8537159107404352
          },
          {
            "accuracy": 0.855749831876261,
            "f1": 0.8298406915153087,
            "f1_weighted": 0.842719627957205
          }
        ]
      }
    ]
  },
  "task_name": "MassiveScenarioClassification"
}
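
A minimal sketch of reading this result file and relating the aggregate fields to the per-experiment entries: in this file, "main_score" equals the mean "accuracy" over the ten entries in "scores_per_experiment". The local filename used below is an assumption; adjust it to wherever the JSON is saved.

import json
from statistics import mean

# Assumed local path; this file is stored under eval/mteb_eng/ in the repo.
with open("MassiveScenarioClassification.json") as f:
    result = json.load(f)

run = result["scores"]["test"][0]          # single "en" test split entry
per_experiment = run["scores_per_experiment"]

# main_score here matches the mean accuracy across the 10 experiments.
mean_accuracy = mean(exp["accuracy"] for exp in per_experiment)
print(f"main_score:    {run['main_score']:.6f}")
print(f"mean accuracy: {mean_accuracy:.6f}")
print(f"mean f1:       {mean(exp['f1'] for exp in per_experiment):.6f}")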