{ "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6", "evaluation_time": 16.37773036956787, "kg_co2_emissions": null, "mteb_version": "1.14.5", "scores": { "test": [ { "accuracy": 0.7779757901815737, "f1": 0.7419687607780208, "f1_weighted": 0.7593512915591463, "hf_subset": "en", "languages": [ "eng-Latn" ], "main_score": 0.7779757901815737, "scores_per_experiment": [ { "accuracy": 0.7800941492938803, "f1": 0.730188175301622, "f1_weighted": 0.7576404970196927 }, { "accuracy": 0.8026227303295226, "f1": 0.7627541481660578, "f1_weighted": 0.7871016875390036 }, { "accuracy": 0.769334229993275, "f1": 0.7431100058985453, "f1_weighted": 0.7578586887925545 }, { "accuracy": 0.8012777404169469, "f1": 0.7660061395800952, "f1_weighted": 0.784904340998894 }, { "accuracy": 0.7663080026899798, "f1": 0.7103446817534063, "f1_weighted": 0.745628516068683 }, { "accuracy": 0.7397444519166106, "f1": 0.7352237604134944, "f1_weighted": 0.7053291437765387 }, { "accuracy": 0.7760591795561533, "f1": 0.739693425630967, "f1_weighted": 0.7580139292543245 }, { "accuracy": 0.7858103564223269, "f1": 0.7350423712161895, "f1_weighted": 0.7666591669603378 }, { "accuracy": 0.7673167451244116, "f1": 0.7362994052738329, "f1_weighted": 0.7498796324450345 }, { "accuracy": 0.7911903160726295, "f1": 0.7610254945459979, "f1_weighted": 0.7804973127363991 } ] } ] }, "task_name": "MassiveIntentClassification" }