{ "dataset_revision": "4f58c6b202a23cf9a4da393831edf4f9183cad37", "evaluation_time": 5.969003677368164, "kg_co2_emissions": null, "mteb_version": "1.14.5", "scores": { "test": [ { "accuracy": 0.9249500000000002, "f1": 0.886168158340735, "f1_weighted": 0.926145188160579, "hf_subset": "default", "languages": [ "eng-Latn" ], "main_score": 0.9249500000000002, "scores_per_experiment": [ { "accuracy": 0.927, "f1": 0.890449999027623, "f1_weighted": 0.9282378345087132 }, { "accuracy": 0.924, "f1": 0.880560297531507, "f1_weighted": 0.9248869581905886 }, { "accuracy": 0.9255, "f1": 0.8862240737974926, "f1_weighted": 0.9265052775253043 }, { "accuracy": 0.9265, "f1": 0.8869815746847861, "f1_weighted": 0.9275741814511608 }, { "accuracy": 0.923, "f1": 0.8823169403495251, "f1_weighted": 0.9241233378603649 }, { "accuracy": 0.924, "f1": 0.8859209981064109, "f1_weighted": 0.9252192205795762 }, { "accuracy": 0.9245, "f1": 0.8881125763299339, "f1_weighted": 0.9258067333726725 }, { "accuracy": 0.924, "f1": 0.8857149871893908, "f1_weighted": 0.9255862666355613 }, { "accuracy": 0.926, "f1": 0.8904000379387419, "f1_weighted": 0.9272615131574828 }, { "accuracy": 0.925, "f1": 0.885000098451938, "f1_weighted": 0.9262505583243663 } ] } ] }, "task_name": "EmotionClassification" }