{ "dataset_revision": "0fd18e25b25c072e09e0d92ab615fda904d66300", "evaluation_time": 15.749364614486694, "kg_co2_emissions": null, "mteb_version": "1.14.5", "scores": { "test": [ { "accuracy": 0.8947727272727273, "f1": 0.8927814753928962, "f1_weighted": 0.8927814753928965, "hf_subset": "default", "languages": [ "eng-Latn" ], "main_score": 0.8947727272727273, "scores_per_experiment": [ { "accuracy": 0.9, "f1": 0.8989656383883432, "f1_weighted": 0.8989656383883434 }, { "accuracy": 0.8954545454545455, "f1": 0.8936541161043582, "f1_weighted": 0.8936541161043584 }, { "accuracy": 0.8915584415584416, "f1": 0.8863383353952968, "f1_weighted": 0.8863383353952972 }, { "accuracy": 0.8964285714285715, "f1": 0.8954671538394995, "f1_weighted": 0.8954671538394998 }, { "accuracy": 0.8961038961038961, "f1": 0.8950318712883112, "f1_weighted": 0.8950318712883112 }, { "accuracy": 0.8987012987012987, "f1": 0.8983531442053102, "f1_weighted": 0.8983531442053105 }, { "accuracy": 0.9, "f1": 0.89895146129248, "f1_weighted": 0.8989514612924802 }, { "accuracy": 0.8805194805194805, "f1": 0.8752873457163935, "f1_weighted": 0.8752873457163938 }, { "accuracy": 0.9022727272727272, "f1": 0.9009498033909109, "f1_weighted": 0.9009498033909112 }, { "accuracy": 0.8866883116883116, "f1": 0.8848158843080594, "f1_weighted": 0.8848158843080596 } ] } ] }, "task_name": "Banking77Classification" }