{ "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d", "evaluation_time": 17.431034326553345, "kg_co2_emissions": null, "mteb_version": "1.14.5", "scores": { "test": [ { "accuracy": 0.5788599999999999, "f1": 0.5371145780531652, "f1_weighted": 0.5371145780531654, "hf_subset": "en", "languages": [ "eng-Latn" ], "main_score": 0.5788599999999999, "scores_per_experiment": [ { "accuracy": 0.5784, "f1": 0.5407664467082649, "f1_weighted": 0.540766446708265 }, { "accuracy": 0.5822, "f1": 0.5497041229546106, "f1_weighted": 0.5497041229546107 }, { "accuracy": 0.6036, "f1": 0.5691304466428295, "f1_weighted": 0.5691304466428295 }, { "accuracy": 0.5938, "f1": 0.5594552253489669, "f1_weighted": 0.5594552253489669 }, { "accuracy": 0.5736, "f1": 0.533127611084584, "f1_weighted": 0.533127611084584 }, { "accuracy": 0.574, "f1": 0.5248260011994395, "f1_weighted": 0.5248260011994395 }, { "accuracy": 0.5982, "f1": 0.5657984493080271, "f1_weighted": 0.5657984493080271 }, { "accuracy": 0.6068, "f1": 0.5783878518868237, "f1_weighted": 0.5783878518868237 }, { "accuracy": 0.5826, "f1": 0.5403621518792942, "f1_weighted": 0.5403621518792942 }, { "accuracy": 0.4954, "f1": 0.4095874735188125, "f1_weighted": 0.4095874735188124 } ] } ] }, "task_name": "AmazonReviewsClassification" }