{
  "dataset_revision": "3b276f1df9adaf707be6bded9b2fbee03b504489",
  "evaluation_time": 2034.5332243442535,
  "kg_co2_emissions": null,
  "mteb_version": "1.14.5",
  "scores": {
    "test": [
      {
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ],
        "main_score": 0.32035669641471426,
        "map": 0.32035669641471426,
        "mrr": 0.3320696040405505,
        "nAUC_map_diff1": 0.11970213945184797,
        "nAUC_map_max": -0.2065323436756871,
        "nAUC_map_std": -0.028432147627656747,
        "nAUC_mrr_diff1": 0.11170275527238203,
        "nAUC_mrr_max": -0.1513910752400865,
        "nAUC_mrr_std": -0.010907679433020777
      }
    ]
  },
  "task_name": "MindSmallReranking"
}