{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 69.17,
"map_at_1": 0.03774,
"map_at_10": 0.07615,
"map_at_100": 0.09574,
"map_at_1000": 0.10711,
"map_at_3": 0.05754,
"map_at_5": 0.06666,
"mrr_at_1": 0.33127,
"mrr_at_10": 0.40351,
"mrr_at_100": 0.41144,
"mrr_at_1000": 0.41202,
"mrr_at_3": 0.38029,
"mrr_at_5": 0.3919,
"ndcg_at_1": 0.31579,
"ndcg_at_10": 0.22792,
"ndcg_at_100": 0.21699,
"ndcg_at_1000": 0.30893,
"ndcg_at_3": 0.26829,
"ndcg_at_5": 0.25119,
"precision_at_1": 0.33127,
"precision_at_10": 0.16718,
"precision_at_100": 0.05709,
"precision_at_1000": 0.01836,
"precision_at_3": 0.24768,
"precision_at_5": 0.213,
"recall_at_1": 0.03774,
"recall_at_10": 0.10303,
"recall_at_100": 0.23013,
"recall_at_1000": 0.54865,
"recall_at_3": 0.06554,
"recall_at_5": 0.08087
}
}