{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 46237.87,
"map_at_1": 0.35726,
"map_at_10": 0.50207,
"map_at_100": 0.51055,
"map_at_1000": 0.51128,
"map_at_3": 0.47576,
"map_at_5": 0.49172,
"mrr_at_1": 0.71452,
"mrr_at_10": 0.77419,
"mrr_at_100": 0.77711,
"mrr_at_1000": 0.77723,
"mrr_at_3": 0.76394,
"mrr_at_5": 0.77001,
"ndcg_at_1": 0.71452,
"ndcg_at_10": 0.59261,
"ndcg_at_100": 0.62424,
"ndcg_at_1000": 0.63951,
"ndcg_at_3": 0.55327,
"ndcg_at_5": 0.57417,
"precision_at_1": 0.71452,
"precision_at_10": 0.12061,
"precision_at_100": 0.01455,
"precision_at_1000": 0.00166,
"precision_at_3": 0.3436,
"precision_at_5": 0.22266,
"recall_at_1": 0.35726,
"recall_at_10": 0.60304,
"recall_at_100": 0.72755,
"recall_at_1000": 0.82978,
"recall_at_3": 0.5154,
"recall_at_5": 0.55665
}
}