{
  "test": {
    "cos_sim": {
      "pearson": 0.7951125593897028,
      "spearman": 0.7446048326701329
    },
    "euclidean": {
      "pearson": 0.7087726087052986,
      "spearman": 0.677721470654411
    },
    "evaluation_time": 107.32,
    "manhattan": {
      "pearson": 0.7105892792135637,
      "spearman": 0.6793472619779036
    }
  },
  "dataset_version": null,
  "mteb_version": "0.0.2"
}