Add MTEB evaluation (commit 5d4e702)
{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "accuracy": 0.6404799999999999,
    "accuracy_stderr": 0.03197141223030349,
    "ap": 0.5918525145533928,
    "ap_stderr": 0.024531019346391677,
    "evaluation_time": 915.58,
    "f1": 0.6394712318134925,
    "f1_stderr": 0.0317267000377415,
    "main_score": 0.6404799999999999
  }
}
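
This file has the shape of a per-task results JSON written by the MTEB benchmark (mteb_version 0.0.2): accuracy, average precision (ap), and F1 on the test split, each with an accompanying standard-error estimate, plus the evaluation time in seconds and a main_score (here equal to accuracy). Below is a minimal sketch of how such a file is typically generated with the mteb package; the model and task names are placeholders and are not taken from this file.

# Hypothetical sketch: producing a result file with this shape using mteb.
# "model-name-here" and "SomeClassificationTask" are placeholders.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("model-name-here")        # any model exposing .encode()
evaluation = MTEB(tasks=["SomeClassificationTask"])   # placeholder task name
evaluation.run(model, output_folder="results")
# mteb writes one JSON per task under the output folder, containing per-split
# scores (accuracy, ap, f1 and their *_stderr), the evaluation time, and a
# "main_score" field, as in the file above.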