{
  "dataset_revision": "3d86128a09e091d6018b6d26cad27f2739fc2db7",
  "task_name": "ImdbClassification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.61454,
        "f1": 0.611751,
        "f1_weighted": 0.611751,
        "ap": 0.572238,
        "ap_weighted": 0.572238,
        "scores_per_experiment": [
          {
            "accuracy": 0.64184,
            "f1": 0.641046,
            "f1_weighted": 0.641046,
            "ap": 0.593128,
            "ap_weighted": 0.593128
          },
          {
            "accuracy": 0.64316,
            "f1": 0.643142,
            "f1_weighted": 0.643142,
            "ap": 0.591785,
            "ap_weighted": 0.591785
          },
          {
            "accuracy": 0.5602,
            "f1": 0.556495,
            "f1_weighted": 0.556495,
            "ap": 0.534535,
            "ap_weighted": 0.534535
          },
          {
            "accuracy": 0.63604,
            "f1": 0.635737,
            "f1_weighted": 0.635737,
            "ap": 0.58766,
            "ap_weighted": 0.58766
          },
          {
            "accuracy": 0.60992,
            "f1": 0.604073,
            "f1_weighted": 0.604073,
            "ap": 0.56468,
            "ap_weighted": 0.56468
          },
          {
            "accuracy": 0.60764,
            "f1": 0.606284,
            "f1_weighted": 0.606284,
            "ap": 0.566947,
            "ap_weighted": 0.566947
          },
          {
            "accuracy": 0.60716,
            "f1": 0.592887,
            "f1_weighted": 0.592887,
            "ap": 0.571938,
            "ap_weighted": 0.571938
          },
          {
            "accuracy": 0.60104,
            "f1": 0.599904,
            "f1_weighted": 0.599904,
            "ap": 0.561947,
            "ap_weighted": 0.561947
          },
          {
            "accuracy": 0.5806,
            "f1": 0.580167,
            "f1_weighted": 0.580167,
            "ap": 0.546404,
            "ap_weighted": 0.546404
          },
          {
            "accuracy": 0.6578,
            "f1": 0.657772,
            "f1_weighted": 0.657772,
            "ap": 0.603361,
            "ap_weighted": 0.603361
          }
        ],
        "main_score": 0.61454,
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 65.28035974502563,
  "kg_co2_emissions": null
}