{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 4114.33,
"map_at_1": 0.4352,
"map_at_10": 0.54368,
"map_at_100": 0.54918,
"map_at_1000": 0.54942,
"map_at_3": 0.51712,
"map_at_5": 0.53336,
"mrr_at_1": 0.46955,
"mrr_at_10": 0.58219,
"mrr_at_100": 0.58735,
"mrr_at_1000": 0.58753,
"mrr_at_3": 0.55518,
"mrr_at_5": 0.57191,
"ndcg_at_1": 0.46955,
"ndcg_at_10": 0.6045,
"ndcg_at_100": 0.63047,
"ndcg_at_1000": 0.63713,
"ndcg_at_3": 0.55233,
"ndcg_at_5": 0.58072,
"precision_at_1": 0.46955,
"precision_at_10": 0.08267,
"precision_at_100": 0.00962,
"precision_at_1000": 0.00103,
"precision_at_3": 0.22327,
"precision_at_5": 0.14941,
"recall_at_1": 0.4352,
"recall_at_10": 0.75632,
"recall_at_100": 0.87416,
"recall_at_1000": 0.92557,
"recall_at_3": 0.61597,
"recall_at_5": 0.68518
}
}