SGPT-125M-weightedmean-msmarco-specb-bitfit/evaluation/mteb/CQADupstackMathematicaRetrieval.json
Muennighoff's picture
Add MTEB evaluation
154c4e9
raw
history blame
937 Bytes
{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 78.71,
"map_at_1": 0.10133,
"map_at_10": 0.15666,
"map_at_100": 0.16592,
"map_at_1000": 0.16734,
"map_at_3": 0.13625,
"map_at_5": 0.14721,
"mrr_at_1": 0.12562,
"mrr_at_10": 0.18487,
"mrr_at_100": 0.19391,
"mrr_at_1000": 0.19487,
"mrr_at_3": 0.16418,
"mrr_at_5": 0.176,
"ndcg_at_1": 0.12562,
"ndcg_at_10": 0.1943,
"ndcg_at_100": 0.24546,
"ndcg_at_1000": 0.28193,
"ndcg_at_3": 0.1551,
"ndcg_at_5": 0.17322,
"precision_at_1": 0.12562,
"precision_at_10": 0.03794,
"precision_at_100": 0.0074,
"precision_at_1000": 0.00122,
"precision_at_3": 0.07546,
"precision_at_5": 0.05721,
"recall_at_1": 0.10133,
"recall_at_10": 0.28262,
"recall_at_100": 0.51743,
"recall_at_1000": 0.78075,
"recall_at_3": 0.17634,
"recall_at_5": 0.22129
}
}