| datasetId (string, 5–121) | author (string, 2–42) | last_modified (unknown) | downloads (int64, 0–2.85M) | likes (int64, 0–6.72k) | tags (sequence, 1–7.92k) | task_categories (sequence, 0–47, nullable) | createdAt (unknown) | card (string, 15–1M) |
|---|---|---|---|---|---|---|---|---|
Asap7772/elix_persona_eval_4shot_infinstruct1e6 | Asap7772 | "2024-12-30T23:59:50Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-30T23:58:35Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: responses
sequence: string
- name: pairs
sequence:
sequence: int64
- name: sft_target
dtype: string
- name: level
sequence: string
- name: last_q
dtype: string
splits:
- name: train
num_bytes: 1126387766
num_examples: 31700
download_size: 541550814
dataset_size: 1126387766
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
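The `dataset_info` schema above is a common preference-data layout: `responses` holds candidate completions for a shared `prompt`, and `pairs` holds index pairs into that list. A minimal sketch of how such a row could be consumed, assuming the repository is public and the field semantics are as described (the two-element pair shape and its preferred/dispreferred ordering are assumptions, not documented here):

```python
from datasets import load_dataset

# Load the preference dataset described in the card above.
ds = load_dataset("Asap7772/elix_persona_eval_4shot_infinstruct1e6", split="train")
row = ds[0]

print(row["prompt"][:200])      # the shared prompt
print(len(row["responses"]))    # number of candidate completions

# Each pair is assumed to be a (i, j) index pair into `responses`;
# which side is "preferred" is an assumption.
for i, j in row["pairs"][:3]:
    print("A:", row["responses"][i][:80])
    print("B:", row["responses"][j][:80])
```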
Hake22/test | Hake22 | "2024-12-31T00:17:36Z" | 5 | 0 | [
"language:en",
"license:llama3.3",
"region:us"
] | null | "2024-12-31T00:17:18Z" | ---
license: llama3.3
language:
- en
--- |
Asap7772/elix_persona_eval_4shot_infinstruct5e7 | Asap7772 | "2024-12-31T00:32:55Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T00:32:08Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: responses
sequence: string
- name: pairs
sequence:
sequence: int64
- name: sft_target
dtype: string
- name: level
sequence: string
- name: last_q
dtype: string
splits:
- name: train
num_bytes: 1142450195
num_examples: 31700
download_size: 544309041
dataset_size: 1142450195
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Asap7772/elix_eval_4shot_infdpo_fixed_winrate_gpt4o_pref_train | Asap7772 | "2024-12-31T00:39:35Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T00:39:32Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: level_x
dtype: string
- name: level_id_x
dtype: int64
- name: model_name_x
dtype: string
- name: response_x
dtype: string
- name: level_y
dtype: string
- name: level_id_y
dtype: int64
- name: model_name_y
dtype: string
- name: response_y
dtype: string
- name: scorer_level
dtype: string
- name: scorer_level_id
dtype: int64
- name: label
dtype: int64
- name: det_choice
dtype: int64
- name: choice1
dtype: string
- name: reason1
dtype: string
- name: choice2
dtype: string
- name: reason2
dtype: string
splits:
- name: train
num_bytes: 4607874
num_examples: 1023
download_size: 1033083
dataset_size: 4607874
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Asap7772/hh_length_persona_teacherforced_eval_4shot_infdpo | Asap7772 | "2024-12-31T00:49:17Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T00:49:11Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: responses
sequence: string
- name: pairs
sequence:
sequence: int64
- name: sft_target
dtype: string
- name: level
sequence: string
- name: last_q
dtype: string
splits:
- name: train
num_bytes: 91186874
num_examples: 8800
download_size: 47327018
dataset_size: 91186874
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Mice_Orange_10samples_1constraint | haorandai | "2024-12-31T01:29:50Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T01:29:48Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 90898.0
num_examples: 11
download_size: 88314
dataset_size: 90898.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
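The haorandai card above describes a tiny image-plus-text dataset (11 examples, `image` and `text` features). A minimal sketch for loading it and inspecting one example, assuming the repo is public; the `image` feature decodes to a `PIL.Image`:

```python
from datasets import load_dataset

ds = load_dataset("haorandai/Dec30_Clean_Mice_Orange_10samples_1constraint", split="train")
example = ds[0]

print(example["text"])     # the paired text / constraint string
image = example["image"]   # decoded as a PIL.Image by the `image` feature
print(image.size)
image.save("example.png")  # write one sample locally for inspection
```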
Asap7772/elix_persona_eval_infbase1e5early | Asap7772 | "2024-12-31T01:33:39Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T01:33:32Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: responses
sequence: string
- name: pairs
sequence:
sequence: int64
- name: sft_target
dtype: string
- name: level
sequence: string
- name: last_q
dtype: string
splits:
- name: train
num_bytes: 111986000
num_examples: 3170
download_size: 54553259
dataset_size: 111986000
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Asap7772/elix_persona_eval_infbase5e6early | Asap7772 | "2024-12-31T01:38:03Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T01:37:55Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: responses
sequence: string
- name: pairs
sequence:
sequence: int64
- name: sft_target
dtype: string
- name: level
sequence: string
- name: last_q
dtype: string
splits:
- name: train
num_bytes: 111593052
num_examples: 3170
download_size: 54314159
dataset_size: 111593052
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Asap7772/elix_persona_eval_infbase5e6early_extracted | Asap7772 | "2024-12-31T01:41:36Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T01:41:28Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: responses
sequence: string
- name: pairs
sequence:
sequence: int64
- name: sft_target
dtype: string
- name: level
sequence: string
- name: last_q
dtype: string
- name: extracted_persona
sequence: string
splits:
- name: train
num_bytes: 111470047
num_examples: 3170
download_size: 54571395
dataset_size: 111470047
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep-details | open-llm-leaderboard | "2024-12-31T01:58:13Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T01:54:55Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T01-54-54.632537](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep/results_2024-12-31T01-54-54.632537.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_strict_acc,none\": 0.1977818853974122,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.01714125471908489,\n \"\
inst_level_strict_acc,none\": 0.3117505995203837,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.21072088724584104,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \
\ \"exact_match,none\": 0.0007552870090634441,\n \"exact_match_stderr,none\"\
: 0.0007553271226197802,\n \"acc,none\": 0.15616688829787234,\n \
\ \"acc_stderr,none\": 0.003309572479029828,\n \"inst_level_loose_acc,none\"\
: 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"acc_norm,none\": 0.31171358152808404,\n \"acc_norm_stderr,none\"\
: 0.00496522477211175,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.31730602325985074,\n\
\ \"acc_norm_stderr,none\": 0.005681506961573983,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.45989304812834225,\n\
\ \"acc_norm_stderr,none\": 0.036543642520475775\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\":\
\ 0.02572139890141637\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.484,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.084,\n \"acc_norm_stderr,none\":\
\ 0.017578738526776348\n },\n \"leaderboard_bbh_hyperbaton\": {\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.164,\n\
\ \"acc_norm_stderr,none\": 0.02346526100207671\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.273972602739726,\n \"acc_norm_stderr,none\"\
: 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\":\
\ 0.021450980824038166\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.47191011235955055,\n\
\ \"acc_norm_stderr,none\": 0.03752294651708463\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\":\
\ 0.03166998503010743\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\":\
\ 0.02993325909419153\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2651006711409396,\n\
\ \"acc_norm_stderr,none\": 0.012798461285699829,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2474747474747475,\n \"acc_norm_stderr,none\": 0.030746300742124484\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2673992673992674,\n\
\ \"acc_norm_stderr,none\": 0.018959004502646776\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.1977818853974122,\n \"prompt_level_strict_acc_stderr,none\": 0.017141254719084894,\n\
\ \"inst_level_strict_acc,none\": 0.3117505995203837,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.21072088724584104,\n \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n\
\ \"inst_level_loose_acc,none\": 0.3213429256594724,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0007552870090634441,\n \"exact_match_stderr,none\"\
: 0.0007553271226197802,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_geometry_hard\": {\n \"\
alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.15616688829787234,\n \"acc_stderr,none\"\
: 0.003309572479029828\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.3425925925925926,\n \"acc_norm_stderr,none\"\
: 0.016785990347188025,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.5,\n\
\ \"acc_norm_stderr,none\": 0.031686212526223896\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.23046875,\n \"acc_norm_stderr,none\"\
: 0.026372364120563745\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ }\n },\n \"leaderboard\": {\n \"prompt_level_strict_acc,none\"\
: 0.1977818853974122,\n \"prompt_level_strict_acc_stderr,none\": 0.01714125471908489,\n\
\ \"inst_level_strict_acc,none\": 0.3117505995203837,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \"exact_match,none\"\
: 0.0007552870090634441,\n \"exact_match_stderr,none\": 0.0007553271226197802,\n\
\ \"acc,none\": 0.15616688829787234,\n \"acc_stderr,none\": 0.003309572479029828,\n\
\ \"inst_level_loose_acc,none\": 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"acc_norm,none\": 0.31171358152808404,\n \"acc_norm_stderr,none\"\
: 0.00496522477211175,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31730602325985074,\n \"acc_norm_stderr,none\"\
: 0.005681506961573983,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.45989304812834225,\n \"acc_norm_stderr,none\"\
: 0.036543642520475775\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.084,\n \"acc_norm_stderr,none\": 0.017578738526776348\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.273972602739726,\n\
\ \"acc_norm_stderr,none\": 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.47191011235955055,\n \"acc_norm_stderr,none\"\
: 0.03752294651708463\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2651006711409396,\n\
\ \"acc_norm_stderr,none\": 0.012798461285699829,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2474747474747475,\n\
\ \"acc_norm_stderr,none\": 0.030746300742124484\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2673992673992674,\n \"acc_norm_stderr,none\": 0.018959004502646776\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.1977818853974122,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.017141254719084894,\n \
\ \"inst_level_strict_acc,none\": 0.3117505995203837,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \"inst_level_loose_acc,none\"\
: 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0007552870090634441,\n\
\ \"exact_match_stderr,none\": 0.0007553271226197802,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n\
\ \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\"\
: 0.0051813471502590676,\n \"exact_match_stderr,none\": 0.0051813471502590676\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15616688829787234,\n\
\ \"acc_stderr,none\": 0.003309572479029828\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3425925925925926,\n \"acc_norm_stderr,none\"\
: 0.016785990347188025,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.23046875,\n\
\ \"acc_norm_stderr,none\": 0.026372364120563745\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_ifeval
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_ifeval_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T01-54-54.632537.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T01_54_54.632537
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T01-54-54.632537.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T01-54-54.632537.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep)
The dataset is composed of 38 configurations, each corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
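After this call, `data` is a standard `datasets.Dataset`. A quick way to inspect the per-sample details without assuming particular column names (the exact fields depend on the harness version that produced the run):

```python
# Inspect the loaded split generically.
print(data.num_rows)
print(data.column_names)

first = data[0]
for key, value in first.items():
    print(key, "->", str(value)[:80])  # truncate long fields for readability
```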
## Latest results
These are the [latest results from run 2024-12-31T01-54-54.632537](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_2ep/results_2024-12-31T01-54-54.632537.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in its own results file and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_strict_acc,none": 0.1977818853974122,
"prompt_level_strict_acc_stderr,none": 0.01714125471908489,
"inst_level_strict_acc,none": 0.3117505995203837,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A",
"acc_norm,none": 0.31171358152808404,
"acc_norm_stderr,none": 0.00496522477211175,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31730602325985074,
"acc_norm_stderr,none": 0.005681506961573983,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.45989304812834225,
"acc_norm_stderr,none": 0.036543642520475775
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.084,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.47191011235955055,
"acc_norm_stderr,none": 0.03752294651708463
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2651006711409396,
"acc_norm_stderr,none": 0.012798461285699829,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2474747474747475,
"acc_norm_stderr,none": 0.030746300742124484
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2673992673992674,
"acc_norm_stderr,none": 0.018959004502646776
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.1977818853974122,
"prompt_level_strict_acc_stderr,none": 0.017141254719084894,
"inst_level_strict_acc,none": 0.3117505995203837,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828
},
"leaderboard_musr": {
"acc_norm,none": 0.3425925925925926,
"acc_norm_stderr,none": 0.016785990347188025,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.23046875,
"acc_norm_stderr,none": 0.026372364120563745
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
}
},
"leaderboard": {
"prompt_level_strict_acc,none": 0.1977818853974122,
"prompt_level_strict_acc_stderr,none": 0.01714125471908489,
"inst_level_strict_acc,none": 0.3117505995203837,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A",
"acc_norm,none": 0.31171358152808404,
"acc_norm_stderr,none": 0.00496522477211175,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31730602325985074,
"acc_norm_stderr,none": 0.005681506961573983,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.45989304812834225,
"acc_norm_stderr,none": 0.036543642520475775
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.084,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.47191011235955055,
"acc_norm_stderr,none": 0.03752294651708463
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2651006711409396,
"acc_norm_stderr,none": 0.012798461285699829,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2474747474747475,
"acc_norm_stderr,none": 0.030746300742124484
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2673992673992674,
"acc_norm_stderr,none": 0.018959004502646776
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.1977818853974122,
"prompt_level_strict_acc_stderr,none": 0.017141254719084894,
"inst_level_strict_acc,none": 0.3117505995203837,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828
},
"leaderboard_musr": {
"acc_norm,none": 0.3425925925925926,
"acc_norm_stderr,none": 0.016785990347188025,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.23046875,
"acc_norm_stderr,none": 0.026372364120563745
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam-details | open-llm-leaderboard | "2024-12-31T02:04:08Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:00:45Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"latest\" split always points to the results of\
\ the most recent run.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T02-00-42.876223](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam/results_2024-12-31T02-00-42.876223.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks; you can find each one in the results files and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.3129496402877698,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.0015105740181268882,\n \"exact_match_stderr,none\": 0.0010687102182054776,\n\
\ \"acc,none\": 0.15741356382978725,\n \"acc_stderr,none\"\
: 0.0033203009076807686,\n \"prompt_level_strict_acc,none\": 0.1922365988909427,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n \
\ \"inst_level_loose_acc,none\": 0.3225419664268585,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.20517560073937152,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.01737807119675965,\n \
\ \"acc_norm,none\": 0.3162537294071864,\n \"acc_norm_stderr,none\"\
: 0.004974097225829995,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.32164554764797776,\n\
\ \"acc_norm_stderr,none\": 0.005689119502687034,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.46524064171123,\n\
\ \"acc_norm_stderr,none\": 0.036573080985189216\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.424,\n\
\ \"acc_norm_stderr,none\": 0.03131803437491622\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\":\
\ 0.03166998503010743\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.516,\n \
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\":\
\ 0.024960691989171963\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.344,\n \"acc_norm_stderr,none\":\
\ 0.03010450339231644\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.58,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.2671232876712329,\n \"acc_norm_stderr,none\": 0.03674407640319397\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.128,\n \
\ \"acc_norm_stderr,none\": 0.021172081336336534\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.136,\n\
\ \"acc_norm_stderr,none\": 0.021723342617052086\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\":\
\ 0.022249407735450245\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.528,\n \"acc_norm_stderr,none\": 0.031636489531544396\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2676174496644295,\n\
\ \"acc_norm_stderr,none\": 0.012833133016475175,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.23737373737373738,\n \"acc_norm_stderr,none\": 0.030313710538198924\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.27289377289377287,\n\
\ \"acc_norm_stderr,none\": 0.019080840171987832\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27455357142857145,\n \"acc_norm_stderr,none\"\
: 0.021108747290633768\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.1922365988909427,\n \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n\
\ \"inst_level_strict_acc,none\": 0.3129496402877698,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.20517560073937152,\n \"prompt_level_loose_acc_stderr,none\": 0.01737807119675965,\n\
\ \"inst_level_loose_acc,none\": 0.3225419664268585,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0015105740181268882,\n \"exact_match_stderr,none\"\
: 0.0010687102182054776,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.15741356382978725,\n \"acc_stderr,none\"\
: 0.0033203009076807686\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.35185185185185186,\n \"acc_norm_stderr,none\"\
: 0.01684344911189693,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.516,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.23046875,\n \"acc_norm_stderr,none\"\
: 0.026372364120563745\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.3129496402877698,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.0015105740181268882,\n \"exact_match_stderr,none\"\
: 0.0010687102182054776,\n \"acc,none\": 0.15741356382978725,\n \"\
acc_stderr,none\": 0.0033203009076807686,\n \"prompt_level_strict_acc,none\"\
: 0.1922365988909427,\n \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n\
\ \"inst_level_loose_acc,none\": 0.3225419664268585,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.20517560073937152,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01737807119675965,\n \"acc_norm,none\"\
: 0.3162537294071864,\n \"acc_norm_stderr,none\": 0.004974097225829995,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.32164554764797776,\n \"acc_norm_stderr,none\": 0.005689119502687034,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.46524064171123,\n \"acc_norm_stderr,none\"\
: 0.036573080985189216\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.344,\n \"acc_norm_stderr,none\": 0.03010450339231644\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2671232876712329,\n\
\ \"acc_norm_stderr,none\": 0.03674407640319397\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.031636489531544396\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2676174496644295,\n\
\ \"acc_norm_stderr,none\": 0.012833133016475175,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.23737373737373738,\n\
\ \"acc_norm_stderr,none\": 0.030313710538198924\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.27289377289377287,\n \"acc_norm_stderr,none\": 0.019080840171987832\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27455357142857145,\n \"acc_norm_stderr,none\"\
: 0.021108747290633768\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.1922365988909427,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n \
\ \"inst_level_strict_acc,none\": 0.3129496402877698,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.20517560073937152,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01737807119675965,\n \"inst_level_loose_acc,none\"\
: 0.3225419664268585,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0015105740181268882,\n\
\ \"exact_match_stderr,none\": 0.0010687102182054776,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\": {\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15741356382978725,\n\
\ \"acc_stderr,none\": 0.0033203009076807686\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.35185185185185186,\n \"acc_norm_stderr,none\"\
: 0.01684344911189693,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.23046875,\n\
\ \"acc_norm_stderr,none\": 0.026372364120563745\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_ifeval
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-00-42.876223.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T02_00_42.876223
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-00-42.876223.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-00-42.876223.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the results of the most recent run.
An additional configuration "results" store all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
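For instance, as a minimal sketch that uses only names appearing in this card's configuration list, you can enumerate the available configurations and load one specific timestamped run instead of "latest":
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam-details"

# Enumerate the per-task configurations available in this repository.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations, e.g. {configs[0]}")

# Load a specific run by its timestamped split rather than "latest";
# useful when several evaluation runs share the same repository.
data = load_dataset(
    repo,
    name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam__leaderboard_ifeval",
    split="2024_12_31T02_00_42.876223",
)
```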
## Latest results
These are the [latest results from run 2024-12-31T02-00-42.876223](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam/results_2024-12-31T02-00-42.876223.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results files and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.3129496402877698,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"acc,none": 0.15741356382978725,
"acc_stderr,none": 0.0033203009076807686,
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_loose_acc,none": 0.3225419664268585,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20517560073937152,
"prompt_level_loose_acc_stderr,none": 0.01737807119675965,
"acc_norm,none": 0.3162537294071864,
"acc_norm_stderr,none": 0.004974097225829995,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.32164554764797776,
"acc_norm_stderr,none": 0.005689119502687034,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.46524064171123,
"acc_norm_stderr,none": 0.036573080985189216
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.344,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2671232876712329,
"acc_norm_stderr,none": 0.03674407640319397
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2676174496644295,
"acc_norm_stderr,none": 0.012833133016475175,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.23737373737373738,
"acc_norm_stderr,none": 0.030313710538198924
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27289377289377287,
"acc_norm_stderr,none": 0.019080840171987832
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27455357142857145,
"acc_norm_stderr,none": 0.021108747290633768
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_strict_acc,none": 0.3129496402877698,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20517560073937152,
"prompt_level_loose_acc_stderr,none": 0.01737807119675965,
"inst_level_loose_acc,none": 0.3225419664268585,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15741356382978725,
"acc_stderr,none": 0.0033203009076807686
},
"leaderboard_musr": {
"acc_norm,none": 0.35185185185185186,
"acc_norm_stderr,none": 0.01684344911189693,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.23046875,
"acc_norm_stderr,none": 0.026372364120563745
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.3129496402877698,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"acc,none": 0.15741356382978725,
"acc_stderr,none": 0.0033203009076807686,
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_loose_acc,none": 0.3225419664268585,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20517560073937152,
"prompt_level_loose_acc_stderr,none": 0.01737807119675965,
"acc_norm,none": 0.3162537294071864,
"acc_norm_stderr,none": 0.004974097225829995,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.32164554764797776,
"acc_norm_stderr,none": 0.005689119502687034,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.46524064171123,
"acc_norm_stderr,none": 0.036573080985189216
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.344,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2671232876712329,
"acc_norm_stderr,none": 0.03674407640319397
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2676174496644295,
"acc_norm_stderr,none": 0.012833133016475175,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.23737373737373738,
"acc_norm_stderr,none": 0.030313710538198924
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27289377289377287,
"acc_norm_stderr,none": 0.019080840171987832
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27455357142857145,
"acc_norm_stderr,none": 0.021108747290633768
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_strict_acc,none": 0.3129496402877698,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20517560073937152,
"prompt_level_loose_acc_stderr,none": 0.01737807119675965,
"inst_level_loose_acc,none": 0.3225419664268585,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15741356382978725,
"acc_stderr,none": 0.0033203009076807686
},
"leaderboard_musr": {
"acc_norm,none": 0.35185185185185186,
"acc_norm_stderr,none": 0.01684344911189693,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.23046875,
"acc_norm_stderr,none": 0.026372364120563745
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam-details | open-llm-leaderboard | "2024-12-31T02:05:15Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:01:57Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T02-01-56.288497](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam/results_2024-12-31T02-01-56.288497.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.0007552870090634441,\n \"exact_match_stderr,none\"\
: 0.0007553271226197802,\n \"inst_level_loose_acc,none\": 0.30935251798561153,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\"\
: 0.15633311170212766,\n \"acc_stderr,none\": 0.0033110071989607523,\n\
\ \"acc_norm,none\": 0.31469710727720845,\n \"acc_norm_stderr,none\"\
: 0.00497415167471417,\n \"inst_level_strict_acc,none\": 0.2997601918465228,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.19593345656192238,\n \"prompt_level_loose_acc_stderr,none\": 0.017080611553455465,\n\
\ \"prompt_level_strict_acc,none\": 0.18484288354898337,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.01670417955850395,\n \"\
alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.31921541399062664,\n \"acc_norm_stderr,none\"\
: 0.005685735777527928,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.708,\n\
\ \"acc_norm_stderr,none\": 0.028814320402205634\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.46524064171123,\n \"acc_norm_stderr,none\"\
: 0.036573080985189216\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.412,\n\
\ \"acc_norm_stderr,none\": 0.03119159602602282\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\":\
\ 0.03167708558254714\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.516,\n \
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\":\
\ 0.025537121574548162\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.58,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.273972602739726,\n \"acc_norm_stderr,none\": 0.03703787583167248\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.132,\n \
\ \"acc_norm_stderr,none\": 0.021450980824038166\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.48314606741573035,\n \"acc_norm_stderr,none\"\
: 0.037560944447344834\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.136,\n\
\ \"acc_norm_stderr,none\": 0.021723342617052086\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2701342281879195,\n\
\ \"acc_norm_stderr,none\": 0.01287285387039935,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2474747474747475,\n \"acc_norm_stderr,none\": 0.030746300742124484\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2692307692307692,\n\
\ \"acc_norm_stderr,none\": 0.019000027142915304\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28125,\n \"acc_norm_stderr,none\"\
: 0.021265785688273954\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.18484288354898337,\n \"prompt_level_strict_acc_stderr,none\": 0.01670417955850395,\n\
\ \"inst_level_strict_acc,none\": 0.2997601918465228,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.19593345656192238,\n \"prompt_level_loose_acc_stderr,none\": 0.017080611553455465,\n\
\ \"inst_level_loose_acc,none\": 0.30935251798561153,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0007552870090634441,\n \"exact_match_stderr,none\"\
: 0.0007553271226197802,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_geometry_hard\": {\n \"\
alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.15633311170212766,\n \"acc_stderr,none\"\
: 0.0033110071989607523\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.3505291005291005,\n \"acc_norm_stderr,none\"\
: 0.016835992192137873,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.512,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2265625,\n \"acc_norm_stderr,none\"\
: 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ }\n },\n \"leaderboard\": {\n \"exact_match,none\": 0.0007552870090634441,\n\
\ \"exact_match_stderr,none\": 0.0007553271226197802,\n \"inst_level_loose_acc,none\"\
: 0.30935251798561153,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"acc,none\": 0.15633311170212766,\n \"acc_stderr,none\": 0.0033110071989607523,\n\
\ \"acc_norm,none\": 0.31469710727720845,\n \"acc_norm_stderr,none\"\
: 0.00497415167471417,\n \"inst_level_strict_acc,none\": 0.2997601918465228,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.19593345656192238,\n \"prompt_level_loose_acc_stderr,none\": 0.017080611553455465,\n\
\ \"prompt_level_strict_acc,none\": 0.18484288354898337,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.01670417955850395,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31921541399062664,\n \"acc_norm_stderr,none\"\
: 0.005685735777527928,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.708,\n \"acc_norm_stderr,none\": 0.028814320402205634\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.46524064171123,\n \"acc_norm_stderr,none\"\
: 0.036573080985189216\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\"\
: 0.412,\n \"acc_norm_stderr,none\": 0.03119159602602282\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.273972602739726,\n\
\ \"acc_norm_stderr,none\": 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.48314606741573035,\n \"acc_norm_stderr,none\"\
: 0.037560944447344834\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2701342281879195,\n\
\ \"acc_norm_stderr,none\": 0.01287285387039935,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2474747474747475,\n\
\ \"acc_norm_stderr,none\": 0.030746300742124484\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2692307692307692,\n \"acc_norm_stderr,none\": 0.019000027142915304\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28125,\n \"acc_norm_stderr,none\": 0.021265785688273954\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.18484288354898337,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.01670417955850395,\n \"inst_level_strict_acc,none\": 0.2997601918465228,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.19593345656192238,\n \"prompt_level_loose_acc_stderr,none\": 0.017080611553455465,\n\
\ \"inst_level_loose_acc,none\": 0.30935251798561153,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.0007552870090634441,\n \"exact_match_stderr,none\": 0.0007553271226197802,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n\
\ \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\"\
: 0.0051813471502590676,\n \"exact_match_stderr,none\": 0.0051813471502590676\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15633311170212766,\n\
\ \"acc_stderr,none\": 0.0033110071989607523\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3505291005291005,\n \"acc_norm_stderr,none\"\
: 0.016835992192137873,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_ifeval
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-01-56.288497.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T02_01_56.288497
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-01-56.288497.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-01-56.288497.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
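The same pattern works for any of the 38 task configurations. The split can be either the moving `latest` alias or the literal run timestamp, which pins the exact evaluation. A minimal sketch (the config and split names below are copied from the `configs` list in this card's metadata):
```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam-details"

# Load the per-sample records for one task, pinned to the run timestamp
# instead of the moving "latest" alias.
gpqa_main = load_dataset(
    REPO,
    name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam__leaderboard_gpqa_main",
    split="2024_12_31T02_01_56.288497",
)

print(gpqa_main)               # per-sample outputs for this task and run
print(gpqa_main.column_names)  # inspect the available fields
```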
## Latest results
These are the [latest results from run 2024-12-31T02-01-56.288497](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam/results_2024-12-31T02-01-56.288497.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"inst_level_loose_acc,none": 0.30935251798561153,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.15633311170212766,
"acc_stderr,none": 0.0033110071989607523,
"acc_norm,none": 0.31469710727720845,
"acc_norm_stderr,none": 0.00497415167471417,
"inst_level_strict_acc,none": 0.2997601918465228,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.19593345656192238,
"prompt_level_loose_acc_stderr,none": 0.017080611553455465,
"prompt_level_strict_acc,none": 0.18484288354898337,
"prompt_level_strict_acc_stderr,none": 0.01670417955850395,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31921541399062664,
"acc_norm_stderr,none": 0.005685735777527928,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.708,
"acc_norm_stderr,none": 0.028814320402205634
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.46524064171123,
"acc_norm_stderr,none": 0.036573080985189216
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.412,
"acc_norm_stderr,none": 0.03119159602602282
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.48314606741573035,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2701342281879195,
"acc_norm_stderr,none": 0.01287285387039935,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2474747474747475,
"acc_norm_stderr,none": 0.030746300742124484
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915304
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28125,
"acc_norm_stderr,none": 0.021265785688273954
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.18484288354898337,
"prompt_level_strict_acc_stderr,none": 0.01670417955850395,
"inst_level_strict_acc,none": 0.2997601918465228,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.19593345656192238,
"prompt_level_loose_acc_stderr,none": 0.017080611553455465,
"inst_level_loose_acc,none": 0.30935251798561153,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15633311170212766,
"acc_stderr,none": 0.0033110071989607523
},
"leaderboard_musr": {
"acc_norm,none": 0.3505291005291005,
"acc_norm_stderr,none": 0.016835992192137873,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
}
},
"leaderboard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"inst_level_loose_acc,none": 0.30935251798561153,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.15633311170212766,
"acc_stderr,none": 0.0033110071989607523,
"acc_norm,none": 0.31469710727720845,
"acc_norm_stderr,none": 0.00497415167471417,
"inst_level_strict_acc,none": 0.2997601918465228,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.19593345656192238,
"prompt_level_loose_acc_stderr,none": 0.017080611553455465,
"prompt_level_strict_acc,none": 0.18484288354898337,
"prompt_level_strict_acc_stderr,none": 0.01670417955850395,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31921541399062664,
"acc_norm_stderr,none": 0.005685735777527928,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.708,
"acc_norm_stderr,none": 0.028814320402205634
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.46524064171123,
"acc_norm_stderr,none": 0.036573080985189216
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.412,
"acc_norm_stderr,none": 0.03119159602602282
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.48314606741573035,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2701342281879195,
"acc_norm_stderr,none": 0.01287285387039935,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2474747474747475,
"acc_norm_stderr,none": 0.030746300742124484
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915304
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28125,
"acc_norm_stderr,none": 0.021265785688273954
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.18484288354898337,
"prompt_level_strict_acc_stderr,none": 0.01670417955850395,
"inst_level_strict_acc,none": 0.2997601918465228,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.19593345656192238,
"prompt_level_loose_acc_stderr,none": 0.017080611553455465,
"inst_level_loose_acc,none": 0.30935251798561153,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007553271226197802,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15633311170212766,
"acc_stderr,none": 0.0033110071989607523
},
"leaderboard_musr": {
"acc_norm,none": 0.3505291005291005,
"acc_norm_stderr,none": 0.016835992192137873,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
seonggyun/cable_bent_wire | seonggyun | "2024-12-31T02:11:26Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:11:19Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 23197311.168
num_examples: 2016
download_size: 13518978
dataset_size: 23197311.168
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/cable_cable_swap | seonggyun | "2024-12-31T02:11:35Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:11:30Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 24734470.848
num_examples: 2016
download_size: 15086577
dataset_size: 24734470.848
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/cable_combined | seonggyun | "2024-12-31T02:11:50Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:11:40Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 23098980.768
num_examples: 2016
download_size: 13828102
dataset_size: 23098980.768
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/cable_cut_inner_insulation | seonggyun | "2024-12-31T02:11:59Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:11:54Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 23949488.832
num_examples: 2016
download_size: 13617103
dataset_size: 23949488.832
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/cable_cut_outer_insulation | seonggyun | "2024-12-31T02:12:09Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:12:03Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 25005594.624
num_examples: 2016
download_size: 13565840
dataset_size: 25005594.624
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/cable_missing_cable | seonggyun | "2024-12-31T02:12:21Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:12:13Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 23014830.912
num_examples: 2016
download_size: 12828436
dataset_size: 23014830.912
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/cable_poke_insulation | seonggyun | "2024-12-31T02:12:41Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:12:35Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 23153741.376
num_examples: 2016
download_size: 13109574
dataset_size: 23153741.376
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/capsule_crack | seonggyun | "2024-12-31T02:12:56Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:12:46Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 9667969.44
num_examples: 2016
download_size: 5503292
dataset_size: 9667969.44
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/capsule_faulty_imprint | seonggyun | "2024-12-31T02:13:07Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:00Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 9172656.384
num_examples: 2016
download_size: 5085672
dataset_size: 9172656.384
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/capsule_poke | seonggyun | "2024-12-31T02:13:17Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:11Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 9304099.584
num_examples: 2016
download_size: 5011707
dataset_size: 9304099.584
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/capsule_scratch | seonggyun | "2024-12-31T02:13:27Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:22Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 9947441.472
num_examples: 2016
download_size: 5735791
dataset_size: 9947441.472
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/capsule_squeeze | seonggyun | "2024-12-31T02:13:38Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:32Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 9320802.144
num_examples: 2016
download_size: 6162052
dataset_size: 9320802.144
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/bottle_broken_large | seonggyun | "2024-12-31T02:13:46Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:42Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 13316129.088
num_examples: 2016
download_size: 9639997
dataset_size: 13316129.088
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/bottle_broken_small | seonggyun | "2024-12-31T02:13:53Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:49Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 14902581.984
num_examples: 2016
download_size: 8487509
dataset_size: 14902581.984
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
seonggyun/bottle_contamination | seonggyun | "2024-12-31T02:14:01Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:13:57Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': image
'1': mask
splits:
- name: train
num_bytes: 14369920.512
num_examples: 2016
download_size: 8724870
dataset_size: 14369920.512
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
on1onmangoes/RADARHEYZZKWEBMAIN | on1onmangoes | "2024-12-31T02:17:14Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:17:13Z" | ---
dataset_info:
features:
- name: Episode
dtype: int64
- name: Section
dtype: string
- name: Details
dtype: string
- name: Categories
dtype: string
splits:
- name: train
num_bytes: 7214
num_examples: 12
download_size: 6958
dataset_size: 7214
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep-details | open-llm-leaderboard | "2024-12-31T02:21:21Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:18:07Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T02-18-06.890327](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep/results_2024-12-31T02-18-06.890327.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks; you can find each one in the results and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.30989752237644314,\n \"acc_norm_stderr,none\"\
: 0.004957296149977267,\n \"inst_level_strict_acc,none\": 0.302158273381295,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.19408502772643252,\n \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n\
\ \"acc,none\": 0.15965757978723405,\n \"acc_stderr,none\"\
: 0.003339427744094266,\n \"prompt_level_loose_acc,none\": 0.2033271719038817,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.017319718641834708,\n \
\ \"inst_level_loose_acc,none\": 0.3117505995203837,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.0015105740181268882,\n \
\ \"exact_match_stderr,none\": 0.0010687102182054776,\n \"alias\":\
\ \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.31487588960249957,\n \"acc_norm_stderr,none\": 0.005673246388326131,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.47058823529411764,\n\
\ \"acc_norm_stderr,none\": 0.03659829510813266\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\":\
\ 0.02572139890141637\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.492,\n\
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\":\
\ 0.017953084777052892\n },\n \"leaderboard_bbh_hyperbaton\": {\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.388,\n \"acc_norm_stderr,none\": 0.030881038748993974\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.164,\n\
\ \"acc_norm_stderr,none\": 0.02346526100207671\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.273972602739726,\n \"acc_norm_stderr,none\"\
: 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\":\
\ 0.021723342617052086\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.4550561797752809,\n\
\ \"acc_norm_stderr,none\": 0.03743016495716991\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\":\
\ 0.0316851985511992\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\":\
\ 0.021172081336336534\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2625838926174497,\n\
\ \"acc_norm_stderr,none\": 0.012760527194994205,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.25252525252525254,\n \"acc_norm_stderr,none\": 0.03095405547036587\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2619047619047619,\n\
\ \"acc_norm_stderr,none\": 0.01883343978951454\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.26785714285714285,\n \"acc_norm_stderr,none\"\
: 0.02094574294163546\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.19408502772643252,\n \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n\
\ \"inst_level_strict_acc,none\": 0.302158273381295,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2033271719038817,\n \"prompt_level_loose_acc_stderr,none\": 0.017319718641834708,\n\
\ \"inst_level_loose_acc,none\": 0.3117505995203837,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0015105740181268882,\n \"exact_match_stderr,none\"\
: 0.0010687102182054776,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.15965757978723405,\n \"acc_stderr,none\"\
: 0.003339427744094266\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.34656084656084657,\n \"acc_norm_stderr,none\"\
: 0.016776413841165225,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.512,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2265625,\n \"acc_norm_stderr,none\"\
: 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\": 0.02915021337415965\n\
\ }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.30989752237644314,\n\
\ \"acc_norm_stderr,none\": 0.004957296149977267,\n \"inst_level_strict_acc,none\"\
: 0.302158273381295,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_strict_acc,none\": 0.19408502772643252,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.01701938055074939,\n \"acc,none\": 0.15965757978723405,\n \"acc_stderr,none\"\
: 0.003339427744094266,\n \"prompt_level_loose_acc,none\": 0.2033271719038817,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.017319718641834708,\n \
\ \"inst_level_loose_acc,none\": 0.3117505995203837,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.0015105740181268882,\n \"exact_match_stderr,none\"\
: 0.0010687102182054776,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31487588960249957,\n \"acc_norm_stderr,none\"\
: 0.005673246388326131,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47058823529411764,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\": 0.030881038748993974\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.273972602739726,\n\
\ \"acc_norm_stderr,none\": 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \"\
acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2625838926174497,\n\
\ \"acc_norm_stderr,none\": 0.012760527194994205,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.25252525252525254,\n\
\ \"acc_norm_stderr,none\": 0.03095405547036587\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2619047619047619,\n \"acc_norm_stderr,none\": 0.01883343978951454\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.26785714285714285,\n \"acc_norm_stderr,none\"\
: 0.02094574294163546\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.19408502772643252,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n \
\ \"inst_level_strict_acc,none\": 0.302158273381295,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2033271719038817,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017319718641834708,\n \"inst_level_loose_acc,none\"\
: 0.3117505995203837,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0015105740181268882,\n\
\ \"exact_match_stderr,none\": 0.0010687102182054776,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\": {\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15965757978723405,\n\
\ \"acc_stderr,none\": 0.003339427744094266\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.34656084656084657,\n \"acc_norm_stderr,none\"\
: 0.016776413841165225,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.304,\n \"acc_norm_stderr,none\": 0.02915021337415965\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_ifeval
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-18-06.890327.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T02_18_06.890327
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-18-06.890327.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-18-06.890327.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
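If you only need the aggregated metrics rather than the per-sample details, they live in the additional "results" configuration mentioned above. A minimal sketch, assuming that configuration uses the same split convention as the example:
```python
from datasets import load_dataset

# Aggregated metrics for the run. The "results" config name comes from the card
# text above; "latest" is assumed to mirror the split naming in the example.
results = load_dataset(
    "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep-details",
    name="results",
    split="latest",
)
```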
## Latest results
These are the [latest results from run 2024-12-31T02-18-06.890327](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_5e-7_3ep_0alp_0lam_1ep/results_2024-12-31T02-18-06.890327.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.30989752237644314,
"acc_norm_stderr,none": 0.004957296149977267,
"inst_level_strict_acc,none": 0.302158273381295,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"acc,none": 0.15965757978723405,
"acc_stderr,none": 0.003339427744094266,
"prompt_level_loose_acc,none": 0.2033271719038817,
"prompt_level_loose_acc_stderr,none": 0.017319718641834708,
"inst_level_loose_acc,none": 0.3117505995203837,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31487588960249957,
"acc_norm_stderr,none": 0.005673246388326131,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2625838926174497,
"acc_norm_stderr,none": 0.012760527194994205,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.25252525252525254,
"acc_norm_stderr,none": 0.03095405547036587
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2619047619047619,
"acc_norm_stderr,none": 0.01883343978951454
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.26785714285714285,
"acc_norm_stderr,none": 0.02094574294163546
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"inst_level_strict_acc,none": 0.302158273381295,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2033271719038817,
"prompt_level_loose_acc_stderr,none": 0.017319718641834708,
"inst_level_loose_acc,none": 0.3117505995203837,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15965757978723405,
"acc_stderr,none": 0.003339427744094266
},
"leaderboard_musr": {
"acc_norm,none": 0.34656084656084657,
"acc_norm_stderr,none": 0.016776413841165225,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.02915021337415965
}
},
"leaderboard": {
"acc_norm,none": 0.30989752237644314,
"acc_norm_stderr,none": 0.004957296149977267,
"inst_level_strict_acc,none": 0.302158273381295,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"acc,none": 0.15965757978723405,
"acc_stderr,none": 0.003339427744094266,
"prompt_level_loose_acc,none": 0.2033271719038817,
"prompt_level_loose_acc_stderr,none": 0.017319718641834708,
"inst_level_loose_acc,none": 0.3117505995203837,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31487588960249957,
"acc_norm_stderr,none": 0.005673246388326131,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2625838926174497,
"acc_norm_stderr,none": 0.012760527194994205,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.25252525252525254,
"acc_norm_stderr,none": 0.03095405547036587
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2619047619047619,
"acc_norm_stderr,none": 0.01883343978951454
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.26785714285714285,
"acc_norm_stderr,none": 0.02094574294163546
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"inst_level_strict_acc,none": 0.302158273381295,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2033271719038817,
"prompt_level_loose_acc_stderr,none": 0.017319718641834708,
"inst_level_loose_acc,none": 0.3117505995203837,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0015105740181268882,
"exact_match_stderr,none": 0.0010687102182054776,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15965757978723405,
"acc_stderr,none": 0.003339427744094266
},
"leaderboard_musr": {
"acc_norm,none": 0.34656084656084657,
"acc_norm_stderr,none": 0.016776413841165225,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.02915021337415965
}
}
```
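Since the aggregated scores above are plain JSON, individual metrics are straightforward to extract programmatically. The following is a minimal sketch, assuming the dictionary shown above has been saved locally as `results.json` (a hypothetical path); metric keys follow the `<metric>,<filter>` pattern used throughout, e.g. `acc_norm,none`.
```python
import json

# A minimal sketch, assuming the dictionary shown above has been saved
# locally as "results.json" (hypothetical path; point it at the JSON file
# downloaded from this repo).
with open("results.json") as f:
    results = json.load(f)

# Metric keys combine metric name and filter, e.g. "acc_norm,none".
bbh = results["leaderboard_bbh"]["acc_norm,none"]
bbh_err = results["leaderboard_bbh"]["acc_norm_stderr,none"]
print(f"BBH acc_norm: {bbh:.4f} +/- {bbh_err:.4f}")
```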
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep-details | open-llm-leaderboard | "2024-12-31T02:26:24Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:23:15Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep)\n\
The dataset is composed of 38 configurations, each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run. Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T02-23-13.868188](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep/results_2024-12-31T02-23-13.868188.json)\
\ (note that there may be results for other tasks in the repo if successive evaluations\
\ didn't cover the same tasks; each can be found in the results and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc,none\": 0.15558510638297873,\n \"acc_stderr,none\"\
: 0.003304540580938003,\n \"prompt_level_loose_acc,none\": 0.20147874306839186,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.017260802262371536,\n \
\ \"inst_level_strict_acc,none\": 0.30335731414868106,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"acc_norm,none\": 0.314307951744714,\n \"acc_norm_stderr,none\"\
: 0.004962524991702922,\n \"prompt_level_strict_acc,none\": 0.1922365988909427,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n \
\ \"inst_level_loose_acc,none\": 0.31534772182254195,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31886825203957647,\n \"acc_norm_stderr,none\"\
: 0.005669827552288653,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.724,\n\
\ \"acc_norm_stderr,none\": 0.02832853727421142\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47058823529411764,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.408,\n\
\ \"acc_norm_stderr,none\": 0.031145209846548512\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\"\
: \" - leaderboard_bbh_geometric_shapes\",\n \"acc_norm,none\": 0.088,\n\
\ \"acc_norm_stderr,none\": 0.017953084777052892\n },\n \
\ \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\":\
\ 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\":\
\ 0.024960691989171963\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\":\
\ 0.022503547243806186\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\":\
\ 0.029658294924545567\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.58,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.2808219178082192,\n \"acc_norm_stderr,none\": 0.037320694849458984\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.128,\n \
\ \"acc_norm_stderr,none\": 0.021172081336336534\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4887640449438202,\n \"acc_norm_stderr,none\"\
: 0.03757281091983857\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.128,\n\
\ \"acc_norm_stderr,none\": 0.021172081336336534\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.26929530201342283,\n\
\ \"acc_norm_stderr,none\": 0.01286000585623481,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2474747474747475,\n \"acc_norm_stderr,none\": 0.030746300742124484\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2802197802197802,\n\
\ \"acc_norm_stderr,none\": 0.019237609141793073\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\"\
: 0.02089005840079951\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.1922365988909427,\n \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n\
\ \"inst_level_strict_acc,none\": 0.30335731414868106,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.20147874306839186,\n \"prompt_level_loose_acc_stderr,none\": 0.017260802262371536,\n\
\ \"inst_level_loose_acc,none\": 0.31534772182254195,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_hard\"\n },\n \
\ \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.15558510638297873,\n \"acc_stderr,none\": 0.003304540580938003\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.3505291005291005,\n\
\ \"acc_norm_stderr,none\": 0.01681511154731285,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\":\
\ 0.02936106757521985\n }\n },\n \"leaderboard\": {\n \"acc,none\"\
: 0.15558510638297873,\n \"acc_stderr,none\": 0.003304540580938003,\n \
\ \"prompt_level_loose_acc,none\": 0.20147874306839186,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.017260802262371536,\n \"inst_level_strict_acc,none\": 0.30335731414868106,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"acc_norm,none\": 0.314307951744714,\n\
\ \"acc_norm_stderr,none\": 0.004962524991702922,\n \"prompt_level_strict_acc,none\"\
: 0.1922365988909427,\n \"prompt_level_strict_acc_stderr,none\": 0.01695755534321269,\n\
\ \"inst_level_loose_acc,none\": 0.31534772182254195,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31886825203957647,\n \"acc_norm_stderr,none\"\
: 0.005669827552288653,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.724,\n \"acc_norm_stderr,none\": 0.02832853727421142\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47058823529411764,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.408,\n \"acc_norm_stderr,none\": 0.031145209846548512\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\": 0.022503547243806186\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2808219178082192,\n\
\ \"acc_norm_stderr,none\": 0.037320694849458984\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4887640449438202,\n \"acc_norm_stderr,none\"\
: 0.03757281091983857\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \"\
acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.26929530201342283,\n\
\ \"acc_norm_stderr,none\": 0.01286000585623481,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2474747474747475,\n\
\ \"acc_norm_stderr,none\": 0.030746300742124484\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2802197802197802,\n \"acc_norm_stderr,none\": 0.019237609141793073\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\": 0.02089005840079951\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.1922365988909427,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.01695755534321269,\n \"inst_level_strict_acc,none\": 0.30335731414868106,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.20147874306839186,\n \"prompt_level_loose_acc_stderr,none\": 0.017260802262371536,\n\
\ \"inst_level_loose_acc,none\": 0.31534772182254195,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15558510638297873,\n\
\ \"acc_stderr,none\": 0.003304540580938003\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3505291005291005,\n \"acc_norm_stderr,none\"\
: 0.01681511154731285,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_ifeval
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-23-13.868188.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T02_23_13.868188
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-23-13.868188.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-23-13.868188.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
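To complement the per-task example above, the aggregated scores can be pulled from the "results" configuration mentioned earlier. Below is a minimal sketch, assuming the configuration is named literally `results` as the description states; if the repo instead follows the per-task naming pattern, substitute that config name.
```python
from datasets import load_dataset

# Aggregated run-level scores. The config name "results" is taken from the
# description above; substitute the repo's actual config name if it differs.
results = load_dataset(
    "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep-details",
    name="results",
    split="latest",
)

# Inspect the available fields; the schema depends on the harness version.
print(results.column_names)
```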
## Latest results
These are the [latest results from run 2024-12-31T02-23-13.868188](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_2ep/results_2024-12-31T02-23-13.868188.json) (note that there may be results for other tasks in the repo if successive evaluations didn't cover the same tasks; each can be found in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc,none": 0.15558510638297873,
"acc_stderr,none": 0.003304540580938003,
"prompt_level_loose_acc,none": 0.20147874306839186,
"prompt_level_loose_acc_stderr,none": 0.017260802262371536,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"acc_norm,none": 0.314307951744714,
"acc_norm_stderr,none": 0.004962524991702922,
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_loose_acc,none": 0.31534772182254195,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31886825203957647,
"acc_norm_stderr,none": 0.005669827552288653,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.724,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.408,
"acc_norm_stderr,none": 0.031145209846548512
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2808219178082192,
"acc_norm_stderr,none": 0.037320694849458984
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4887640449438202,
"acc_norm_stderr,none": 0.03757281091983857
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.26929530201342283,
"acc_norm_stderr,none": 0.01286000585623481,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2474747474747475,
"acc_norm_stderr,none": 0.030746300742124484
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2802197802197802,
"acc_norm_stderr,none": 0.019237609141793073
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20147874306839186,
"prompt_level_loose_acc_stderr,none": 0.017260802262371536,
"inst_level_loose_acc,none": 0.31534772182254195,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15558510638297873,
"acc_stderr,none": 0.003304540580938003
},
"leaderboard_musr": {
"acc_norm,none": 0.3505291005291005,
"acc_norm_stderr,none": 0.01681511154731285,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
}
},
"leaderboard": {
"acc,none": 0.15558510638297873,
"acc_stderr,none": 0.003304540580938003,
"prompt_level_loose_acc,none": 0.20147874306839186,
"prompt_level_loose_acc_stderr,none": 0.017260802262371536,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"acc_norm,none": 0.314307951744714,
"acc_norm_stderr,none": 0.004962524991702922,
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_loose_acc,none": 0.31534772182254195,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31886825203957647,
"acc_norm_stderr,none": 0.005669827552288653,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.724,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.408,
"acc_norm_stderr,none": 0.031145209846548512
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2808219178082192,
"acc_norm_stderr,none": 0.037320694849458984
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4887640449438202,
"acc_norm_stderr,none": 0.03757281091983857
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.26929530201342283,
"acc_norm_stderr,none": 0.01286000585623481,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2474747474747475,
"acc_norm_stderr,none": 0.030746300742124484
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2802197802197802,
"acc_norm_stderr,none": 0.019237609141793073
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.1922365988909427,
"prompt_level_strict_acc_stderr,none": 0.01695755534321269,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20147874306839186,
"prompt_level_loose_acc_stderr,none": 0.017260802262371536,
"inst_level_loose_acc,none": 0.31534772182254195,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15558510638297873,
"acc_stderr,none": 0.003304540580938003
},
"leaderboard_musr": {
"acc_norm,none": 0.3505291005291005,
"acc_norm_stderr,none": 0.01681511154731285,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
}
}
```
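Every per-task entry above follows the same shape: an `alias` plus one or more `metric,filter` pairs (e.g. `acc_norm,none`) with matching `_stderr` fields. As a minimal sketch of walking these results — assuming you have saved the JSON above locally (the filename `results.json` is a placeholder, not something produced by the run):
```python
import json

# Placeholder path: save the results JSON shown above to this file first.
with open("results.json") as f:
    results = json.load(f)

# The top-level "all" key duplicates the per-task entries, so skip it.
# IFEval reports prompt/inst-level keys instead and is also skipped here.
for task, metrics in results.items():
    if task == "all":
        continue
    for key in ("acc_norm,none", "acc,none", "exact_match,none"):
        if key in metrics:
            stderr = metrics.get(key.replace(",", "_stderr,"))
            print(f"{task}: {metrics[key]:.3f} (stderr: {stderr})")
            break
```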
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep-details | open-llm-leaderboard | "2024-12-31T02:27:20Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T02:24:10Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T02-24-10.109501](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep/results_2024-12-31T02-24-10.109501.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_loose_acc,none\": 0.20702402957486138,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017435865587996542,\n \"\
inst_level_loose_acc,none\": 0.3237410071942446,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"acc_norm,none\": 0.31210273706057856,\n \"acc_norm_stderr,none\"\
: 0.004969675854524385,\n \"prompt_level_strict_acc,none\": 0.19038817005545286,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.016895129407634615,\n \
\ \"acc,none\": 0.15915890957446807,\n \"acc_stderr,none\":\
\ 0.00333519766069695,\n \"inst_level_strict_acc,none\": 0.30815347721822545,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.3166116993577504,\n \"acc_norm_stderr,none\": 0.0056817406499362405,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.46524064171123,\n\
\ \"acc_norm_stderr,none\": 0.036573080985189216\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\":\
\ 0.025537121574548162\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.492,\n\
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.084,\n \"acc_norm_stderr,none\":\
\ 0.017578738526776348\n },\n \"leaderboard_bbh_hyperbaton\": {\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253316\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.176,\n\
\ \"acc_norm_stderr,none\": 0.024133497525457123\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.273972602739726,\n \"acc_norm_stderr,none\"\
: 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.132,\n \
\ \"acc_norm_stderr,none\": 0.021450980824038166\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.14,\n\
\ \"acc_norm_stderr,none\": 0.021989409645240245\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\":\
\ 0.022249407735450245\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\":\
\ 0.02936106757521985\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2684563758389262,\n\
\ \"acc_norm_stderr,none\": 0.012851083815706302,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.25757575757575757,\n \"acc_norm_stderr,none\": 0.031156269519646826\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.27106227106227104,\n\
\ \"acc_norm_stderr,none\": 0.01904063815660353\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.19038817005545286,\n \"prompt_level_strict_acc_stderr,none\": 0.016895129407634615,\n\
\ \"inst_level_strict_acc,none\": 0.30815347721822545,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.20702402957486138,\n \"prompt_level_loose_acc_stderr,none\": 0.017435865587996542,\n\
\ \"inst_level_loose_acc,none\": 0.3237410071942446,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_hard\"\n },\n \
\ \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.15915890957446807,\n \"acc_stderr,none\": 0.00333519766069695\n \
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.34656084656084657,\n\
\ \"acc_norm_stderr,none\": 0.016818245296711158,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\":\
\ 0.02936106757521985\n }\n },\n \"leaderboard\": {\n \"prompt_level_loose_acc,none\"\
: 0.20702402957486138,\n \"prompt_level_loose_acc_stderr,none\": 0.017435865587996542,\n\
\ \"inst_level_loose_acc,none\": 0.3237410071942446,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"acc_norm,none\": 0.31210273706057856,\n \"acc_norm_stderr,none\"\
: 0.004969675854524385,\n \"prompt_level_strict_acc,none\": 0.19038817005545286,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.016895129407634615,\n \
\ \"acc,none\": 0.15915890957446807,\n \"acc_stderr,none\": 0.00333519766069695,\n\
\ \"inst_level_strict_acc,none\": 0.30815347721822545,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.3166116993577504,\n \"acc_norm_stderr,none\"\
: 0.0056817406499362405,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.46524064171123,\n \"acc_norm_stderr,none\"\
: 0.036573080985189216\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.084,\n \"acc_norm_stderr,none\": 0.017578738526776348\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"\
acc_norm,none\": 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253316\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.176,\n \"acc_norm_stderr,none\": 0.024133497525457123\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.273972602739726,\n\
\ \"acc_norm_stderr,none\": 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2684563758389262,\n\
\ \"acc_norm_stderr,none\": 0.012851083815706302,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.25757575757575757,\n\
\ \"acc_norm_stderr,none\": 0.031156269519646826\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.27106227106227104,\n \"acc_norm_stderr,none\": 0.01904063815660353\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.19038817005545286,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.016895129407634615,\n \
\ \"inst_level_strict_acc,none\": 0.30815347721822545,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.20702402957486138,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017435865587996542,\n \"inst_level_loose_acc,none\"\
: 0.3237410071942446,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15915890957446807,\n\
\ \"acc_stderr,none\": 0.00333519766069695\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.34656084656084657,\n \"acc_norm_stderr,none\"\
: 0.016818245296711158,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_ifeval
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T02-24-10.109501.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T02_24_10.109501
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-24-10.109501.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T02-24-10.109501.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
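Each configuration also exposes the timestamped split directly (see the `configs` list in the metadata above), so you can pin a specific run instead of tracking `latest`. A sketch using the run timestamp from this card:
```python
from datasets import load_dataset

# Pin the 2024-12-31 run explicitly instead of following the "latest" split.
data = load_dataset(
    "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep-details",
    name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep__leaderboard_ifeval",
    split="2024_12_31T02_24_10.109501",
)
```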
## Latest results
These are the [latest results from run 2024-12-31T02-24-10.109501](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-5ep-MDPO_7e-7_3ep_0alp_0lam_1ep/results_2024-12-31T02-24-10.109501.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in its results file and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_loose_acc,none": 0.20702402957486138,
"prompt_level_loose_acc_stderr,none": 0.017435865587996542,
"inst_level_loose_acc,none": 0.3237410071942446,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"acc_norm,none": 0.31210273706057856,
"acc_norm_stderr,none": 0.004969675854524385,
"prompt_level_strict_acc,none": 0.19038817005545286,
"prompt_level_strict_acc_stderr,none": 0.016895129407634615,
"acc,none": 0.15915890957446807,
"acc_stderr,none": 0.00333519766069695,
"inst_level_strict_acc,none": 0.30815347721822545,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3166116993577504,
"acc_norm_stderr,none": 0.0056817406499362405,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.46524064171123,
"acc_norm_stderr,none": 0.036573080985189216
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.084,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457123
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2684563758389262,
"acc_norm_stderr,none": 0.012851083815706302,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.25757575757575757,
"acc_norm_stderr,none": 0.031156269519646826
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27106227106227104,
"acc_norm_stderr,none": 0.01904063815660353
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19038817005545286,
"prompt_level_strict_acc_stderr,none": 0.016895129407634615,
"inst_level_strict_acc,none": 0.30815347721822545,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20702402957486138,
"prompt_level_loose_acc_stderr,none": 0.017435865587996542,
"inst_level_loose_acc,none": 0.3237410071942446,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15915890957446807,
"acc_stderr,none": 0.00333519766069695
},
"leaderboard_musr": {
"acc_norm,none": 0.34656084656084657,
"acc_norm_stderr,none": 0.016818245296711158,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
}
},
"leaderboard": {
"prompt_level_loose_acc,none": 0.20702402957486138,
"prompt_level_loose_acc_stderr,none": 0.017435865587996542,
"inst_level_loose_acc,none": 0.3237410071942446,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"acc_norm,none": 0.31210273706057856,
"acc_norm_stderr,none": 0.004969675854524385,
"prompt_level_strict_acc,none": 0.19038817005545286,
"prompt_level_strict_acc_stderr,none": 0.016895129407634615,
"acc,none": 0.15915890957446807,
"acc_stderr,none": 0.00333519766069695,
"inst_level_strict_acc,none": 0.30815347721822545,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3166116993577504,
"acc_norm_stderr,none": 0.0056817406499362405,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.46524064171123,
"acc_norm_stderr,none": 0.036573080985189216
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.084,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457123
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2684563758389262,
"acc_norm_stderr,none": 0.012851083815706302,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.25757575757575757,
"acc_norm_stderr,none": 0.031156269519646826
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27106227106227104,
"acc_norm_stderr,none": 0.01904063815660353
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19038817005545286,
"prompt_level_strict_acc_stderr,none": 0.016895129407634615,
"inst_level_strict_acc,none": 0.30815347721822545,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.20702402957486138,
"prompt_level_loose_acc_stderr,none": 0.017435865587996542,
"inst_level_loose_acc,none": 0.3237410071942446,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15915890957446807,
"acc_stderr,none": 0.00333519766069695
},
"leaderboard_musr": {
"acc_norm,none": 0.34656084656084657,
"acc_norm_stderr,none": 0.016818245296711158,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
}
}
```
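These per-task samples can be pulled directly with the `datasets` library. A minimal sketch is below; since this card's own repository id is not repeated at this point, the identifiers are borrowed from the analogous `JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details` card further down in this dump — substitute this card's actual repo id and task config name.
```python
from datasets import load_dataset

# Identifiers below are illustrative (taken from a sibling leaderboard-details
# repo in this dump); replace them with this card's actual repo id and config.
data = load_dataset(
    "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details",
    name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_boolean_expressions",
    split="latest",  # the "latest" split always points at the most recent run
)
print(data)
```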
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
HY-H/BrainLLM | HY-H | "2025-01-05T06:31:50Z" | 5 | 0 | [
"license:mit",
"region:us"
] | null | "2024-12-31T02:30:23Z" | ---
license: mit
---
|
daqc/medicina-qa-binarized-dpo-orpo-es | daqc | "2024-12-31T04:29:27Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:29:25Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: chosen
list:
- name: content
dtype: string
- name: role
dtype: string
- name: rejected
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 3992670.0
num_examples: 9000
- name: test
num_bytes: 443630.0
num_examples: 1000
download_size: 1811046
dataset_size: 4436300.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
chiyuanhsiao/llama-questions-text_original | chiyuanhsiao | "2024-12-31T04:38:02Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:37:56Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: answer
dtype: string
- name: audio
dtype: audio
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
splits:
- name: test
num_bytes: 35366195.0
num_examples: 300
download_size: 24357493
dataset_size: 35366195.0
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
---
|
chiyuanhsiao/spoken-web-questions-text_original | chiyuanhsiao | "2024-12-31T04:38:51Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:38:38Z" | ---
dataset_info:
features:
- name: url
dtype: string
- name: question
dtype: string
- name: answers
sequence: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
splits:
- name: test
num_bytes: 196747862.424
num_examples: 2032
download_size: 148291433
dataset_size: 196747862.424
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
---
|
DT4LM/t5v1-1base_rte_pair_textbugger | DT4LM | "2024-12-31T04:46:27Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:46:12Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 57239
num_examples: 181
download_size: 44625
dataset_size: 57239
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/t5v1-1base_rte_pair_textbugger_original | DT4LM | "2024-12-31T04:46:32Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:46:27Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 56820
num_examples: 181
download_size: 45785
dataset_size: 56820
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gpt2_mr_pair_textbugger | DT4LM | "2024-12-31T08:22:03Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:53:08Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 90258
num_examples: 666
download_size: 65475
dataset_size: 90258
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3base_mr_pair_textbugger | DT4LM | "2024-12-31T05:00:48Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T04:57:29Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 72933
num_examples: 534
download_size: 53714
dataset_size: 72933
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3base_mr_pair_textbugger_original | DT4LM | "2024-12-31T05:02:59Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T05:00:49Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 71832
num_examples: 534
download_size: 50521
dataset_size: 71832
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Tien09/pair_similarity_new | Tien09 | "2024-12-31T05:28:31Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T05:28:28Z" | ---
dataset_info:
features:
- name: effect_text
dtype: string
- name: score
dtype: float64
- name: effect_text2
dtype: string
splits:
- name: train
num_bytes: 4755801.047816236
num_examples: 8959
- name: test
num_bytes: 1019213.9760918822
num_examples: 1920
- name: validation
num_bytes: 1019213.9760918822
num_examples: 1920
download_size: 2732029
dataset_size: 6794229.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: validation
path: data/validation-*
---
|
chiyuanhsiao/trivia_qa-audio-text_original | chiyuanhsiao | "2024-12-31T05:58:21Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T05:58:04Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: question_id
dtype: string
- name: question_source
dtype: string
- name: entity_pages
sequence:
- name: doc_source
dtype: string
- name: filename
dtype: string
- name: title
dtype: string
- name: wiki_context
dtype: string
- name: search_results
sequence:
- name: description
dtype: string
- name: filename
dtype: string
- name: rank
dtype: int32
- name: title
dtype: string
- name: url
dtype: string
- name: search_context
dtype: string
- name: answer
struct:
- name: aliases
sequence: string
- name: normalized_aliases
sequence: string
- name: matched_wiki_entity_name
dtype: string
- name: normalized_matched_wiki_entity_name
dtype: string
- name: normalized_value
dtype: string
- name: type
dtype: string
- name: value
dtype: string
- name: question_audio
dtype:
audio:
sampling_rate: 16000
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
splits:
- name: validation
num_bytes: 215045302.0
num_examples: 1000
download_size: 154416684
dataset_size: 215045302.0
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
---
|
polyglots/DPO-Sinhala-Classification | polyglots | "2024-12-31T06:07:28Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:07:27Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: chosen
dtype: string
- name: rejected
dtype: string
splits:
- name: train
num_bytes: 9442932
num_examples: 14476
download_size: 1189606
dataset_size: 9442932
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
zd21/ReST-MCTS_Llama3-8b-Instruct_ReST-EM-CoT_1st | zd21 | "2024-12-31T06:14:02Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:12:37Z" | ---
license: cc-by-4.0
---
|
zd21/ReST-MCTS_Mistral-MetaMATH-7b-Instruct_ReST-MCTS_1st | zd21 | "2024-12-31T06:18:46Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:18:26Z" | ---
license: cc-by-4.0
---
|
zd21/ReST-MCTS_SciGLM-6B_ReST-EM-CoT_1st | zd21 | "2024-12-31T06:21:38Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:21:15Z" | ---
license: cc-by-4.0
---
|
zd21/ReST-MCTS_SciGLM-6B_ReST-MCTS_Policy_1st | zd21 | "2024-12-31T06:22:03Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:21:43Z" | ---
license: cc-by-4.0
---
|
herpaderpapotato/pose_vrlens_nsfw_single_class | herpaderpapotato | "2025-01-01T11:02:10Z" | 5 | 1 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"not-for-all-audiences"
] | null | "2024-12-31T06:43:14Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: objects
struct:
- name: categories
sequence: int64
- name: bbox
sequence:
sequence: string
- name: keypoints
sequence:
sequence:
sequence: string
splits:
- name: train
num_bytes: 277956991
num_examples: 619
download_size: 274837413
dataset_size: 277956991
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
tags:
- not-for-all-audiences
viewer: false
--- |
haorandai/Dec30_PGD_Banana_UF_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T06:43:29Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:43:28Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1612353.0
num_examples: 15
download_size: 1613957
dataset_size: 1612353.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gpt2_rte_pair_textbugger | DT4LM | "2024-12-31T06:46:50Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:46:46Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 73567
num_examples: 223
download_size: 56563
dataset_size: 73567
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gpt2_rte_pair_textbugger_original | DT4LM | "2024-12-31T06:46:54Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:46:51Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 73009
num_examples: 223
download_size: 54176
dataset_size: 73009
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_PGD_Banana_Orange_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T06:47:08Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:47:06Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1153972.0
num_examples: 15
download_size: 1155529
dataset_size: 1153972.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
zd21/ReST-MCTS_Mistral-MetaMATH-7b-Instruct_ReST-MCTS_2nd | zd21 | "2024-12-31T06:51:53Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:51:34Z" | ---
license: cc-by-4.0
---
|
zd21/ReST-MCTS_SciGLM-6B_ReST-EM-CoT_2nd | zd21 | "2024-12-31T06:53:26Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:53:03Z" | ---
license: cc-by-4.0
---
|
haorandai/Dec30_PGD_Bicycle_Orange_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T06:54:07Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:54:05Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1252773.0
num_examples: 15
download_size: 1254339
dataset_size: 1252773.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
zd21/ReST-MCTS_SciGLM-6B_ReST-MCTS_Policy_2nd | zd21 | "2024-12-31T06:54:33Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:10K<n<100K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:54:13Z" | ---
license: cc-by-4.0
---
|
DT4LM/debertav3base_sst2_pair_textbugger | DT4LM | "2024-12-31T06:55:46Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:54:41Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 26857
num_examples: 283
download_size: 20801
dataset_size: 26857
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
zd21/ReST-MCTS_SciGLM-6B_Self-Rewarding-DPO_2nd | zd21 | "2024-12-31T06:55:16Z" | 5 | 0 | [
"license:cc-by-4.0",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:54:59Z" | ---
license: cc-by-4.0
---
|
DT4LM/debertav3base_sst2_pair_textbugger_original | DT4LM | "2024-12-31T06:58:59Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:55:46Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 26380
num_examples: 283
download_size: 19446
dataset_size: 26380
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_PGD_Mice_UF_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T06:59:12Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T06:59:10Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1645186.0
num_examples: 15
download_size: 1646789
dataset_size: 1645186.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_PGD_Mice_Orange_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:01:13Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:01:11Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1205479.0
num_examples: 15
download_size: 1207069
dataset_size: 1205479.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Random_Banana_UF_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:03:31Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:03:29Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1573031.0
num_examples: 15
download_size: 1574499
dataset_size: 1573031.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Random_Bicycle_UF_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:05:10Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:05:08Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1592717.0
num_examples: 15
download_size: 1594188
dataset_size: 1592717.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Random_Mice_UF_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:06:34Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:06:32Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1636773.0
num_examples: 15
download_size: 1638256
dataset_size: 1636773.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Random_Banana_Orange_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:08:01Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:07:59Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1061128.0
num_examples: 15
download_size: 1062588
dataset_size: 1061128.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Random_Bicycle_Orange_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:09:35Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:09:33Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1187452.0
num_examples: 15
download_size: 1188909
dataset_size: 1187452.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Random_Mice_Orange_Epsilon0.05_10samples_5constraints | haorandai | "2024-12-31T07:11:13Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:11:12Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1191159.0
num_examples: 15
download_size: 1192608
dataset_size: 1191159.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Banana_UF_10samples_5constraints | haorandai | "2024-12-31T07:15:13Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:15:11Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1489925.0
num_examples: 15
download_size: 478994
dataset_size: 1489925.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Bicycle_UF_10samples_5constraints | haorandai | "2024-12-31T07:17:24Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:17:22Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1635746.0
num_examples: 15
download_size: 613972
dataset_size: 1635746.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Mice_UF_10samples_5constraints | haorandai | "2024-12-31T07:18:52Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:18:51Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 1518964.0
num_examples: 15
download_size: 537252
dataset_size: 1518964.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Bicycle_Orange_10samples_5constraints | haorandai | "2024-12-31T07:24:31Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:24:30Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 526159.0
num_examples: 15
download_size: 523511
dataset_size: 526159.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Banana_Orange_10samples_5constraints | haorandai | "2024-12-31T07:26:23Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:26:22Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 413011.0
num_examples: 15
download_size: 410375
dataset_size: 413011.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
haorandai/Dec30_Clean_Mice_Orange_10samples_5constraints | haorandai | "2024-12-31T07:27:51Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T07:27:50Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 498932.0
num_examples: 15
download_size: 496295
dataset_size: 498932.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
CodeEval-Pro/bigcodebench-lite-pro | CodeEval-Pro | "2024-12-31T07:49:06Z" | 5 | 1 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2412.21199",
"region:us"
] | null | "2024-12-31T07:42:50Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: raw_solution
dtype: string
- name: test_code
dtype: string
- name: raw_problem
dtype: string
- name: new_solution
dtype: string
- name: new_problem
dtype: string
splits:
- name: train
num_bytes: 178710
num_examples: 57
download_size: 103229
dataset_size: 178710
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
Evaluation dataset for HumanEval Pro and MBPP Pro: Evaluating Large Language Models on Self-invoking Code Generation Task (arxiv.org/abs/2412.21199). |
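As a minimal usage sketch (assuming only the `train` split declared in the card's `dataset_info`), the 57 self-invoking problems can be loaded and inspected like so; the field names come from that schema:
```python
from datasets import load_dataset

# Single declared split: 57 self-invoking code-generation problems.
ds = load_dataset("CodeEval-Pro/bigcodebench-lite-pro", split="train")

# Each row pairs a base problem/solution ("raw_*") with its self-invoking
# variant ("new_*") plus the accompanying test code.
row = ds[0]
for field in ("id", "raw_problem", "new_problem", "test_code"):
    value = row[field]
    preview = value[:200] if isinstance(value, str) else value
    print(f"--- {field} ---\n{preview}\n")
```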
chiyuanhsiao/trivia_qa-audio-text-score | chiyuanhsiao | "2024-12-31T08:01:26Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:01:07Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: question_id
dtype: string
- name: question_source
dtype: string
- name: entity_pages
sequence:
- name: doc_source
dtype: string
- name: filename
dtype: string
- name: title
dtype: string
- name: wiki_context
dtype: string
- name: search_results
sequence:
- name: description
dtype: string
- name: filename
dtype: string
- name: rank
dtype: int32
- name: title
dtype: string
- name: url
dtype: string
- name: search_context
dtype: string
- name: answer
struct:
- name: aliases
sequence: string
- name: normalized_aliases
sequence: string
- name: matched_wiki_entity_name
dtype: string
- name: normalized_matched_wiki_entity_name
dtype: string
- name: normalized_value
dtype: string
- name: type
dtype: string
- name: value
dtype: string
- name: question_audio
dtype:
audio:
sampling_rate: 16000
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
- name: speech_score
dtype: int64
- name: text_score
dtype: int64
splits:
- name: validation
num_bytes: 232722626.0
num_examples: 1000
download_size: 155444570
dataset_size: 232722626.0
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
---
|
chiyuanhsiao/trivia_qa-audio-text_original-score | chiyuanhsiao | "2024-12-31T08:01:53Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:01:41Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: question_id
dtype: string
- name: question_source
dtype: string
- name: entity_pages
sequence:
- name: doc_source
dtype: string
- name: filename
dtype: string
- name: title
dtype: string
- name: wiki_context
dtype: string
- name: search_results
sequence:
- name: description
dtype: string
- name: filename
dtype: string
- name: rank
dtype: int32
- name: title
dtype: string
- name: url
dtype: string
- name: search_context
dtype: string
- name: answer
struct:
- name: aliases
sequence: string
- name: normalized_aliases
sequence: string
- name: matched_wiki_entity_name
dtype: string
- name: normalized_matched_wiki_entity_name
dtype: string
- name: normalized_value
dtype: string
- name: type
dtype: string
- name: value
dtype: string
- name: question_audio
dtype:
audio:
sampling_rate: 16000
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
- name: speech_score
dtype: int64
- name: text_score
dtype: int64
splits:
- name: validation
num_bytes: 215061302.0
num_examples: 1000
download_size: 154423750
dataset_size: 215061302.0
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
---
|
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details | open-llm-leaderboard | "2024-12-31T08:23:31Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:20:06Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T08-20-06.028753](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam/results_2024-12-31T08-20-06.028753.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_loose_acc,none\": 0.3213429256594724,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.15616688829787234,\n\
\ \"acc_stderr,none\": 0.003309572479029828,\n \"acc_norm,none\"\
: 0.30924892982228563,\n \"acc_norm_stderr,none\": 0.004931829515718916,\n\
\ \"inst_level_strict_acc,none\": 0.3105515587529976,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"prompt_level_strict_acc,none\"\
: 0.20147874306839186,\n \"prompt_level_strict_acc_stderr,none\": 0.017260802262371536,\n\
\ \"prompt_level_loose_acc,none\": 0.2144177449168207,\n \"\
prompt_level_loose_acc_stderr,none\": 0.017661570312173906,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.313660822773824,\n \"acc_norm_stderr,none\": 0.005625721472863487,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.74,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.47058823529411764,\n\
\ \"acc_norm_stderr,none\": 0.03659829510813266\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.228,\n \"acc_norm_stderr,none\":\
\ 0.026587432487268498\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.46,\n\
\ \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\":\
\ 0.017953084777052892\n },\n \"leaderboard_bbh_hyperbaton\": {\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.112,\n\
\ \"acc_norm_stderr,none\": 0.019985536939171485\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.2602739726027397,\n \"acc_norm_stderr,none\"\
: 0.03643903096750157\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\":\
\ 0.02346526100207671\n },\n \"leaderboard_bbh_ruin_names\": {\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.43820224719101125,\n\
\ \"acc_norm_stderr,none\": 0.03729414592947275\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\":\
\ 0.031563285061213475\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\":\
\ 0.022995023034068682\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.116,\n \"acc_norm_stderr,none\":\
\ 0.020293429803083823\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27684563758389263,\n\
\ \"acc_norm_stderr,none\": 0.012956401735727801,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2222222222222222,\n \"acc_norm_stderr,none\": 0.029620227874790486\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.29120879120879123,\n\
\ \"acc_norm_stderr,none\": 0.019460910297288078\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28348214285714285,\n \"acc_norm_stderr,none\"\
: 0.0213168289872622\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.20147874306839186,\n \"prompt_level_strict_acc_stderr,none\": 0.017260802262371536,\n\
\ \"inst_level_strict_acc,none\": 0.3105515587529976,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2144177449168207,\n \"prompt_level_loose_acc_stderr,none\": 0.017661570312173906,\n\
\ \"inst_level_loose_acc,none\": 0.3213429256594724,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_hard\"\n },\n \
\ \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.15616688829787234,\n \"acc_stderr,none\": 0.003309572479029828\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.32671957671957674,\n\
\ \"acc_norm_stderr,none\": 0.016549758347469634,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.264,\n \"acc_norm_stderr,none\":\
\ 0.027934518957690866\n }\n },\n \"leaderboard\": {\n \"inst_level_loose_acc,none\"\
: 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"acc,none\": 0.15616688829787234,\n \"acc_stderr,none\": 0.003309572479029828,\n\
\ \"acc_norm,none\": 0.30924892982228563,\n \"acc_norm_stderr,none\"\
: 0.004931829515718916,\n \"inst_level_strict_acc,none\": 0.3105515587529976,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"prompt_level_strict_acc,none\"\
: 0.20147874306839186,\n \"prompt_level_strict_acc_stderr,none\": 0.017260802262371536,\n\
\ \"prompt_level_loose_acc,none\": 0.2144177449168207,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.017661570312173906,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.313660822773824,\n \"acc_norm_stderr,none\"\
: 0.005625721472863487,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.74,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47058823529411764,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.228,\n \"acc_norm_stderr,none\": 0.026587432487268498\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2602739726027397,\n\
\ \"acc_norm_stderr,none\": 0.03643903096750157\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.43820224719101125,\n \"acc_norm_stderr,none\"\
: 0.03729414592947275\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.116,\n \"acc_norm_stderr,none\": 0.020293429803083823\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27684563758389263,\n\
\ \"acc_norm_stderr,none\": 0.012956401735727801,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2222222222222222,\n\
\ \"acc_norm_stderr,none\": 0.029620227874790486\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.29120879120879123,\n \"acc_norm_stderr,none\": 0.019460910297288078\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28348214285714285,\n \"acc_norm_stderr,none\"\
: 0.0213168289872622\n },\n \"leaderboard_ifeval\": {\n \"alias\":\
\ \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.20147874306839186,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.017260802262371536,\n \
\ \"inst_level_strict_acc,none\": 0.3105515587529976,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2144177449168207,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017661570312173906,\n \"inst_level_loose_acc,none\"\
: 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15616688829787234,\n\
\ \"acc_stderr,none\": 0.003309572479029828\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.32671957671957674,\n \"acc_norm_stderr,none\"\
: 0.016549758347469634,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_ifeval
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_ifeval_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T08-20-06.028753.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T08_20_06.028753
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T08-20-06.028753.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T08-20-06.028753.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration, "results", stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details",
name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
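If needed, you can also list the available configurations programmatically before picking one. A minimal sketch (the repository name is the one above; this assumes the `datasets` library is installed and the Hub is reachable):
```python
from datasets import get_dataset_config_names

# Enumerate the 38 per-task configurations of this details repository.
configs = get_dataset_config_names(
    "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details"
)
for name in configs:
    print(name)
```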
## Latest results
These are the [latest results from run 2024-12-31T08-20-06.028753](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam/results_2024-12-31T08-20-06.028753.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the "results" configuration and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828,
"acc_norm,none": 0.30924892982228563,
"acc_norm_stderr,none": 0.004931829515718916,
"inst_level_strict_acc,none": 0.3105515587529976,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"prompt_level_strict_acc,none": 0.20147874306839186,
"prompt_level_strict_acc_stderr,none": 0.017260802262371536,
"prompt_level_loose_acc,none": 0.2144177449168207,
"prompt_level_loose_acc_stderr,none": 0.017661570312173906,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.313660822773824,
"acc_norm_stderr,none": 0.005625721472863487,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.74,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.228,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2602739726027397,
"acc_norm_stderr,none": 0.03643903096750157
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.43820224719101125,
"acc_norm_stderr,none": 0.03729414592947275
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.116,
"acc_norm_stderr,none": 0.020293429803083823
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27684563758389263,
"acc_norm_stderr,none": 0.012956401735727801,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2222222222222222,
"acc_norm_stderr,none": 0.029620227874790486
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.29120879120879123,
"acc_norm_stderr,none": 0.019460910297288078
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28348214285714285,
"acc_norm_stderr,none": 0.0213168289872622
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.20147874306839186,
"prompt_level_strict_acc_stderr,none": 0.017260802262371536,
"inst_level_strict_acc,none": 0.3105515587529976,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2144177449168207,
"prompt_level_loose_acc_stderr,none": 0.017661570312173906,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828
},
"leaderboard_musr": {
"acc_norm,none": 0.32671957671957674,
"acc_norm_stderr,none": 0.016549758347469634,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
}
},
"leaderboard": {
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828,
"acc_norm,none": 0.30924892982228563,
"acc_norm_stderr,none": 0.004931829515718916,
"inst_level_strict_acc,none": 0.3105515587529976,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"prompt_level_strict_acc,none": 0.20147874306839186,
"prompt_level_strict_acc_stderr,none": 0.017260802262371536,
"prompt_level_loose_acc,none": 0.2144177449168207,
"prompt_level_loose_acc_stderr,none": 0.017661570312173906,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.313660822773824,
"acc_norm_stderr,none": 0.005625721472863487,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.74,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.228,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2602739726027397,
"acc_norm_stderr,none": 0.03643903096750157
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.43820224719101125,
"acc_norm_stderr,none": 0.03729414592947275
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.116,
"acc_norm_stderr,none": 0.020293429803083823
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27684563758389263,
"acc_norm_stderr,none": 0.012956401735727801,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2222222222222222,
"acc_norm_stderr,none": 0.029620227874790486
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.29120879120879123,
"acc_norm_stderr,none": 0.019460910297288078
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28348214285714285,
"acc_norm_stderr,none": 0.0213168289872622
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.20147874306839186,
"prompt_level_strict_acc_stderr,none": 0.017260802262371536,
"inst_level_strict_acc,none": 0.3105515587529976,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2144177449168207,
"prompt_level_loose_acc_stderr,none": 0.017661570312173906,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15616688829787234,
"acc_stderr,none": 0.003309572479029828
},
"leaderboard_musr": {
"acc_norm,none": 0.32671957671957674,
"acc_norm_stderr,none": 0.016549758347469634,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
}
}
```
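To work with these aggregated numbers programmatically, one option is to download the results file linked above and extract the per-task scores. A minimal sketch, assuming the downloaded JSON matches the structure shown here and that `huggingface_hub` is installed:
```python
import json

from huggingface_hub import hf_hub_download

# Fetch the aggregated results file referenced in the link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam-details",
    filename="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_2ep_0alp_0lam/results_2024-12-31T08-20-06.028753.json",
    repo_type="dataset",
)

with open(path) as f:
    data = json.load(f)

# Print normalized accuracy for every task that reports one,
# skipping the nested "all" summary shown above.
for task, metrics in data.items():
    if task == "all" or not isinstance(metrics, dict):
        continue
    score = metrics.get("acc_norm,none")
    if isinstance(score, float):
        print(f"{task}: {score:.3f}")
```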
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
DT4LM/gpt2_mr_pair_textbugger_original | DT4LM | "2024-12-31T08:22:16Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:22:13Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 88714
num_examples: 666
download_size: 62328
dataset_size: 88714
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam-details | open-llm-leaderboard | "2024-12-31T08:26:28Z" | 5 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:23:06Z" | ---
pretty_name: Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam-details\"\
,\n\tname=\"JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-12-31T08-23-05.016642](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam/results_2024-12-31T08-23-05.016642.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.0007552870090634441,\n \"exact_match_stderr,none\"\
: 0.0007542068622709997,\n \"inst_level_strict_acc,none\": 0.30335731414868106,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"acc,none\": 0.15949135638297873,\n \"acc_stderr,none\"\
: 0.003338018999920184,\n \"prompt_level_strict_acc,none\": 0.19408502772643252,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n \
\ \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \"\
prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \"acc_norm,none\"\
: 0.3110649889739266,\n \"acc_norm_stderr,none\": 0.0049416858359111965,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31713244228432563,\n \"acc_norm_stderr,none\"\
: 0.005648412678059611,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.736,\n\
\ \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47058823529411764,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.324,\n\
\ \"acc_norm_stderr,none\": 0.029658294924545567\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\":\
\ 0.03162125257572558\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.516,\n \
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\":\
\ 0.02572139890141637\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\": 0.030491555220405475\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.112,\n\
\ \"acc_norm_stderr,none\": 0.019985536939171485\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.273972602739726,\n \"acc_norm_stderr,none\"\
: 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.39325842696629215,\n\
\ \"acc_norm_stderr,none\": 0.036715907095165784\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\":\
\ 0.03151438761115348\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.176,\n \"acc_norm_stderr,none\":\
\ 0.024133497525457123\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.108,\n \"acc_norm_stderr,none\":\
\ 0.019669559381568776\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27181208053691275,\n\
\ \"acc_norm_stderr,none\": 0.012882830312610578,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.21717171717171718,\n \"acc_norm_stderr,none\": 0.02937661648494561\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.28205128205128205,\n\
\ \"acc_norm_stderr,none\": 0.019275803929950375\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28348214285714285,\n \"acc_norm_stderr,none\"\
: 0.0213168289872622\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.19408502772643252,\n \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n\
\ \"inst_level_strict_acc,none\": 0.30335731414868106,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.21072088724584104,\n \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n\
\ \"inst_level_loose_acc,none\": 0.3213429256594724,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0007552870090634441,\n \"exact_match_stderr,none\"\
: 0.0007542068622709997,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.008130081300813009,\n \"exact_match_stderr,none\"\
: 0.008130081300813007\n },\n \"leaderboard_math_geometry_hard\":\
\ {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.15949135638297873,\n \"acc_stderr,none\": 0.0033380189999201843\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.32671957671957674,\n\
\ \"acc_norm_stderr,none\": 0.016549758347469634,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.264,\n \"acc_norm_stderr,none\":\
\ 0.027934518957690866\n }\n },\n \"leaderboard\": {\n \"exact_match,none\"\
: 0.0007552870090634441,\n \"exact_match_stderr,none\": 0.0007542068622709997,\n\
\ \"inst_level_strict_acc,none\": 0.30335731414868106,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"inst_level_loose_acc,none\": 0.3213429256594724,\n \
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.15949135638297873,\n\
\ \"acc_stderr,none\": 0.003338018999920184,\n \"prompt_level_strict_acc,none\"\
: 0.19408502772643252,\n \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n\
\ \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.017549801883664215,\n \"acc_norm,none\": 0.3110649889739266,\n \
\ \"acc_norm_stderr,none\": 0.0049416858359111965,\n \"alias\": \"leaderboard\"\
\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.31713244228432563,\n\
\ \"acc_norm_stderr,none\": 0.005648412678059611,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.736,\n \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47058823529411764,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572558\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.364,\n \"acc_norm_stderr,none\": 0.030491555220405475\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.273972602739726,\n\
\ \"acc_norm_stderr,none\": 0.03703787583167248\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.39325842696629215,\n \"acc_norm_stderr,none\"\
: 0.036715907095165784\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.176,\n \"acc_norm_stderr,none\": 0.024133497525457123\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.108,\n \"acc_norm_stderr,none\": 0.019669559381568776\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27181208053691275,\n\
\ \"acc_norm_stderr,none\": 0.012882830312610578,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.21717171717171718,\n\
\ \"acc_norm_stderr,none\": 0.02937661648494561\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.28205128205128205,\n \"acc_norm_stderr,none\": 0.019275803929950375\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28348214285714285,\n \"acc_norm_stderr,none\"\
: 0.0213168289872622\n },\n \"leaderboard_ifeval\": {\n \"alias\":\
\ \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.19408502772643252,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n \
\ \"inst_level_strict_acc,none\": 0.30335731414868106,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \"inst_level_loose_acc,none\"\
: 0.3213429256594724,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0007552870090634441,\n\
\ \"exact_match_stderr,none\": 0.0007542068622709997,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.008130081300813009,\n \"exact_match_stderr,none\"\
: 0.008130081300813007\n },\n \"leaderboard_math_geometry_hard\": {\n \
\ \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.15949135638297873,\n\
\ \"acc_stderr,none\": 0.0033380189999201843\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.32671957671957674,\n \"acc_norm_stderr,none\"\
: 0.016549758347469634,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2265625,\n\
\ \"acc_norm_stderr,none\": 0.026214195644894838\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n }\n}\n```"
repo_url: https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_date_understanding
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_navigate
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_object_counting
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_ruin_names
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_snarks
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_gpqa_diamond
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_gpqa_extended
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_gpqa_main
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_ifeval
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_ifeval_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_algebra_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_geometry_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_num_theory_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_math_precalculus_hard
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_mmlu_pro
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_musr_object_placements
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-12-31T08-23-05.016642.jsonl'
- config_name: JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_musr_team_allocation
data_files:
- split: 2024_12_31T08_23_05.016642
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T08-23-05.016642.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-12-31T08-23-05.016642.jsonl'
---
# Dataset Card for Evaluation run of JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam](https://huggingface.co/JayHyeon/Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset

# Fetch the most recent samples for one task; swap `name` for any configuration listed above.
data = load_dataset(
    "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam-details",
    name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__leaderboard_bbh_boolean_expressions",
    split="latest",
)
```
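The aggregated scores can be pulled the same way. The sketch below is a non-authoritative example: it assumes the aggregated configuration follows the same `<model>__results` naming pattern as the per-task configurations, so it first lists the available configuration names to confirm:
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam-details"

# List every configuration in the repo to confirm the exact "results" name.
print(get_dataset_config_names(repo))

# Assumed config name; adjust it to whatever the listing above shows.
results = load_dataset(
    repo,
    name="JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam__results",
    split="latest",
)
print(results[0])
```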
## Latest results
These are the [latest results from run 2024-12-31T08-23-05.016642](https://huggingface.co/datasets/open-llm-leaderboard/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam-details/blob/main/JayHyeon__Qwen2.5-0.5B-SFT-2e-5-2ep-MDPO_5e-7_1ep_0alp_0lam/results_2024-12-31T08-23-05.016642.json) (note that there might be results for other tasks in the repository if successive evals didn't cover the same tasks; each one can be found in its own configuration, under the "latest" split):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007542068622709997,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.15949135638297873,
"acc_stderr,none": 0.003338018999920184,
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"acc_norm,none": 0.3110649889739266,
"acc_norm_stderr,none": 0.0049416858359111965,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31713244228432563,
"acc_norm_stderr,none": 0.005648412678059611,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.736,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.39325842696629215,
"acc_norm_stderr,none": 0.036715907095165784
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457123
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.108,
"acc_norm_stderr,none": 0.019669559381568776
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27181208053691275,
"acc_norm_stderr,none": 0.012882830312610578,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.21717171717171718,
"acc_norm_stderr,none": 0.02937661648494561
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.28205128205128205,
"acc_norm_stderr,none": 0.019275803929950375
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28348214285714285,
"acc_norm_stderr,none": 0.0213168289872622
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007542068622709997,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15949135638297873,
"acc_stderr,none": 0.0033380189999201843
},
"leaderboard_musr": {
"acc_norm,none": 0.32671957671957674,
"acc_norm_stderr,none": 0.016549758347469634,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
}
},
"leaderboard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007542068622709997,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.15949135638297873,
"acc_stderr,none": 0.003338018999920184,
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"acc_norm,none": 0.3110649889739266,
"acc_norm_stderr,none": 0.0049416858359111965,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31713244228432563,
"acc_norm_stderr,none": 0.005648412678059611,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.736,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47058823529411764,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.273972602739726,
"acc_norm_stderr,none": 0.03703787583167248
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.39325842696629215,
"acc_norm_stderr,none": 0.036715907095165784
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457123
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.108,
"acc_norm_stderr,none": 0.019669559381568776
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27181208053691275,
"acc_norm_stderr,none": 0.012882830312610578,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.21717171717171718,
"acc_norm_stderr,none": 0.02937661648494561
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.28205128205128205,
"acc_norm_stderr,none": 0.019275803929950375
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28348214285714285,
"acc_norm_stderr,none": 0.0213168289872622
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"inst_level_strict_acc,none": 0.30335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3213429256594724,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0007552870090634441,
"exact_match_stderr,none": 0.0007542068622709997,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.15949135638297873,
"acc_stderr,none": 0.0033380189999201843
},
"leaderboard_musr": {
"acc_norm,none": 0.32671957671957674,
"acc_norm_stderr,none": 0.016549758347469634,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2265625,
"acc_norm_stderr,none": 0.026214195644894838
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
}
}
```
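If you download a results file directly rather than going through `load_dataset`, the per-task scores can be read with plain `json`. This is a minimal sketch assuming the file mirrors the structure printed above, with an `"all"` key mapping task names to their metrics; string values such as `"N/A"` are skipped:
```python
import json

# Path is an assumption: point it at a downloaded results_*.json file.
with open("results_2024-12-31T08-23-05.016642.json") as f:
    results = json.load(f)

# Print normalized accuracy for every task that reports one.
for task, metrics in sorted(results["all"].items()):
    score = metrics.get("acc_norm,none")
    if isinstance(score, float):
        print(f"{task}: {score:.3f}")
```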
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
chiyuanhsiao/trivia_qa-audio-ASR_GT-score | chiyuanhsiao | "2025-01-01T16:45:30Z" | 5 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:32:32Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: question_id
dtype: string
- name: question_source
dtype: string
- name: entity_pages
sequence:
- name: doc_source
dtype: string
- name: filename
dtype: string
- name: title
dtype: string
- name: wiki_context
dtype: string
- name: search_results
sequence:
- name: description
dtype: string
- name: filename
dtype: string
- name: rank
dtype: int32
- name: title
dtype: string
- name: url
dtype: string
- name: search_context
dtype: string
- name: answer
struct:
- name: aliases
sequence: string
- name: normalized_aliases
sequence: string
- name: matched_wiki_entity_name
dtype: string
- name: normalized_matched_wiki_entity_name
dtype: string
- name: normalized_value
dtype: string
- name: type
dtype: string
- name: value
dtype: string
- name: question_audio
dtype:
audio:
sampling_rate: 16000
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
- name: response_asr
dtype: string
- name: speech_score
dtype: int64
- name: text_score
dtype: int64
splits:
- name: validation
num_bytes: 711171286.0
num_examples: 1000
download_size: 613281161
dataset_size: 711171286.0
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
---
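A minimal usage sketch, assuming the schema above: each row pairs a TriviaQA question (text and 16 kHz audio) with an interleaved response, its ASR transcript, and integer speech/text scores. Audio columns decode into a waveform array plus sampling rate:
```python
from datasets import load_dataset

# Load the single validation split described in the schema above.
ds = load_dataset("chiyuanhsiao/trivia_qa-audio-ASR_GT-score", split="validation")

row = ds[0]
print(row["question"])                         # text question
print(row["response_asr"])                     # ASR transcript of the spoken response
print(row["speech_score"], row["text_score"])  # integer quality scores

# Audio columns decode to {"array": np.ndarray, "sampling_rate": int, ...}.
audio = row["question_audio"]
print(audio["sampling_rate"], audio["array"].shape)
```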
|
chiyuanhsiao/llama-questions-ASR_GT-score | chiyuanhsiao | "2025-01-01T13:58:17Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:39:20Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: answer
dtype: string
- name: audio
dtype: audio
- name: question_unit
sequence: int64
- name: response_interleaf
dtype: string
- name: response_text
dtype: string
- name: response_speech
dtype: audio
- name: response_asr
dtype: string
- name: speech_score
dtype: int64
- name: text_score
dtype: int64
splits:
- name: test
num_bytes: 176590033.0
num_examples: 300
download_size: 157352925
dataset_size: 176590033.0
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
---
|
DT4LM/debertav3ba_sst2_kuleshov_differential | DT4LM | "2024-12-31T08:56:52Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:52:32Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 27802.563876651984
num_examples: 366
download_size: 23179
dataset_size: 27802.563876651984
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_rte_kuleshov_differential | DT4LM | "2024-12-31T08:59:29Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:55:08Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 25433.032258064515
num_examples: 76
download_size: 24025
dataset_size: 25433.032258064515
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_mr_kuleshov_differential | DT4LM | "2024-12-31T08:58:02Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:55:54Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 65605.3191780822
num_examples: 521
download_size: 47262
dataset_size: 65605.3191780822
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_mr_kuleshov_differential | DT4LM | "2024-12-31T08:55:59Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:55:55Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 66289.04962779157
num_examples: 519
download_size: 48744
dataset_size: 66289.04962779157
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_mr_kuleshov_differential_original | DT4LM | "2024-12-31T08:56:02Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:55:59Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 65218.209677419356
num_examples: 519
download_size: 46674
dataset_size: 65218.209677419356
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_sst2_kuleshov_differential_original | DT4LM | "2024-12-31T08:56:56Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:56:53Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 27322.625550660792
num_examples: 366
download_size: 22322
dataset_size: 27322.625550660792
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_mr_pruthi_differential | DT4LM | "2024-12-31T09:00:07Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:56:54Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 21318.4
num_examples: 192
download_size: 16939
dataset_size: 21318.4
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_rte_pruthi_differential | DT4LM | "2024-12-31T08:59:56Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:57:46Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 19376.220472440946
num_examples: 62
download_size: 21257
dataset_size: 19376.220472440946
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_mr_pruthi_differential | DT4LM | "2024-12-31T08:58:54Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:57:49Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 27265.166219839142
num_examples: 237
download_size: 20868
dataset_size: 27265.166219839142
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_mr_kuleshov_differential_original | DT4LM | "2024-12-31T09:00:08Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:58:03Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 64878.0602739726
num_examples: 521
download_size: 46211
dataset_size: 64878.0602739726
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/t5v1-1ba_rte_pruthi_differential | DT4LM | "2024-12-31T09:00:04Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:58:27Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 23002.950819672133
num_examples: 85
download_size: 21671
dataset_size: 23002.950819672133
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_mr_pruthi_differential_original | DT4LM | "2024-12-31T09:00:04Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:58:55Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 27241.02144772118
num_examples: 237
download_size: 20519
dataset_size: 27241.02144772118
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_sst2_pruthi_differential | DT4LM | "2024-12-31T09:00:00Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:58:55Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 11465.187096774194
num_examples: 258
download_size: 10538
dataset_size: 11465.187096774194
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_rte_kuleshov_differential_original | DT4LM | "2024-12-31T08:59:33Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:59:30Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 25312.290322580644
num_examples: 76
download_size: 23883
dataset_size: 25312.290322580644
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/gp_rte_pruthi_differential_original | DT4LM | "2024-12-31T09:00:01Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T08:59:58Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 19349.858267716536
num_examples: 62
download_size: 21166
dataset_size: 19349.858267716536
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_sst2_pruthi_differential_original | DT4LM | "2024-12-31T09:00:03Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T09:00:00Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 11484.051612903226
num_examples: 258
download_size: 10206
dataset_size: 11484.051612903226
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/t5v1-1ba_rte_pruthi_differential_original | DT4LM | "2024-12-31T09:00:08Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T09:00:04Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 23006.43442622951
num_examples: 85
download_size: 21579
dataset_size: 23006.43442622951
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_mr_pruthi_differential_original | DT4LM | "2024-12-31T09:00:11Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T09:00:08Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 21320.727272727272
num_examples: 192
download_size: 16593
dataset_size: 21320.727272727272
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DT4LM/debertav3ba_mr_textbugger_differential | DT4LM | "2024-12-31T09:16:18Z" | 5 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-12-31T09:00:55Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int32
splits:
- name: train
num_bytes: 49058.919298245615
num_examples: 368
download_size: 37165
dataset_size: 49058.919298245615
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|