daiqi committed on
Commit
bfbf749
1 Parent(s): 8360c51

Update commit_results.jsonl

Files changed (1)
  1. commit_results.jsonl +179 -179
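For context, the diff below replaces the placeholder "Link": "xxx" value with the MageBench paper URL (https://arxiv.org/abs/2412.04531) in every record of the JSONL results file. A minimal sketch of how such a bulk update could be scripted is shown here; the script itself (file path, placeholder check, rewrite step) is an assumption for illustration and is not part of this commit.

```python
# Hypothetical helper (not part of this repo): bulk-update the "Link" field
# in commit_results.jsonl, swapping the "xxx" placeholder for the paper URL,
# which is what this commit does line by line.
import json

SRC = "commit_results.jsonl"                     # file changed in this commit
PAPER_URL = "https://arxiv.org/abs/2412.04531"   # MageBench paper

# Read all JSONL records, skipping blank lines.
with open(SRC, encoding="utf-8") as f:
    records = [json.loads(line) for line in f if line.strip()]

# Replace only the placeholder entries.
for rec in records:
    if rec.get("Link") == "xxx":
        rec["Link"] = PAPER_URL

# Write the file back, one JSON object per line.
with open(SRC, "w", encoding="utf-8") as f:
    for rec in records:
        f.write(json.dumps(rec, ensure_ascii=False) + "\n")
```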
commit_results.jsonl CHANGED
@@ -1,179 +1,179 @@
1
- {"Score": 0.03, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
2
- {"Score": 11.78, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
3
- {"Score": 44.95, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
4
- {"Score": 44.14, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
5
- {"Score": 10.0, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
6
- {"Score": 13.78, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
7
- {"Score": 8.07, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
8
- {"Score": 12.95, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
9
- {"Score": 15.26, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
10
- {"Score": 45.57, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
11
- {"Score": 42.28, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
12
- {"Score": 12.52, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
13
- {"Score": 10.03, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
14
- {"Score": 42.52, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
15
- {"Score": 42.36, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
16
- {"Score": 2.1, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
17
- {"Score": 11.95, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
18
- {"Score": 9.92, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
19
- {"Score": 46.28, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
20
- {"Score": 42.59, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
21
- {"Score": 18.13, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
22
- {"Score": 6.57, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
23
- {"Score": 15.17, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
24
- {"Score": 43.19, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
25
- {"Score": 43.47, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
26
- {"Score": 3.91, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
27
- {"Score": 7.14, "Name": "MageBench", "BaseModel": "Yi-VL-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
28
- {"Score": 12.08, "Name": "MageBench", "BaseModel": "Yi-VL-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
29
- {"Score": 7.37, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
30
- {"Score": 13.03, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
31
- {"Score": 46.21, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
32
- {"Score": 43.22, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
33
- {"Score": 15.75, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
34
- {"Score": 4.71, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
35
- {"Score": 14.46, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
36
- {"Score": 43.46, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
37
- {"Score": 44.07, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
38
- {"Score": 14.46, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
39
- {"Score": 16.22, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
40
- {"Score": 16.76, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
41
- {"Score": 44.56, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
42
- {"Score": 43.58, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
43
- {"Score": 10.5, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
44
- {"Score": 35.09, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
45
- {"Score": 27.47, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
46
- {"Score": 45.8, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
47
- {"Score": 14.28, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
48
- {"Score": 64.11, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
49
- {"Score": 62.08, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
50
- {"Score": 48.26, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
51
- {"Score": 45.35, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
52
- {"Score": 16.94, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
53
- {"Score": 44.79, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
54
- {"Score": 39.3, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
55
- {"Score": 46.13, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
56
- {"Score": 51.84, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
57
- {"Score": 18.33, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
58
- {"Score": 34.28, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
59
- {"Score": 35.5, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
60
- {"Score": 46.09, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
61
- {"Score": 53.03, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
62
- {"Score": 21.2, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
63
- {"Score": 0.0, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
64
- {"Score": 0.0, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
65
- {"Score": 41.18, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
66
- {"Score": 41.18, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
67
- {"Score": 2.53, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
68
- {"Score": 0.0, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
69
- {"Score": 0.0, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
70
- {"Score": 46.61, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
71
- {"Score": 46.61, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
72
- {"Score": 17.33, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
73
- {"Score": 68.71, "Name": "MageBench", "BaseModel": "Human", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
74
- {"Score": 94.32, "Name": "MageBench", "BaseModel": "Human", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
75
- {"Score": 83.63, "Name": "MageBench", "BaseModel": "Human", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "xxx", "State": "Checked"}
76
- {"Score": 96.85, "Name": "MageBench", "BaseModel": "Human", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
77
- {"Score": 54.68, "Name": "MageBench", "BaseModel": "Human", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "xxx", "State": "Checked"}
78
- {"Score": 10.61, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
79
- {"Score": 16.14, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-1.8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
80
- {"Score": 17.33, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
81
- {"Score": 9.26, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
82
- {"Score": 2.92, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
83
- {"Score": 1.36, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
84
- {"Score": 8.08, "Name": "MageBench", "BaseModel": "Yi-VL-6B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
85
- {"Score": 18.22, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
86
- {"Score": 15.32, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
87
- {"Score": 11.42, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
88
- {"Score": 9.07, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
89
- {"Score": 11.98, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
90
- {"Score": 16.06, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
91
- {"Score": 27.43, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
92
- {"Score": 39.22, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
93
- {"Score": 13.3, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
94
- {"Score": 31.68, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
95
- {"Score": 1.65, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
96
- {"Score": 7.21, "Name": "MageBench", "BaseModel": "Yi-VL-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
97
- {"Score": 7.2, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
98
- {"Score": 11.7, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
99
- {"Score": 10.36, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
100
- {"Score": 28.45, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
101
- {"Score": 42.21, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
102
- {"Score": 40.02, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
103
- {"Score": 52.1, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
104
- {"Score": 46.01, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
105
- {"Score": 45.76, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-1.8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
106
- {"Score": 45.94, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
107
- {"Score": 46.43, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
108
- {"Score": 46.32, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
109
- {"Score": 46.62, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
110
- {"Score": 45.6, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
111
- {"Score": 46.84, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
112
- {"Score": 45.6, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
113
- {"Score": 46.37, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
114
- {"Score": 46.38, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
115
- {"Score": 46.53, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
116
- {"Score": 46.56, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
117
- {"Score": 46.54, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
118
- {"Score": 46.65, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
119
- {"Score": 46.77, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
120
- {"Score": 46.06, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
121
- {"Score": 45.65, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
122
- {"Score": 47.14, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
123
- {"Score": 46.39, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
124
- {"Score": 46.61, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
125
- {"Score": 47.14, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
126
- {"Score": 47.46, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
127
- {"Score": 47.65, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
128
- {"Score": 46.06, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
129
- {"Score": 45.6, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-1.8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
130
- {"Score": 46.11, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
131
- {"Score": 46.52, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
132
- {"Score": 46.28, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
133
- {"Score": 46.74, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
134
- {"Score": 46.64, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
135
- {"Score": 46.39, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
136
- {"Score": 46.19, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
137
- {"Score": 46.23, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
138
- {"Score": 46.41, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
139
- {"Score": 46.86, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
140
- {"Score": 46.35, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
141
- {"Score": 47.33, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
142
- {"Score": 46.53, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
143
- {"Score": 46.5, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
144
- {"Score": 46.48, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
145
- {"Score": 46.82, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
146
- {"Score": 47.84, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
147
- {"Score": 46.68, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
148
- {"Score": 47.09, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
149
- {"Score": 48.63, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
150
- {"Score": 48.33, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
151
- {"Score": -2.47, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
152
- {"Score": 2.0, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
153
- {"Score": 0.64, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
154
- {"Score": -0.03, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
155
- {"Score": 11.04, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
156
- {"Score": -0.8, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
157
- {"Score": -3.64, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
158
- {"Score": 2.68, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
159
- {"Score": 3.61, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
160
- {"Score": 7.9, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
161
- {"Score": 11.58, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
162
- {"Score": 16.59, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
163
- {"Score": 15.62, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
164
- {"Score": 3.04, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
165
- {"Score": 12.99, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
166
- {"Score": 15.02, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
167
- {"Score": 18.43, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
168
- {"Score": 8.04, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
169
- {"Score": 8.32, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
170
- {"Score": 17.78, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
171
- {"Score": 19.61, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
172
- {"Score": 0.0, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
173
- {"Score": 45.6, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
174
- {"Score": 45.6, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
175
- {"Score": 2.36, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
176
- {"Score": 0.0, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
177
- {"Score": 47.4, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "xxx", "State": "Checked"}
178
- {"Score": 47.4, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
179
- {"Score": 16.64, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "xxx", "State": "Checked"}
 
1
+ {"Score": 0.03, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
2
+ {"Score": 11.78, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
3
+ {"Score": 44.95, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
4
+ {"Score": 44.14, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
5
+ {"Score": 10.0, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
6
+ {"Score": 13.78, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
7
+ {"Score": 8.07, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
8
+ {"Score": 12.95, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
9
+ {"Score": 15.26, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
10
+ {"Score": 45.57, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
11
+ {"Score": 42.28, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
12
+ {"Score": 12.52, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
13
+ {"Score": 10.03, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
14
+ {"Score": 42.52, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
15
+ {"Score": 42.36, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
16
+ {"Score": 2.1, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
17
+ {"Score": 11.95, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
18
+ {"Score": 9.92, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
19
+ {"Score": 46.28, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
20
+ {"Score": 42.59, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
21
+ {"Score": 18.13, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
22
+ {"Score": 6.57, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
23
+ {"Score": 15.17, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
24
+ {"Score": 43.19, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
25
+ {"Score": 43.47, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
26
+ {"Score": 3.91, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
27
+ {"Score": 7.14, "Name": "MageBench", "BaseModel": "Yi-VL-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
28
+ {"Score": 12.08, "Name": "MageBench", "BaseModel": "Yi-VL-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
29
+ {"Score": 7.37, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
30
+ {"Score": 13.03, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
31
+ {"Score": 46.21, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
32
+ {"Score": 43.22, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
33
+ {"Score": 15.75, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
34
+ {"Score": 4.71, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
35
+ {"Score": 14.46, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
36
+ {"Score": 43.46, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
37
+ {"Score": 44.07, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
38
+ {"Score": 14.46, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
39
+ {"Score": 16.22, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
40
+ {"Score": 16.76, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
41
+ {"Score": 44.56, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
42
+ {"Score": 43.58, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
43
+ {"Score": 10.5, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
44
+ {"Score": 35.09, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
45
+ {"Score": 27.47, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
46
+ {"Score": 45.8, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
47
+ {"Score": 14.28, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
48
+ {"Score": 64.11, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
49
+ {"Score": 62.08, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
50
+ {"Score": 48.26, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
51
+ {"Score": 45.35, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
52
+ {"Score": 16.94, "Name": "MageBench", "BaseModel": "Claude-3.5-Sonnet", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
53
+ {"Score": 44.79, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
54
+ {"Score": 39.3, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
55
+ {"Score": 46.13, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
56
+ {"Score": 51.84, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
57
+ {"Score": 18.33, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
58
+ {"Score": 34.28, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
59
+ {"Score": 35.5, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
60
+ {"Score": 46.09, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
61
+ {"Score": 53.03, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
62
+ {"Score": 21.2, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
63
+ {"Score": 0.0, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
64
+ {"Score": 0.0, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
65
+ {"Score": 41.18, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
66
+ {"Score": 41.18, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
67
+ {"Score": 2.53, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
68
+ {"Score": 0.0, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
69
+ {"Score": 0.0, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
70
+ {"Score": 46.61, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
71
+ {"Score": 46.61, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
72
+ {"Score": 17.33, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
73
+ {"Score": 68.71, "Name": "MageBench", "BaseModel": "Human", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
74
+ {"Score": 94.32, "Name": "MageBench", "BaseModel": "Human", "Env.": "WebUI", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
75
+ {"Score": 83.63, "Name": "MageBench", "BaseModel": "Human", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
76
+ {"Score": 96.85, "Name": "MageBench", "BaseModel": "Human", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
77
+ {"Score": 54.68, "Name": "MageBench", "BaseModel": "Human", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "mini", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
78
+ {"Score": 10.61, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
79
+ {"Score": 16.14, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-1.8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
80
+ {"Score": 17.33, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
81
+ {"Score": 9.26, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
82
+ {"Score": 2.92, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
83
+ {"Score": 1.36, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
84
+ {"Score": 8.08, "Name": "MageBench", "BaseModel": "Yi-VL-6B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
85
+ {"Score": 18.22, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
86
+ {"Score": 15.32, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
87
+ {"Score": 11.42, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
88
+ {"Score": 9.07, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
89
+ {"Score": 11.98, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
90
+ {"Score": 16.06, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
91
+ {"Score": 27.43, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
92
+ {"Score": 39.22, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
93
+ {"Score": 13.3, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
94
+ {"Score": 31.68, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
95
+ {"Score": 1.65, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
96
+ {"Score": 7.21, "Name": "MageBench", "BaseModel": "Yi-VL-34B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
97
+ {"Score": 7.2, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
98
+ {"Score": 11.7, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
99
+ {"Score": 10.36, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
100
+ {"Score": 28.45, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
101
+ {"Score": 42.21, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
102
+ {"Score": 40.02, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
103
+ {"Score": 52.1, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
104
+ {"Score": 46.01, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
105
+ {"Score": 45.76, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-1.8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
106
+ {"Score": 45.94, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
107
+ {"Score": 46.43, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
108
+ {"Score": 46.32, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
109
+ {"Score": 46.62, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
110
+ {"Score": 45.6, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
111
+ {"Score": 46.84, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 45.6, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.37, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.38, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.53, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.56, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.54, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.65, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.77, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.06, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 45.65, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.14, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.39, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.61, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.14, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.46, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.65, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.06, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 45.6, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-1.8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.11, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.52, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.28, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.74, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.64, "Name": "MageBench", "BaseModel": "DeepSeek-VL-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.39, "Name": "MageBench", "BaseModel": "Xcomposer-2.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.19, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.23, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.41, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.86, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.35, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.33, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.53, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.5, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.48, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.82, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.84, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 46.68, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.09, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 48.63, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 48.33, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": -2.47, "Name": "MageBench", "BaseModel": "InternVL2-1B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 2.0, "Name": "MageBench", "BaseModel": "InternVL2-2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 0.64, "Name": "MageBench", "BaseModel": "Qwen2-vl-2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": -0.03, "Name": "MageBench", "BaseModel": "InternVL2-4B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 11.04, "Name": "MageBench", "BaseModel": "Phi-3.5-V-4.2B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": -0.8, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-7B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": -3.64, "Name": "MageBench", "BaseModel": "Llava-1.6-mistral-7B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 2.68, "Name": "MageBench", "BaseModel": "Qwen2-vl-7B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 3.61, "Name": "MageBench", "BaseModel": "MiniCPM-V2.6-8B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 7.9, "Name": "MageBench", "BaseModel": "InternVL2-8B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 11.58, "Name": "MageBench", "BaseModel": "Llama-3.2-11B-Vision-Instruct", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 16.59, "Name": "MageBench", "BaseModel": "LLaVA-v1.5-13B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 15.62, "Name": "MageBench", "BaseModel": "InternVL2-26B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 3.04, "Name": "MageBench", "BaseModel": "Llava-1.6-34B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 12.99, "Name": "MageBench", "BaseModel": "InternVL2-40B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 15.02, "Name": "MageBench", "BaseModel": "Qwen2-vl-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 18.43, "Name": "MageBench", "BaseModel": "NVLM-72B", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 8.04, "Name": "MageBench", "BaseModel": "InternVL2-76B-LLaMA3", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 8.32, "Name": "MageBench", "BaseModel": "Llama-3.2-90B-Vision-Instruct", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 17.78, "Name": "MageBench", "BaseModel": "GPT-4o", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 19.61, "Name": "MageBench", "BaseModel": "Gemini-1.5-pro", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 0.0, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 45.6, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 45.6, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 2.36, "Name": "MageBench", "BaseModel": "Idle Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 0.0, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "WebUI", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.4, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Global", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 47.4, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Sokoban", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}
+ {"Score": 16.64, "Name": "MageBench", "BaseModel": "Random Baseline", "Env.": "Football", "Target-research": "Model-Eval-Online", "Subset": "all", "Link": "https://arxiv.org/abs/2412.04531", "State": "Checked"}