| 1 |
GPT-5 (high)
| 68.95 |
OpenAI
|
| 2 |
GPT-5 (medium)
| 67.53 |
OpenAI
|
| 3 |
Grok 4
| 67.52 |
xAI
|
| 4 |
o3
| 67.07 |
OpenAI
|
| 5 |
O3 Pro
| 67.07 |
OpenAI
|
| 6 |
O3 Mini
| 67.07 |
OpenAI
|
| 7 |
o4-mini (high)
| 65.05 |
OpenAI
|
| 8 |
Gemini 2.5 Pro
| 64.63 |
Google
|
| 9 |
Gemini 2.5 Pro 05-06
| 64.63 |
Google
|
| 10 |
Gemini 2.5 Pro 03-25
| 64.63 |
Google
|
| 11 |
GPT-5 Mini
| 63.70 |
OpenAI
|
| 12 |
Qwen3 235B 2507 (Reasoning)
| 63.59 |
Alibaba
|
| 13 |
GPT-5 (low)
| 63.43 |
OpenAI
|
| 14 |
gpt-oss-120B (high)
| 61.34 |
OpenAI
|
| 15 |
Claude 4.1 Opus Thinking
| 61.00 |
Anthropic
|
| 16 |
Claude 4 Sonnet Thinking
| 58.83 |
Anthropic
|
| 17 |
DeepSeek R1 0528
| 58.58 |
Unknown
|
| 18 |
Gemini 2.5 Flash (Reasoning)
| 58.43 |
Google
|
| 19 |
Gemini 2.5 Flash Reasoning 04-2025
| 58.43 |
Google
|
| 20 |
Grok 3 mini Reasoning (high)
| 58.01 |
xAI
|
| 21 |
Gemini 2.5 Pro (May' 25)
| 57.98 |
Google
|
| 22 |
GLM-4.5
| 56.08 |
Zhipu AI
|
| 23 |
GLM-4.5 Air
| 56.08 |
Zhipu AI
|
| 24 |
O3 Mini High
| 55.46 |
OpenAI
|
| 25 |
Claude 4 Opus Thinking
| 55.34 |
Anthropic
|
| 26 |
GPT-5 Nano
| 53.78 |
OpenAI
|
| 27 |
MiniMax M1 80k
| 53.15 |
Unknown
|
| 28 |
Llama Nemotron Super 49B v1.5 (Reasoning)
| 52.40 |
Meta
|
| 29 |
O1
| 51.68 |
Unknown
|
| 30 |
MiniMax M1 40k
| 51.11 |
Unknown
|
| 31 |
Sonar Reasoning Pro
| 50.74 |
Unknown
|
| 32 |
EXAONE 4.0 32B (Reasoning)
| 50.70 |
Unknown
|
| 33 |
o1-preview
| 49.30 |
Unknown
|
| 34 |
gpt-oss-20B (high)
| 49.01 |
OpenAI
|
| 35 |
Claude 4.1 Opus
| 49.00 |
Anthropic
|
| 36 |
Kimi K2
| 48.56 |
Unknown
|
| 37 |
QwQ-32B
| 47.68 |
Unknown
|
| 38 |
Gemini 2.5 Flash
| 47.26 |
Google
|
| 39 |
Gemini 2.5 Flash-Lite (Reasoning)
| 47.25 |
Google
|
| 40 |
Claude 3.7 Sonnet Thinking
| 46.98 |
Anthropic
|
| 41 |
GPT-4.1
| 46.77 |
OpenAI
|
| 42 |
GPT-4.1 Nano
| 46.77 |
OpenAI
|
| 43 |
GPT-4.1 Mini
| 46.77 |
OpenAI
|
| 44 |
Claude 4 Opus
| 46.58 |
Anthropic
|
| 45 |
Llama Nemotron Ultra Reasoning
| 46.42 |
Meta
|
| 46 |
Claude 4 Sonnet
| 45.72 |
Anthropic
|
| 47 |
o1-pro
| 45.63 |
Unknown
|
| 48 |
Qwen3 Coder 480B
| 45.19 |
Alibaba
|
| 49 |
Qwen3 Coder 480B A35B Instruct
| 45.19 |
Alibaba
|
| 50 |
Qwen3 32B (Reasoning)
| 44.28 |
Alibaba
|
| 51 |
DeepSeek V3 0324 (Mar '25)
| 43.99 |
Unknown
|
| 52 |
GPT-5 (minimal)
| 43.76 |
OpenAI
|
| 53 |
Solar Pro 2 (Reasoning)
| 43.30 |
Unknown
|
| 54 |
O1 Mini
| 43.25 |
Unknown
|
| 55 |
Nvidia Nemotron Nano 9b V2 Reasoning
| 43.22 |
Unknown
|
| 56 |
GPT-4.5 (Preview)
| 42.41 |
OpenAI
|
| 57 |
Llama 4 Maverick
| 41.73 |
Meta
|
| 58 |
Gemini 2.0 Flash Thinking exp. (Jan '25)
| 41.70 |
Google
|
| 59 |
DeepSeek R1 Distill Qwen 32B
| 41.25 |
Alibaba
|
| 60 |
Llama 3.3 Nemotron Super 49B Reasoning
| 40.47 |
Meta
|
| 61 |
Grok 3
| 39.92 |
xAI
|
| 62 |
GPT-4o (March 2025)
| 39.52 |
OpenAI
|
| 63 |
Mistral Medium 3
| 38.59 |
Unknown
|
| 64 |
Gemini 2.0 Pro Experimental 02-05
| 38.43 |
Google
|
| 65 |
DeepSeek R1 Distill Qwen 14B
| 38.22 |
Alibaba
|
| 66 |
Sonar Reasoning
| 38.04 |
Unknown
|
| 67 |
Gemini 2.5 Flash (April '25)
| 37.97 |
Google
|
| 68 |
Mistral Medium 3.1
| 37.97 |
Unknown
|
| 69 |
Gemini 2.0 Flash
| 37.81 |
Google
|
| 70 |
Magistral Medium
| 37.57 |
Unknown
|
| 71 |
DeepSeek R1 Distill Llama 70B
| 37.42 |
Meta
|
| 72 |
Claude 3.7 Sonnet
| 37.33 |
Anthropic
|
| 73 |
EXAONE 4.0 32B
| 37.22 |
Unknown
|
| 74 |
Qwen3 4B (Reasoning)
| 36.40 |
Alibaba
|
| 75 |
Reka Flash 3
| 36.26 |
Unknown
|
| 76 |
Magistral Small
| 35.95 |
Unknown
|
| 77 |
Gemini 2.0 Flash Experimental
| 35.51 |
Google
|
| 78 |
Gemini 2.5 Flash-Lite
| 34.92 |
Google
|
| 79 |
Nova Premier
| 34.76 |
Unknown
|
| 80 |
DeepSeek V3 (Dec '24)
| 34.65 |
Unknown
|
| 81 |
Qwen2.5 Max
| 34.33 |
Alibaba
|
| 82 |
Gemini 1.5 Pro (Sep)
| 33.62 |
Google
|
| 83 |
Solar Pro 2 Preview
| 33.62 |
Unknown
|
| 84 |
Claude 3.5 Sonnet (Oct)
| 33.46 |
Anthropic
|
| 85 |
Qwen3 Coder 30B
| 33.45 |
Alibaba
|
| 86 |
Qwen3 Coder 30B A3B Instruct
| 33.45 |
Alibaba
|
| 87 |
Qwen3 235B
| 33.33 |
Alibaba
|
| 88 |
Qwen3 235B A22B Instruct
| 33.33 |
Alibaba
|
| 89 |
Solar Pro 2
| 33.23 |
Unknown
|
| 90 |
Llama 4 Scout
| 33.06 |
Meta
|
| 91 |
Sonar
| 32.36 |
Unknown
|
| 92 |
Mistral Small 3.2
| 31.82 |
Unknown
|
| 93 |
Sonar Pro
| 31.67 |
Unknown
|
| 94 |
Command A
| 31.59 |
Unknown
|
| 95 |
Devstral Medium
| 31.26 |
Unknown
|
| 96 |
Llama 3.3 70B
| 31.21 |
Meta
|
| 97 |
Qwen3 30B A3B Instruct
| 29.91 |
Alibaba
|
| 98 |
Qwen3 14B
| 29.84 |
Alibaba
|
| 99 |
Qwen3 14B Instruct
| 29.84 |
Alibaba
|
| 100 |
Qwen3 32B
| 29.82 |
Alibaba
|
| 101 |
Qwen3 32B Instruct
| 29.82 |
Alibaba
|
| 102 |
Gemini 2.0 Flash-Lite Preview
| 29.70 |
Google
|
| 103 |
GPT-4o (Aug '24)
| 29.38 |
OpenAI
|
| 104 |
Llama 3.1 405B
| 29.33 |
Meta
|
| 105 |
Qwen2.5 72B
| 29.25 |
Alibaba
|
| 106 |
Qwen2.5 72B Instruct
| 29.25 |
Alibaba
|
| 107 |
Minimax Text 01
| 29.06 |
Unknown
|
| 108 |
Nova Pro
| 28.83 |
Unknown
|
| 109 |
Claude 3.5 Sonnet (June)
| 28.82 |
Anthropic
|
| 110 |
Tulu3 405b
| 28.74 |
Unknown
|
| 111 |
GPT-4o (ChatGPT)
| 28.64 |
OpenAI
|
| 112 |
Grok 2
| 28.02 |
xAI
|
| 113 |
Grok 2 1212
| 28.02 |
xAI
|
| 114 |
Phi-4
| 27.95 |
Unknown
|
| 115 |
Gemini 1.5 Flash (Sep)
| 27.74 |
Google
|
| 116 |
GPT-4 Turbo
| 27.52 |
OpenAI
|
| 117 |
Mistral Large 2 (Nov '24)
| 27.01 |
Unknown
|
| 118 |
Llama Nemotron Super 49B v1.5
| 26.94 |
Meta
|
| 119 |
Qwen3 1.7B (Reasoning)
| 26.89 |
Alibaba
|
| 120 |
Mistral Small 3.1
| 26.37 |
Unknown
|
| 121 |
Grok Beta
| 26.25 |
xAI
|
| 122 |
Pixtral Large 2411
| 26.12 |
Unknown
|
| 123 |
Qwen2.5 Instruct 32B
| 26.11 |
Alibaba
|
| 124 |
Qwen3 8B
| 25.39 |
Alibaba
|
| 125 |
Qwen3 8B Instruct
| 25.39 |
Alibaba
|
| 126 |
Gemma 3 27b
| 25.22 |
Unknown
|
| 127 |
Qwen2.5 Coder 32B Instruct
| 24.99 |
Alibaba
|
| 128 |
GPT-4
| 24.64 |
OpenAI
|
| 129 |
Nova Lite
| 24.54 |
Unknown
|
| 130 |
GPT-4o mini
| 24.31 |
OpenAI
|
| 131 |
Llama 3.1 70B
| 23.99 |
Meta
|
| 132 |
Gemma 3 12b
| 23.99 |
Unknown
|
| 133 |
Mistral Small 3
| 23.89 |
Unknown
|
| 134 |
DeepSeek-V2.5 (Dec '24)
| 23.88 |
Unknown
|
| 135 |
Qwen3 4B Instruct
| 23.82 |
Alibaba
|
| 136 |
Claude 3 Opus
| 23.69 |
Anthropic
|
| 137 |
Claude 3.5 Haiku
| 23.33 |
Anthropic
|
| 138 |
Gemini 2.0 Flash Thinking exp. (Dec '24)
| 23.31 |
Google
|
| 139 |
DeepSeek-V2.5
| 23.28 |
Unknown
|
| 140 |
DeepSeek-V2.5 (Sep 2024)
| 23.28 |
Unknown
|
| 141 |
Devstral Small (May '25)
| 22.67 |
Unknown
|
| 142 |
Mistral Saba
| 22.65 |
Unknown
|
| 143 |
DeepSeek R1 Distill Llama 8B
| 22.55 |
Meta
|
| 144 |
Reka Core
| 22.42 |
Unknown
|
| 145 |
Gemini 1.5 Pro (May)
| 22.21 |
Google
|
| 146 |
Gemini 1.5 Pro (May 2024)
| 22.21 |
Google
|
| 147 |
R1 1776
| 22.19 |
Unknown
|
| 148 |
Qwen2.5 Turbo
| 22.14 |
Alibaba
|
| 149 |
Reka Flash
| 22.11 |
Unknown
|
| 150 |
Solar Mini
| 21.90 |
Unknown
|
| 151 |
Reka Edge
| 21.49 |
Unknown
|
| 152 |
Grok 1
| 21.19 |
xAI
|
| 153 |
Qwen2 72b Instruct
| 21.09 |
Alibaba
|
| 154 |
Devstral Small
| 20.54 |
Unknown
|
| 155 |
Nova Micro
| 20.24 |
Unknown
|
| 156 |
Gemma 2 27B
| 20.11 |
Unknown
|
| 157 |
Gemini 1.5 Flash-8B
| 19.24 |
Google
|
| 158 |
Llama 3.1 8B
| 19.01 |
Meta
|
| 159 |
Gemma 3n E4B
| 18.48 |
Unknown
|
| 160 |
DeepHermes 3 Mistral 24B Preview
| 18.37 |
Unknown
|
| 161 |
Jamba 1.7 Large
| 17.91 |
Unknown
|
| 162 |
Jamba 1.5 Large
| 17.66 |
Unknown
|
| 163 |
Granite 3.3 8B
| 17.63 |
Unknown
|
| 164 |
Granite 3.3 8B Instruct
| 17.63 |
Unknown
|
| 165 |
Hermes 3 Llama 3.1 70B
| 17.48 |
Meta
|
| 166 |
Deepseek Coder V2
| 17.33 |
Unknown
|
| 167 |
Jamba 1.6 Large
| 17.13 |
Unknown
|
| 168 |
Gemini 1.5 Flash (May)
| 16.78 |
Google
|
| 169 |
Gemini 1.5 Flash (May 2024)
| 16.78 |
Google
|
| 170 |
Yi-Large
| 16.17 |
Unknown
|
| 171 |
Claude 3 Sonnet
| 16.05 |
Anthropic
|
| 172 |
Mistral Small (Sep '24)
| 15.69 |
Unknown
|
| 173 |
Gemini 1.0 Ultra
| 15.57 |
Google
|
| 174 |
Phi-4 Multimodal
| 15.15 |
Unknown
|
| 175 |
Qwen2.5 Coder 7B Instruct
| 14.92 |
Alibaba
|
| 176 |
Jamba Instruct
| 14.51 |
Unknown
|
| 177 |
Mixtral 8x22B
| 14.37 |
Unknown
|
| 178 |
Phi 4 Mini
| 14.16 |
Unknown
|
| 179 |
Llama 2 Chat 7B
| 13.94 |
Meta
|
| 180 |
Gemma 3 4b
| 13.52 |
Unknown
|
| 181 |
Claude 2.1
| 12.22 |
Anthropic
|
| 182 |
Claude 3 Haiku
| 12.11 |
Anthropic
|
| 183 |
Qwen3 0.6B (Reasoning)
| 11.28 |
Alibaba
|
| 184 |
Claude 2.0
| 11.07 |
Anthropic
|
| 185 |
DeepSeek-V2
| 11.07 |
Unknown
|
| 186 |
Mistral Medium
| 10.86 |
Unknown
|
| 187 |
Gemma 3n E2B
| 10.44 |
Unknown
|
| 188 |
Ministral 8b
| 10.37 |
Unknown
|
| 189 |
Gemma 2 9B
| 10.23 |
Unknown
|
| 190 |
Phi-3 Mini
| 10.13 |
Unknown
|
| 191 |
Qwen Chat 72b
| 10.02 |
Alibaba
|
| 192 |
Arctic Instruct
| 10.02 |
Unknown
|
| 193 |
Lfm 40b
| 9.74 |
Unknown
|
| 194 |
Command R Plus
| 9.49 |
Unknown
|
| 195 |
Llama 3 8B
| 9.47 |
Meta
|
| 196 |
PaLM 2
| 8.98 |
Unknown
|
| 197 |
Gemini 1.0 Pro
| 8.55 |
Google
|
| 198 |
DeepSeek Coder V2 Lite
| 8.45 |
Unknown
|
| 199 |
Aya Expanse 32b
| 7.99 |
Unknown
|
| 200 |
Llama 2 Chat 70b
| 7.93 |
Meta
|
| 201 |
Llama 2 Chat 13B
| 7.87 |
Meta
|
| 202 |
OpenChat 3.5
| 7.69 |
Unknown
|
| 203 |
DBRX
| 7.67 |
Unknown
|
| 204 |
Ministral 3b
| 7.54 |
Unknown
|
| 205 |
Mistral Nemo
| 7.52 |
Unknown
|
| 206 |
Llama 3.2 3B
| 7.43 |
Meta
|
| 207 |
DeepSeek R1 Distill Qwen 1.5B
| 7.07 |
Alibaba
|
| 208 |
Jamba 1.5 Mini
| 6.27 |
Unknown
|
| 209 |
Jamba 1.7 Mini
| 5.75 |
Unknown
|
| 210 |
Jamba 1.6 Mini
| 5.49 |
Unknown
|
| 211 |
Mixtral 8x7b Instruct
| 4.78 |
Unknown
|
| 212 |
Qwen3 0.6B
| 4.49 |
Alibaba
|
| 213 |
DeepHermes 3 - Llama-3.1 8B
| 3.99 |
Meta
|
| 214 |
DeepHermes 3 Llama 3.1 8B Preview
| 3.99 |
Meta
|
| 215 |
Aya Expanse 8B
| 3.79 |
Unknown
|
| 216 |
Command-R+
| 2.53 |
Unknown
|
| 217 |
Claude Instant
| 1.67 |
Anthropic
|
| 218 |
Qwen Chat 14b
| 1.67 |
Alibaba
|
| 219 |
Codestral-Mamba
| 1.56 |
Unknown
|
| 220 |
Gemma 3 1b
| 1.06 |
Unknown
|
| 221 |
Llama 3.2 1B
| 1.00 |
Meta
|
| 222 |
Mistral 7b Instruct
| 1.00 |
Unknown
|
| 223 |
Llama 65b
| 1.00 |
Meta
|
| 224 |
GPT-4o Realtime (Dec '24)
| N/A |
OpenAI
|
| 225 |
Grok 3 mini Reasoning (low)
| N/A |
xAI
|
| 226 |
GPT-3.5 Turbo 0613
| N/A |
OpenAI
|