| Rank | Model | Score | Creator |
|---|---|---|---|
| 1 | Grok 4 | 63.81 | xAI |
| 2 | o4-mini (high) | 63.48 | OpenAI |
| 3 | Gemini 2.5 Pro | 61.46 | Google |
| 4 | Gemini 2.5 Pro (05-06) | 61.46 | Google |
| 5 | Gemini 2.5 Pro (03-25) | 61.46 | Google |
| 6 | Qwen3 235B 2507 (Reasoning) | 60.60 | Alibaba |
| 7 | o3 | 59.69 | OpenAI |
| 8 | o3-pro | 59.69 | OpenAI |
| 9 | Gemini 2.5 Pro (May '25) | 59.29 | Google |
| 10 | DeepSeek R1 0528 | 58.66 | Unknown |
| 11 | GPT-5 (low) | 57.02 | OpenAI |
| 12 | o3-mini (high) | 56.61 | OpenAI |
| 13 | o3-mini | 55.84 | OpenAI |
| 14 | GPT-5 (medium) | 55.36 | OpenAI |
| 15 | Grok 3 mini Reasoning (high) | 55.13 | xAI |
| 16 | GPT-5 mini | 54.88 | OpenAI |
| 17 | GPT-5 (high) | 54.86 | OpenAI |
| 18 | EXAONE 4.0 32B (Reasoning) | 54.54 | Unknown |
| 19 | Gemini 2.5 Flash (Reasoning) | 54.44 | Google |
| 20 | Gemini 2.5 Flash Reasoning (04-2025) | 54.44 | Google |
| 21 | GLM-4.5 | 54.30 | Zhipu AI |
| 22 | MiniMax M1 80k | 54.25 | Unknown |
| 23 | Llama Nemotron Super 49B v1.5 (Reasoning) | 54.24 | Meta |
| 24 | gpt-oss-20B (high) | 53.74 | OpenAI |
| 25 | Claude 4 Sonnet Thinking | 52.77 | Anthropic |
| 26 | o1 | 51.85 | Unknown |
| 27 | MiniMax M1 40k | 51.78 | Unknown |
| 28 | Claude 4 Opus Thinking | 51.71 | Anthropic |
| 29 | gpt-oss-120B (high) | 50.07 | OpenAI |
| 30 | GLM-4.5 Air | 49.46 | Zhipu AI |
| 31 | Llama Nemotron Ultra Reasoning | 49.42 | Meta |
| 32 | QwQ-32B | 49.42 | Unknown |
| 33 | Claude 4 Opus | 47.52 | Anthropic |
| 34 | GPT-5 (minimal) | 47.27 | OpenAI |
| 35 | Qwen3 Coder 480B | 47.20 | Alibaba |
| 36 | Qwen3 Coder 480B A35B Instruct | 47.20 | Alibaba |
| 37 | NVIDIA Nemotron Nano 9B V2 (Reasoning) | 47.19 | Unknown |
| 38 | GPT-5 nano | 47.06 | OpenAI |
| 39 | Solar Pro 2 (Reasoning) | 45.90 | Unknown |
| 40 | Qwen3 32B (Reasoning) | 45.01 | Alibaba |
| 41 | o1-mini | 44.93 | Unknown |
| 42 | Claude 3.7 Sonnet Thinking | 43.79 | Anthropic |
| 43 | GPT-4.1 | 41.90 | OpenAI |
| 44 | GPT-4.1 mini | 41.90 | OpenAI |
| 45 | Magistral Medium | 41.19 | Unknown |
| 46 | Claude 4 Sonnet | 41.07 | Anthropic |
| 47 | Grok 3 | 39.67 | xAI |
| 48 | GPT-4o (March 2025) | 39.56 | OpenAI |
| 49 | Gemini 2.5 Flash-Lite (Reasoning) | 39.29 | Google |
| 50 | Gemini 2.5 Flash | 39.29 | Google |
| 51 | Claude 3.7 Sonnet | 38.49 | Anthropic |
| 52 | DeepSeek V3 0324 (Mar '25) | 38.15 | Unknown |
| 53 | Magistral Small | 37.75 | Unknown |
| 54 | Claude 3.5 Sonnet (Oct) | 37.33 | Anthropic |
| 55 | Mistral Medium 3.1 | 37.22 | Unknown |
| 56 | Mistral Medium 3 | 36.55 | Unknown |
| 57 | Kimi K2 | 36.52 | Unknown |
| 58 | Llama 4 Maverick | 36.39 | Meta |
| 59 | EXAONE 4.0 32B | 36.21 | Unknown |
| 60 | DeepSeek V3 (Dec '24) | 35.64 | Unknown |
| 61 | Reka Flash 3 | 35.11 | Unknown |
| 62 | Qwen2.5 Max | 34.78 | Alibaba |
| 63 | Qwen3 Coder 30B | 34.05 | Alibaba |
| 64 | Qwen3 Coder 30B A3B Instruct | 34.05 | Alibaba |
| 65 | Solar Pro 2 | 33.60 | Unknown |
| 66 | Gemini 2.0 Flash | 33.39 | Google |
| 67 | Gemini 2.0 Pro Experimental (02-05) | 32.98 | Google |
| 68 | Solar Pro 2 Preview | 32.86 | Unknown |
| 69 | Gemini 2.0 Flash Thinking exp. (Jan '25) | 32.46 | Google |
| 70 | DeepSeek R1 Distill Qwen 32B | 32.28 | Alibaba |
| 71 | Qwen3 235B | 32.07 | Alibaba |
| 72 | Qwen3 235B A22B Instruct | 32.07 | Alibaba |
| 73 | Gemini 2.5 Flash (April '25) | 31.95 | Google |
| 74 | Devstral Medium | 31.52 | Unknown |
| 75 | DeepSeek R1 Distill Qwen 14B | 30.72 | Alibaba |
| 76 | Gemini 1.5 Pro (Sep) | 30.58 | Google |
| 77 | GPT-4 Turbo | 30.52 | OpenAI |
| 78 | Llama 3.1 405B | 30.17 | Meta |
| 79 | Nova Premier | 29.82 | Unknown |
| 80 | Tulu 3 405B | 29.65 | Unknown |
| 81 | Claude 3.5 Haiku | 29.43 | Anthropic |
| 82 | Qwen3 30B A3B Instruct | 29.28 | Alibaba |
| 83 | GPT-4.1 nano | 29.26 | OpenAI |
| 84 | Mistral Large 2 (Nov '24) | 29.24 | Unknown |
| 85 | DeepSeek R1 Distill Llama 70B | 28.91 | Meta |
| 86 | Gemini 2.5 Flash-Lite | 28.85 | Google |
| 87 | Command A | 28.40 | Unknown |
| 88 | Qwen3 32B | 28.40 | Alibaba |
| 89 | Qwen3 32B Instruct | 28.40 | Alibaba |
| 90 | Qwen2.5 Coder 32B Instruct | 28.30 | Alibaba |
| 91 | Llama 3.3 Nemotron Super 49B Reasoning | 27.98 | Meta |
| 92 | Pixtral Large 2411 | 27.65 | Unknown |
| 93 | Grok 2 | 27.57 | xAI |
| 94 | Grok 2 1212 | 27.57 | xAI |
| 95 | Gemini 2.0 Flash Experimental | 27.49 | Google |
| 96 | Llama 3.3 70B | 27.41 | Meta |
| 97 | Qwen3 14B | 27.27 | Alibaba |
| 98 | Qwen3 14B Instruct | 27.27 | Alibaba |
| 99 | Qwen2.5 72B | 27.18 | Alibaba |
| 100 | Qwen2.5 72B Instruct | 27.18 | Alibaba |
| 101 | Gemini 1.5 Flash (Sep) | 27.02 | Google |
| 102 | Mistral Small 3.2 | 26.95 | Unknown |
| 103 | Grok Beta | 26.82 | xAI |
| 104 | Llama Nemotron Super 49B v1.5 | 26.42 | Meta |
| 105 | Sonar | 26.22 | Unknown |
| 106 | Gemini 1.5 Pro (May) | 25.90 | Google |
| 107 | Gemini 1.5 Pro (May 2024) | 25.90 | Google |
| 108 | Claude 3 Opus | 25.60 | Anthropic |
| 109 | Devstral Small (May '25) | 25.18 | Unknown |
| 110 | Sonar Pro | 25.04 | Unknown |
| 111 | Qwen3 4B (Reasoning) | 24.96 | Alibaba |
| 112 | Llama 3.1 70B | 24.96 | Meta |
| 113 | Devstral Small | 24.85 | Unknown |
| 114 | MiniMax Text 01 | 24.83 | Unknown |
| 115 | Phi-4 | 24.56 | Unknown |
| 116 | Mistral Small 3 | 24.40 | Unknown |
| 117 | Qwen2.5 Instruct 32B | 23.84 | Alibaba |
| 118 | Mistral Small 3.1 | 23.83 | Unknown |
| 119 | Llama 4 Scout | 23.48 | Meta |
| 120 | GPT-4o mini | 23.15 | OpenAI |
| 121 | Gemini 1.5 Flash-8B | 22.30 | Google |
| 122 | Nova Pro | 22.06 | Unknown |
| 123 | Gemini 2.0 Flash-Lite Preview | 21.27 | Google |
| 124 | DeepHermes 3 Mistral 24B Preview | 21.14 | Unknown |
| 125 | Hermes 3 Llama 3.1 70B | 20.99 | Meta |
| 126 | Gemma 2 27B | 20.22 | Unknown |
| 127 | Claude 3 Sonnet | 20.19 | Anthropic |
| 128 | Qwen3 4B Instruct | 19.97 | Alibaba |
| 129 | Qwen2 72B Instruct | 19.39 | Alibaba |
| 130 | Claude 2.1 | 18.94 | Anthropic |
| 131 | Gemini 1.5 Flash (May) | 18.82 | Google |
| 132 | Gemini 1.5 Flash (May 2024) | 18.82 | Google |
| 133 | Qwen3 8B | 18.50 | Alibaba |
| 134 | Qwen3 8B Instruct | 18.50 | Alibaba |
| 135 | Jamba 1.7 Large | 18.42 | Unknown |
| 136 | Claude 2.0 | 18.29 | Anthropic |
| 137 | Jamba 1.6 Large | 17.83 | Unknown |
| 138 | DeepSeek R1 Distill Llama 8B | 17.57 | Meta |
| 139 | Qwen3 1.7B (Reasoning) | 17.54 | Alibaba |
| 140 | Gemma 3 27B | 17.42 | Unknown |
| 141 | Claude 3 Haiku | 17.04 | Anthropic |
| 142 | Mixtral 8x22B | 16.78 | Unknown |
| 143 | Qwen2.5 Turbo | 15.79 | Alibaba |
| 144 | Gemma 3 12B | 15.51 | Unknown |
| 145 | Jamba 1.5 Large | 15.30 | Unknown |
| 146 | Nova Lite | 15.30 | Unknown |
| 147 | Mistral Small (Sep '24) | 14.85 | Unknown |
| 148 | DeepSeek Coder V2 Lite | 14.83 | Unknown |
| 149 | Yi-Large | 14.77 | Unknown |
| 150 | Aya Expanse 32B | 14.29 | Unknown |
| 151 | Qwen2.5 Coder 7B Instruct | 13.70 | Alibaba |
| 152 | Llama 3.1 8B | 12.42 | Meta |
| 153 | Phi-4 Multimodal | 12.06 | Unknown |
| 154 | Codestral-Mamba | 12.05 | Unknown |
| 155 | Phi-4 Mini | 11.68 | Unknown |
| 156 | Nova Micro | 11.67 | Unknown |
| 157 | Gemini 1.0 Pro | 11.67 | Google |
| 158 | Command R Plus | 11.63 | Unknown |
| 159 | Granite 3.3 8B | 11.38 | Unknown |
| 160 | Granite 3.3 8B Instruct | 11.38 | Unknown |
| 161 | Gemma 3n E4B | 11.35 | Unknown |
| 162 | Ministral 8B | 11.34 | Unknown |
| 163 | Mistral Medium | 10.88 | Unknown |
| 164 | Llama 2 Chat 13B | 10.82 | Meta |
| 165 | Llama 3 8B | 10.78 | Meta |
| 166 | DBRX | 10.56 | Unknown |
| 167 | Phi-3 Mini | 10.33 | Unknown |
| 168 | Gemma 3 4B | 9.25 | Unknown |
| 169 | DeepHermes 3 - Llama-3.1 8B | 8.80 | Meta |
| 170 | DeepHermes 3 Llama 3.1 8B Preview | 8.80 | Meta |
| 171 | Jamba 1.6 Mini | 8.58 | Unknown |
| 172 | LFM 40B | 8.34 | Unknown |
| 173 | Ministral 3B | 8.13 | Unknown |
| 174 | Mistral Nemo | 8.07 | Unknown |
| 175 | Jamba 1.7 Mini | 7.70 | Unknown |
| 176 | Qwen3 0.6B (Reasoning) | 7.42 | Alibaba |
| 177 | Aya Expanse 8B | 7.40 | Unknown |
| 178 | Gemma 3n E2B | 7.37 | Unknown |
| 179 | Jamba 1.5 Mini | 7.11 | Unknown |
| 180 | DeepSeek R1 Distill Qwen 1.5B | 6.80 | Alibaba |
| 181 | Llama 3.2 3B | 6.73 | Meta |
| 182 | Gemma 2 9B | 6.64 | Unknown |
| 183 | Command-R+ | 6.56 | Unknown |
| 184 | Jamba Instruct | 6.44 | Unknown |
| 185 | Qwen3 0.6B | 5.68 | Alibaba |
| 186 | Mixtral 8x7B Instruct | 4.67 | Unknown |
| 187 | Mistral 7B Instruct | 3.49 | Unknown |
| 188 | Llama 3.2 1B | 1.82 | Meta |
| 189 | Gemma 3 1B | 1.19 | Unknown |
| 190 | Llama 2 Chat 7B | 0.11 | Meta |
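The table above is easy to work with programmatically once exported to a flat file. The snippet below is a minimal sketch, not part of the leaderboard itself: it assumes the table has been saved as `leaderboard.csv` with the column headers shown above (`Rank`, `Model`, `Score`, `Creator`); the filename and the exact export format are assumptions, so adjust them to wherever the data actually lives.

```python
import csv
from collections import defaultdict

# Hypothetical export of the leaderboard table above.
CSV_PATH = "leaderboard.csv"

def best_per_creator(path: str) -> dict[str, tuple[str, float]]:
    """Return the highest-scoring (model, score) pair for each creator in the table."""
    best: dict[str, tuple[str, float]] = {}
    with open(path, newline="", encoding="utf-8") as f:
        for row in csv.DictReader(f):
            creator = row["Creator"]
            score = float(row["Score"])
            # Keep only the best entry seen so far for this creator.
            if creator not in best or score > best[creator][1]:
                best[creator] = (row["Model"], score)
    return best

if __name__ == "__main__":
    # Print creators ordered by their best model's score, highest first.
    for creator, (model, score) in sorted(
        best_per_creator(CSV_PATH).items(), key=lambda kv: kv[1][1], reverse=True
    ):
        print(f"{creator:12s} {model:45s} {score:6.2f}")
```

Grouping by the `Creator` column rather than by rank makes it easier to compare labs directly, since many creators appear dozens of times in the full list.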