| Rank | Model | Score | Organization |
|---|---|---|---|
| 1 | GPT-5 (high) | 97.53 | OpenAI |
| 2 | Grok 4 | 96.67 | xAI |
| 3 | o4-mini (high) | 96.43 | OpenAI |
| 4 | Grok 3 mini Reasoning (high) | 96.27 | xAI |
| 5 | Qwen3 235B 2507 (Reasoning) | 96.20 | Alibaba |
| 6 | GPT-5 (medium) | 95.40 | OpenAI |
| 7 | o3 | 94.77 | OpenAI |
| 8 | o3-pro | 94.77 | OpenAI |
| 9 | DeepSeek R1 0528 | 93.80 | DeepSeek |
| 10 | Gemini 2.5 Pro | 92.70 | Google |
| 11 | Gemini 2.5 Pro (05-06) | 92.70 | Google |
| 12 | Gemini 2.5 Pro (03-25) | 92.70 | Google |
| 13 | GLM-4.5 | 92.60 | Zhipu AI |
| 14 | o3-mini (high) | 92.23 | OpenAI |
| 15 | Llama Nemotron Super 49B v1.5 (Reasoning) | 92.17 | NVIDIA |
| 16 | Gemini 2.5 Pro (May '25) | 91.47 | Google |
| 17 | MiniMax M1 80k | 91.33 | MiniMax |
| 18 | EXAONE 4.0 32B (Reasoning) | 91.00 | LG AI Research |
| 19 | GPT-5 (low) | 90.87 | OpenAI |
| 20 | Gemini 2.5 Flash (Reasoning) | 90.23 | Google |
| 21 | Gemini 2.5 Flash Reasoning (April '25) | 90.23 | Google |
| 22 | MiniMax M1 40k | 89.27 | MiniMax |
| 23 | Qwen3 32B (Reasoning) | 88.37 | Alibaba |
| 24 | Claude 4 Sonnet Thinking | 88.20 | Anthropic |
| 25 | Sonar Reasoning Pro | 87.37 | Perplexity |
| 26 | o3-mini | 87.17 | OpenAI |
| 27 | Claude 4 Opus Thinking | 86.93 | Anthropic |
| 28 | QwQ-32B | 86.87 | Alibaba |
| 29 | Llama Nemotron Ultra Reasoning | 84.93 | NVIDIA |
| 30 | o1 | 84.67 | OpenAI |
| 31 | Sonar Reasoning | 84.57 | Perplexity |
| 32 | Magistral Small | 83.80 | Mistral AI |
| 33 | Gemini 2.5 Flash-Lite (Reasoning) | 83.60 | Google |
| 34 | Kimi K2 | 83.23 | Moonshot AI |
| 35 | Solar Pro 2 (Reasoning) | 82.83 | Upstage |
| 36 | GLM-4.5 Air | 81.93 | Zhipu AI |
| 37 | DeepSeek R1 Distill Qwen 32B | 81.37 | DeepSeek |
| 38 | Magistral Medium | 80.87 | Mistral AI |
| 39 | DeepSeek R1 Distill Qwen 14B | 80.77 | DeepSeek |
| 40 | DeepSeek R1 Distill Llama 70B | 80.23 | DeepSeek |
| 41 | Qwen3 4B (Reasoning) | 79.50 | Alibaba |
| 42 | o1-mini | 77.37 | OpenAI |
| 43 | Llama 3.3 Nemotron Super 49B Reasoning | 77.10 | NVIDIA |
| 44 | Claude 4 Opus | 75.20 | Anthropic |
| 45 | DeepSeek V3 0324 (Mar '25) | 73.10 | DeepSeek |
| 46 | Gemini 2.0 Flash Thinking exp. (Jan '25) | 72.20 | Google |
| 47 | Claude 3.7 Sonnet Thinking | 71.67 | Anthropic |
| 48 | Gemini 2.5 Flash | 71.60 | Google |
| 49 | Gemini 2.5 Flash-Lite | 71.30 | Google |
| 50 | Qwen3 Coder 480B | 70.93 | Alibaba |
| 51 | Qwen3 Coder 480B A35B Instruct | 70.93 | Alibaba |
| 52 | EXAONE 4.0 32B | 70.47 | LG AI Research |
| 53 | Qwen3 1.7B (Reasoning) | 70.20 | Alibaba |
| 54 | Reka Flash 3 | 70.13 | Reka AI |
| 55 | Gemini 2.5 Flash (April '25) | 67.97 | Google |
| 56 | GPT-4.1 | 67.47 | OpenAI |
| 57 | GPT-4.1 mini | 67.47 | OpenAI |
| 58 | Mistral Medium 3 | 67.33 | Mistral AI |
| 59 | Claude 4 Sonnet | 67.03 | Anthropic |
| 60 | Sonar | 65.17 | Perplexity |
| 61 | Solar Pro 2 | 64.77 | Upstage |
| 62 | Gemini 2.0 Pro Experimental (02-05) | 64.13 | Google |
| 63 | Llama 4 Maverick | 63.93 | Meta |
| 64 | Gemini 2.0 Flash | 63.00 | Google |
| 65 | Qwen3 235B | 61.43 | Alibaba |
| 66 | Qwen3 235B A22B Instruct | 61.43 | Alibaba |
| 67 | GPT-5 (minimal) | 61.40 | OpenAI |
| 68 | GPT-4o (March 2025) | 60.97 | OpenAI |
| 69 | Gemini 2.0 Flash Experimental | 60.53 | Google |
| 70 | Mistral Small 3.2 | 60.30 | Mistral AI |
| 71 | Grok 3 | 60.00 | xAI |
| 72 | Qwen3 Coder 30B | 59.50 | Alibaba |
| 73 | Qwen3 Coder 30B A3B Instruct | 59.50 | Alibaba |
| 74 | DeepSeek R1 Distill Llama 8B | 59.30 | DeepSeek |
| 75 | Gemini 2.0 Flash-Lite Preview | 58.83 | Google |
| 76 | Qwen3 32B | 58.60 | Alibaba |
| 77 | Qwen3 32B Instruct | 58.60 | Alibaba |
| 78 | Solar Pro 2 Preview | 58.37 | Upstage |
| 79 | Qwen3 14B | 57.57 | Alibaba |
| 80 | Qwen3 14B Instruct | 57.57 | Alibaba |
| 81 | DeepSeek V3 (Dec '24) | 57.00 | DeepSeek |
| 82 | Gemma 3 27B | 56.80 | Google |
| 83 | Llama 4 Scout | 56.37 | Meta |
| 84 | Qwen3 30B A3B Instruct | 56.13 | Alibaba |
| 85 | Gemini 1.5 Pro (Sep) | 55.30 | Google |
| 86 | GPT-4.1 nano | 54.23 | OpenAI |
| 87 | Claude 3.7 Sonnet | 53.67 | Anthropic |
| 88 | Gemma 3 12B | 53.67 | Google |
| 89 | Llama 3.3 70B | 53.63 | Meta |
| 90 | Qwen3 8B | 53.57 | Alibaba |
| 91 | Qwen3 8B Instruct | 53.57 | Alibaba |
| 92 | Qwen2.5 Max | 53.40 | Alibaba |
| 93 | Qwen3 4B Instruct | 52.80 | Alibaba |
| 94 | Sonar Pro | 51.73 | Perplexity |
| 95 | Qwen2.5 72B | 50.90 | Alibaba |
| 96 | Qwen2.5 72B Instruct | 50.90 | Alibaba |
| 97 | Nova Premier | 50.47 | Amazon |
| 98 | Gemini 1.5 Flash (Sep) | 50.37 | Google |
| 99 | Phi-4 | 47.67 | Microsoft |
| 100 | Claude 3.5 Sonnet (Oct) | 46.40 | Anthropic |
| 101 | Qwen2.5 Turbo | 46.27 | Alibaba |
| 102 | Llama 3.1 405B | 45.83 | Meta |
| 103 | Qwen2.5 Instruct 32B | 45.77 | Alibaba |
| 104 | Command A | 45.77 | Cohere |
| 105 | GPT-4o (Aug '24) | 45.57 | OpenAI |
| 106 | Grok 2 | 45.57 | xAI |
| 107 | Grok 2 (1212) | 45.57 | xAI |
| 108 | Tulu 3 405B | 45.57 | Ai2 |
| 109 | Gemma 3n E4B | 45.37 | Google |
| 110 | Llama Nemotron Super 49B v1.5 | 45.33 | NVIDIA |
| 111 | GPT-4o mini | 45.27 | OpenAI |
| 112 | GPT-4o (ChatGPT) | 45.03 | OpenAI |
| 113 | Nova Pro | 44.63 | Amazon |
| 114 | Qwen2.5 Coder 32B Instruct | 44.33 | Alibaba |
| 115 | GPT-4 Turbo | 44.33 | OpenAI |
| 116 | MiniMax-Text-01 | 44.17 | MiniMax |
| 117 | Nova Lite | 43.57 | Amazon |
| 118 | DeepSeek R1 Distill Qwen 1.5B | 43.20 | DeepSeek |
| 119 | Qwen3 0.6B (Reasoning) | 42.50 | Alibaba |
| 120 | Qwen2 72B Instruct | 42.40 | Alibaba |
| 121 | Mistral Large 2 (Nov '24) | 42.30 | Mistral AI |
| 122 | Grok Beta | 42.00 | xAI |
| 123 | Gemma 2 27B | 41.87 | Google |
| 124 | Gemma 3 4B | 41.47 | Google |
| 125 | Llama 3.1 70B | 41.13 | Meta |
| 126 | Mistral Saba | 40.33 | Mistral AI |
| 127 | Mistral Small 3.1 | 40.00 | Mistral AI |
| 128 | Mistral Small 3 | 39.77 | Mistral AI |
| 129 | Claude 3.5 Sonnet (June) | 39.60 | Anthropic |
| 130 | Phi-4 Multimodal | 39.30 | Microsoft |
| 131 | Pixtral Large 2411 | 39.20 | Mistral AI |
| 132 | Nova Micro | 39.17 | Amazon |
| 133 | Gemma 3n E2B | 39.07 | Google |
| 134 | Devstral Medium | 38.70 | Mistral AI |
| 135 | Claude 3.5 Haiku | 37.70 | Anthropic |
| 136 | Gemini 1.5 Pro (May) | 37.67 | Google |
| 137 | Gemini 1.5 Pro (May 2024) | 37.67 | Google |
| 138 | Devstral Small (May '25) | 37.53 | Mistral AI |
| 139 | Phi-4 Mini | 36.30 | Microsoft |
| 140 | Gemini 1.5 Flash-8B | 36.13 | Google |
| 141 | Qwen2.5 Coder 7B Instruct | 35.67 | Alibaba |
| 142 | Granite 3.3 8B | 35.57 | IBM |
| 143 | Granite 3.3 8B Instruct | 35.57 | IBM |
| 144 | Claude 3 Opus | 33.70 | Anthropic |
| 145 | Jamba 1.7 Large | 32.83 | AI21 Labs |
| 146 | Jamba 1.5 Large | 32.63 | AI21 Labs |
| 147 | Gemini 1.5 Flash (May) | 32.37 | Google |
| 148 | Gemini 1.5 Flash (May 2024) | 32.37 | Google |
| 149 | DeepHermes 3 Mistral 24B Preview | 32.07 | Nous Research |
| 150 | Devstral Small | 31.90 | Mistral AI |
| 151 | Yi-Large | 31.43 | 01.AI |
| 152 | Jamba 1.6 Large | 31.33 | AI21 Labs |
| 153 | Mistral Small (Sep '24) | 31.30 | Mistral AI |
| 154 | Ministral 8B | 30.37 | Mistral AI |
| 155 | Llama 3.1 8B | 29.80 | Meta |
| 156 | Hermes 3 Llama 3.1 70B | 28.07 | Nous Research |
| 157 | Llama 3.2 3B | 27.77 | Meta |
| 158 | Mixtral 8x22B | 27.23 | Mistral AI |
| 159 | Qwen3 0.6B | 26.87 | Alibaba |
| 160 | Ministral 3B | 26.83 | Mistral AI |
| 161 | Gemma 2 9B | 25.87 | Google |
| 162 | LFM 40B | 25.17 | Liquid AI |
| 163 | Llama 3 8B | 24.97 | Meta |
| 164 | Phi-3 Mini | 24.83 | Microsoft |
| 165 | Gemma 3 1B | 24.20 | Google |
| 166 | Claude 3 Sonnet | 23.03 | Anthropic |
| 167 | Aya Expanse 32B | 22.43 | Cohere |
| 168 | Mistral Medium | 22.07 | Mistral AI |
| 169 | Gemini 1.0 Pro | 20.47 | Google |
| 170 | Claude 2.1 | 20.37 | Anthropic |
| 171 | Claude 3 Haiku | 20.20 | Anthropic |
| 172 | Command R Plus | 20.10 | Cohere |
| 173 | Mistral Nemo | 19.90 | Mistral AI |
| 174 | Jamba 1.5 Mini | 18.33 | AI21 Labs |
| 175 | Llama 2 Chat 13B | 17.27 | Meta |
| 176 | Llama 2 Chat 70B | 16.13 | Meta |
| 177 | Aya Expanse 8B | 16.03 | Cohere |
| 178 | DBRX | 15.47 | Databricks |
| 179 | OpenChat 3.5 | 15.37 | Unknown |
| 180 | Mixtral 8x7B Instruct | 14.97 | Mistral AI |
| 181 | Jamba 1.6 Mini | 14.50 | AI21 Labs |
| 182 | Jamba 1.7 Mini | 13.57 | AI21 Labs |
| 183 | Claude Instant | 13.20 | Anthropic |
| 184 | Codestral Mamba | 12.00 | Mistral AI |
| 185 | DeepHermes 3 Llama 3.1 8B | 10.90 | Nous Research |
| 186 | DeepHermes 3 Llama 3.1 8B Preview | 10.90 | Nous Research |
| 187 | Command-R+ | 7.63 | Cohere |
| 188 | Llama 3.2 1B | 7.00 | Meta |
| 189 | Mistral 7B Instruct | 6.07 | Mistral AI |
| 190 | Llama 2 Chat 7B | 2.93 | Meta |