|
171 | 171 | "cost_per_1m_in_cached": 3.75, |
172 | 172 | "cost_per_1m_out_cached": 0.3, |
173 | 173 | "context_window": 200000, |
174 | | - "default_max_tokens": 32000, |
| 174 | + "default_max_tokens": 64000, |
175 | 175 | "can_reason": true, |
176 | 176 | "has_reasoning_efforts": true, |
177 | 177 | "supports_attachments": true |
|
322 | 322 | { |
323 | 323 | "id": "deepseek/deepseek-chat", |
324 | 324 | "name": "DeepSeek: DeepSeek V3", |
325 | | - "cost_per_1m_in": 0.8999999999999999, |
326 | | - "cost_per_1m_out": 0.8999999999999999, |
| 325 | + "cost_per_1m_in": 0.39999999999999997, |
| 326 | + "cost_per_1m_out": 1.3, |
327 | 327 | "cost_per_1m_in_cached": 0, |
328 | 328 | "cost_per_1m_out_cached": 0, |
329 | | - "context_window": 131072, |
330 | | - "default_max_tokens": 13107, |
| 329 | + "context_window": 64000, |
| 330 | + "default_max_tokens": 8000, |
331 | 331 | "can_reason": false, |
332 | 332 | "has_reasoning_efforts": false, |
333 | 333 | "supports_attachments": false |
|
361 | 361 | { |
362 | 362 | "id": "deepseek/deepseek-chat-v3.1", |
363 | 363 | "name": "DeepSeek: DeepSeek V3.1", |
364 | | - "cost_per_1m_in": 0.27, |
365 | | - "cost_per_1m_out": 1, |
| 364 | + "cost_per_1m_in": 0.56, |
| 365 | + "cost_per_1m_out": 1.68, |
366 | 366 | "cost_per_1m_in_cached": 0, |
367 | 367 | "cost_per_1m_out_cached": 0, |
368 | 368 | "context_window": 163840, |
|
400 | 400 | { |
401 | 401 | "id": "deepseek/deepseek-r1-0528", |
402 | 402 | "name": "DeepSeek: R1 0528", |
403 | | - "cost_per_1m_in": 2.5500000000000003, |
404 | | - "cost_per_1m_out": 5.95, |
| 403 | + "cost_per_1m_in": 3, |
| 404 | + "cost_per_1m_out": 8, |
405 | 405 | "cost_per_1m_in_cached": 0, |
406 | 406 | "cost_per_1m_out_cached": 0, |
407 | 407 | "context_window": 163840, |
408 | | - "default_max_tokens": 65536, |
| 408 | + "default_max_tokens": 16384, |
409 | 409 | "can_reason": true, |
410 | 410 | "has_reasoning_efforts": true, |
411 | 411 | "supports_attachments": false |
|
660 | 660 | { |
661 | 661 | "id": "meta-llama/llama-3.1-70b-instruct", |
662 | 662 | "name": "Meta: Llama 3.1 70B Instruct", |
663 | | - "cost_per_1m_in": 0.8999999999999999, |
664 | | - "cost_per_1m_out": 0.8999999999999999, |
| 663 | + "cost_per_1m_in": 0.39999999999999997, |
| 664 | + "cost_per_1m_out": 0.39999999999999997, |
665 | 665 | "cost_per_1m_in_cached": 0, |
666 | 666 | "cost_per_1m_out_cached": 0, |
667 | 667 | "context_window": 131072, |
|
738 | 738 | { |
739 | 739 | "id": "meta-llama/llama-4-maverick", |
740 | 740 | "name": "Meta: Llama 4 Maverick", |
741 | | - "cost_per_1m_in": 0.18, |
742 | | - "cost_per_1m_out": 0.6, |
| 741 | + "cost_per_1m_in": 0.22, |
| 742 | + "cost_per_1m_out": 0.88, |
743 | 743 | "cost_per_1m_in_cached": 0, |
744 | 744 | "cost_per_1m_out_cached": 0, |
745 | 745 | "context_window": 1048576, |
746 | | - "default_max_tokens": 524288, |
| 746 | + "default_max_tokens": 104857, |
747 | 747 | "can_reason": false, |
748 | 748 | "has_reasoning_efforts": false, |
749 | 749 | "supports_attachments": true |
|
1128 | 1128 | { |
1129 | 1129 | "id": "mistralai/mistral-small-3.2-24b-instruct", |
1130 | 1130 | "name": "Mistral: Mistral Small 3.2 24B", |
1131 | | - "cost_per_1m_in": 0.09999999999999999, |
1132 | | - "cost_per_1m_out": 0.3, |
| 1131 | + "cost_per_1m_in": 0.14, |
| 1132 | + "cost_per_1m_out": 0.5700000000000001, |
1133 | 1133 | "cost_per_1m_in_cached": 0, |
1134 | 1134 | "cost_per_1m_out_cached": 0, |
1135 | 1135 | "context_window": 131072, |
|
1177 | 1177 | "has_reasoning_efforts": false, |
1178 | 1178 | "supports_attachments": false |
1179 | 1179 | }, |
| 1180 | + { |
| 1181 | + "id": "mistralai/pixtral-12b", |
| 1182 | + "name": "Mistral: Pixtral 12B", |
| 1183 | + "cost_per_1m_in": 0.15, |
| 1184 | + "cost_per_1m_out": 0.15, |
| 1185 | + "cost_per_1m_in_cached": 0, |
| 1186 | + "cost_per_1m_out_cached": 0, |
| 1187 | + "context_window": 131072, |
| 1188 | + "default_max_tokens": 13107, |
| 1189 | + "can_reason": false, |
| 1190 | + "has_reasoning_efforts": false, |
| 1191 | + "supports_attachments": true |
| 1192 | + }, |
1180 | 1193 | { |
1181 | 1194 | "id": "mistralai/pixtral-large-2411", |
1182 | 1195 | "name": "Mistral: Pixtral Large 2411", |
|
1232 | 1245 | { |
1233 | 1246 | "id": "moonshotai/kimi-k2-0905", |
1234 | 1247 | "name": "MoonshotAI: Kimi K2 0905", |
1235 | | - "cost_per_1m_in": 0.5, |
1236 | | - "cost_per_1m_out": 2, |
| 1248 | + "cost_per_1m_in": 0.38, |
| 1249 | + "cost_per_1m_out": 1.52, |
1237 | 1250 | "cost_per_1m_in_cached": 0, |
1238 | 1251 | "cost_per_1m_out_cached": 0, |
1239 | 1252 | "context_window": 262144, |
|
1640 | 1653 | "cost_per_1m_in_cached": 0, |
1641 | 1654 | "cost_per_1m_out_cached": 0, |
1642 | 1655 | "context_window": 131072, |
1643 | | - "default_max_tokens": 16384, |
| 1656 | + "default_max_tokens": 65536, |
1644 | 1657 | "can_reason": true, |
1645 | 1658 | "has_reasoning_efforts": true, |
1646 | 1659 | "supports_attachments": false |
|
1856 | 1869 | { |
1857 | 1870 | "id": "qwen/qwen3-235b-a22b-2507", |
1858 | 1871 | "name": "Qwen: Qwen3 235B A22B Instruct 2507", |
1859 | | - "cost_per_1m_in": 0.19999999999999998, |
1860 | | - "cost_per_1m_out": 0.6, |
| 1872 | + "cost_per_1m_in": 0.22, |
| 1873 | + "cost_per_1m_out": 0.7999999999999999, |
1861 | 1874 | "cost_per_1m_in_cached": 0, |
1862 | 1875 | "cost_per_1m_out_cached": 0, |
1863 | 1876 | "context_window": 262144, |
1864 | | - "default_max_tokens": 26214, |
| 1877 | + "default_max_tokens": 131072, |
1865 | 1878 | "can_reason": false, |
1866 | 1879 | "has_reasoning_efforts": false, |
1867 | 1880 | "supports_attachments": false |
1868 | 1881 | }, |
1869 | 1882 | { |
1870 | 1883 | "id": "qwen/qwen3-235b-a22b-thinking-2507", |
1871 | 1884 | "name": "Qwen: Qwen3 235B A22B Thinking 2507", |
1872 | | - "cost_per_1m_in": 0.13, |
1873 | | - "cost_per_1m_out": 0.6, |
| 1885 | + "cost_per_1m_in": 0.09999999999999999, |
| 1886 | + "cost_per_1m_out": 0.39, |
1874 | 1887 | "cost_per_1m_in_cached": 0, |
1875 | 1888 | "cost_per_1m_out_cached": 0, |
1876 | 1889 | "context_window": 262144, |
1877 | | - "default_max_tokens": 131072, |
| 1890 | + "default_max_tokens": 26214, |
1878 | 1891 | "can_reason": true, |
1879 | 1892 | "has_reasoning_efforts": true, |
1880 | 1893 | "supports_attachments": false |
1881 | 1894 | }, |
1882 | 1895 | { |
1883 | 1896 | "id": "qwen/qwen3-30b-a3b", |
1884 | 1897 | "name": "Qwen: Qwen3 30B A3B", |
1885 | | - "cost_per_1m_in": 0.09, |
1886 | | - "cost_per_1m_out": 0.44999999999999996, |
| 1898 | + "cost_per_1m_in": 0.08, |
| 1899 | + "cost_per_1m_out": 0.28, |
1887 | 1900 | "cost_per_1m_in_cached": 0, |
1888 | 1901 | "cost_per_1m_out_cached": 0, |
1889 | 1902 | "context_window": 131072, |
|
1895 | 1908 | { |
1896 | 1909 | "id": "qwen/qwen3-30b-a3b-instruct-2507", |
1897 | 1910 | "name": "Qwen: Qwen3 30B A3B Instruct 2507", |
1898 | | - "cost_per_1m_in": 0.09999999999999999, |
| 1911 | + "cost_per_1m_in": 0.09, |
1899 | 1912 | "cost_per_1m_out": 0.3, |
1900 | 1913 | "cost_per_1m_in_cached": 0, |
1901 | 1914 | "cost_per_1m_out_cached": 0, |
1902 | 1915 | "context_window": 262144, |
1903 | | - "default_max_tokens": 26214, |
| 1916 | + "default_max_tokens": 131072, |
1904 | 1917 | "can_reason": false, |
1905 | 1918 | "has_reasoning_efforts": false, |
1906 | 1919 | "supports_attachments": false |
1907 | 1920 | }, |
1908 | 1921 | { |
1909 | 1922 | "id": "qwen/qwen3-30b-a3b-thinking-2507", |
1910 | 1923 | "name": "Qwen: Qwen3 30B A3B Thinking 2507", |
1911 | | - "cost_per_1m_in": 0.09, |
| 1924 | + "cost_per_1m_in": 0.09999999999999999, |
1912 | 1925 | "cost_per_1m_out": 0.3, |
1913 | 1926 | "cost_per_1m_in_cached": 0, |
1914 | 1927 | "cost_per_1m_out_cached": 0, |
1915 | 1928 | "context_window": 262144, |
1916 | | - "default_max_tokens": 65536, |
| 1929 | + "default_max_tokens": 26214, |
1917 | 1930 | "can_reason": true, |
1918 | 1931 | "has_reasoning_efforts": true, |
1919 | 1932 | "supports_attachments": false |
1920 | 1933 | }, |
1921 | 1934 | { |
1922 | 1935 | "id": "qwen/qwen3-32b", |
1923 | 1936 | "name": "Qwen: Qwen3 32B", |
1924 | | - "cost_per_1m_in": 0.15, |
1925 | | - "cost_per_1m_out": 0.5, |
| 1937 | + "cost_per_1m_in": 0.39999999999999997, |
| 1938 | + "cost_per_1m_out": 0.7999999999999999, |
1926 | 1939 | "cost_per_1m_in_cached": 0, |
1927 | 1940 | "cost_per_1m_out_cached": 0, |
1928 | 1941 | "context_window": 131072, |
1929 | | - "default_max_tokens": 4000, |
| 1942 | + "default_max_tokens": 16384, |
1930 | 1943 | "can_reason": true, |
1931 | 1944 | "has_reasoning_efforts": true, |
1932 | 1945 | "supports_attachments": false |
|
1947 | 1960 | { |
1948 | 1961 | "id": "qwen/qwen3-coder-30b-a3b-instruct", |
1949 | 1962 | "name": "Qwen: Qwen3 Coder 30B A3B Instruct", |
1950 | | - "cost_per_1m_in": 0.07, |
1951 | | - "cost_per_1m_out": 0.28, |
| 1963 | + "cost_per_1m_in": 0.09999999999999999, |
| 1964 | + "cost_per_1m_out": 0.3, |
1952 | 1965 | "cost_per_1m_in_cached": 0, |
1953 | 1966 | "cost_per_1m_out_cached": 0, |
1954 | 1967 | "context_window": 262144, |
1955 | | - "default_max_tokens": 131072, |
| 1968 | + "default_max_tokens": 26214, |
1956 | 1969 | "can_reason": false, |
1957 | 1970 | "has_reasoning_efforts": false, |
1958 | 1971 | "supports_attachments": false |
1959 | 1972 | }, |
1960 | 1973 | { |
1961 | 1974 | "id": "qwen/qwen3-coder", |
1962 | 1975 | "name": "Qwen: Qwen3 Coder 480B A35B", |
1963 | | - "cost_per_1m_in": 0.29, |
1964 | | - "cost_per_1m_out": 1.2, |
| 1976 | + "cost_per_1m_in": 0.39999999999999997, |
| 1977 | + "cost_per_1m_out": 1.5999999999999999, |
1965 | 1978 | "cost_per_1m_in_cached": 0, |
1966 | 1979 | "cost_per_1m_out_cached": 0, |
1967 | 1980 | "context_window": 262144, |
1968 | | - "default_max_tokens": 32768, |
| 1981 | + "default_max_tokens": 131072, |
1969 | 1982 | "can_reason": false, |
1970 | 1983 | "has_reasoning_efforts": false, |
1971 | 1984 | "supports_attachments": false |
|
2025 | 2038 | { |
2026 | 2039 | "id": "qwen/qwen3-next-80b-a3b-instruct", |
2027 | 2040 | "name": "Qwen: Qwen3 Next 80B A3B Instruct", |
2028 | | - "cost_per_1m_in": 0.14, |
2029 | | - "cost_per_1m_out": 1.4, |
| 2041 | + "cost_per_1m_in": 0.15, |
| 2042 | + "cost_per_1m_out": 1.5, |
2030 | 2043 | "cost_per_1m_in_cached": 0, |
2031 | 2044 | "cost_per_1m_out_cached": 0, |
2032 | 2045 | "context_window": 262144, |
2033 | | - "default_max_tokens": 26214, |
| 2046 | + "default_max_tokens": 131072, |
2034 | 2047 | "can_reason": false, |
2035 | 2048 | "has_reasoning_efforts": false, |
2036 | 2049 | "supports_attachments": false |
|
2113 | 2126 | "has_reasoning_efforts": false, |
2114 | 2127 | "supports_attachments": false |
2115 | 2128 | }, |
| 2129 | + { |
| 2130 | + "id": "alibaba/tongyi-deepresearch-30b-a3b", |
| 2131 | + "name": "Tongyi DeepResearch 30B A3B", |
| 2132 | + "cost_per_1m_in": 0.09, |
| 2133 | + "cost_per_1m_out": 0.44999999999999996, |
| 2134 | + "cost_per_1m_in_cached": 0, |
| 2135 | + "cost_per_1m_out_cached": 0, |
| 2136 | + "context_window": 131072, |
| 2137 | + "default_max_tokens": 65536, |
| 2138 | + "can_reason": true, |
| 2139 | + "has_reasoning_efforts": true, |
| 2140 | + "supports_attachments": false |
| 2141 | + }, |
2116 | 2142 | { |
2117 | 2143 | "id": "z-ai/glm-4-32b", |
2118 | 2144 | "name": "Z.AI: GLM 4 32B ", |
|
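The cost fields in the entries above are, as their names suggest, rates per one million tokens, and `default_max_tokens` is the completion cap applied when a caller does not specify one. As a rough illustration of how a consumer of this file might turn one of these entries into a per-request cost estimate, here is a minimal Go sketch; the `Model` struct and `estimateCost` helper are hypothetical names that simply mirror the JSON keys shown in the diff, not part of this repository's actual API.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Model mirrors the JSON keys used in the entries above.
// Illustrative only; the field and type names are assumptions.
type Model struct {
	ID                  string  `json:"id"`
	Name                string  `json:"name"`
	CostPer1MIn         float64 `json:"cost_per_1m_in"`
	CostPer1MOut        float64 `json:"cost_per_1m_out"`
	CostPer1MInCached   float64 `json:"cost_per_1m_in_cached"`
	CostPer1MOutCached  float64 `json:"cost_per_1m_out_cached"`
	ContextWindow       int64   `json:"context_window"`
	DefaultMaxTokens    int64   `json:"default_max_tokens"`
	CanReason           bool    `json:"can_reason"`
	HasReasoningEfforts bool    `json:"has_reasoning_efforts"`
	SupportsAttachments bool    `json:"supports_attachments"`
}

// estimateCost converts prompt and completion token counts into dollars
// using the per-million-token rates from a model entry.
func estimateCost(m Model, inTokens, outTokens int64) float64 {
	return float64(inTokens)/1_000_000*m.CostPer1MIn +
		float64(outTokens)/1_000_000*m.CostPer1MOut
}

func main() {
	// Entry copied from the diff above (deepseek/deepseek-chat after the change).
	raw := `{
		"id": "deepseek/deepseek-chat",
		"name": "DeepSeek: DeepSeek V3",
		"cost_per_1m_in": 0.39999999999999997,
		"cost_per_1m_out": 1.3,
		"context_window": 64000,
		"default_max_tokens": 8000
	}`

	var m Model
	if err := json.Unmarshal([]byte(raw), &m); err != nil {
		panic(err)
	}

	// 10k prompt tokens and 2k completion tokens:
	// 0.01 * 0.4 + 0.002 * 1.3 ≈ $0.0066
	fmt.Printf("%s: $%.4f\n", m.Name, estimateCost(m, 10_000, 2_000))
}
```

A side note on the `default_max_tokens` changes: several of the updated values appear to be either a fixed model-specific cap (e.g. 16384, 65536) or roughly one tenth of `context_window` (e.g. 26214 for a 262144 window, 104857 for a 1048576 window); that pattern is an observation from the data above, not a documented rule.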