|
171 | 171 | "cost_per_1m_in_cached": 3.75, |
172 | 172 | "cost_per_1m_out_cached": 0.3, |
173 | 173 | "context_window": 200000, |
174 | | - "default_max_tokens": 32000, |
| 174 | + "default_max_tokens": 64000, |
175 | 175 | "can_reason": true, |
176 | 176 | "reasoning_levels": [ |
177 | 177 | "low", |
|
266 | 266 | "cost_per_1m_in_cached": 6.25, |
267 | 267 | "cost_per_1m_out_cached": 0.5, |
268 | 268 | "context_window": 200000, |
269 | | - "default_max_tokens": 32000, |
| 269 | + "default_max_tokens": 16000, |
270 | 270 | "can_reason": true, |
271 | 271 | "reasoning_levels": [ |
272 | 272 | "low", |
|
360 | 360 | "supports_attachments": true, |
361 | 361 | "options": {} |
362 | 362 | }, |
| 363 | + { |
| 364 | + "id": "openrouter/bert-nebulon-alpha", |
| 365 | + "name": "Bert-Nebulon Alpha", |
| 366 | + "cost_per_1m_in": 0, |
| 367 | + "cost_per_1m_out": 0, |
| 368 | + "cost_per_1m_in_cached": 0, |
| 369 | + "cost_per_1m_out_cached": 0, |
| 370 | + "context_window": 256000, |
| 371 | + "default_max_tokens": 25600, |
| 372 | + "can_reason": false, |
| 373 | + "supports_attachments": true, |
| 374 | + "options": {} |
| 375 | + }, |
363 | 376 | { |
364 | 377 | "id": "deepcogito/cogito-v2-preview-llama-109b-moe", |
365 | 378 | "name": "Cogito V2 Preview Llama 109B", |
|
497 | 510 | { |
498 | 511 | "id": "deepseek/deepseek-v3.1-terminus", |
499 | 512 | "name": "DeepSeek: DeepSeek V3.1 Terminus", |
500 | | - "cost_per_1m_in": 0.22999999999999998, |
501 | | - "cost_per_1m_out": 0.8999999999999999, |
| 513 | + "cost_per_1m_in": 0.27, |
| 514 | + "cost_per_1m_out": 1, |
502 | 515 | "cost_per_1m_in_cached": 0, |
503 | 516 | "cost_per_1m_out_cached": 0, |
504 | 517 | "context_window": 163840, |
505 | | - "default_max_tokens": 81920, |
| 518 | + "default_max_tokens": 16384, |
506 | 519 | "can_reason": true, |
507 | 520 | "reasoning_levels": [ |
508 | 521 | "low", |
|
693 | 706 | "cost_per_1m_in_cached": 0, |
694 | 707 | "cost_per_1m_out_cached": 0, |
695 | 708 | "context_window": 1048576, |
696 | | - "default_max_tokens": 32767, |
| 709 | + "default_max_tokens": 32768, |
697 | 710 | "can_reason": true, |
698 | 711 | "reasoning_levels": [ |
699 | 712 | "low", |
|
880 | 893 | { |
881 | 894 | "id": "meta-llama/llama-3.1-70b-instruct", |
882 | 895 | "name": "Meta: Llama 3.1 70B Instruct", |
883 | | - "cost_per_1m_in": 0.39999999999999997, |
884 | | - "cost_per_1m_out": 0.39999999999999997, |
| 896 | + "cost_per_1m_in": 0.88, |
| 897 | + "cost_per_1m_out": 0.88, |
885 | 898 | "cost_per_1m_in_cached": 0, |
886 | 899 | "cost_per_1m_out_cached": 0, |
887 | 900 | "context_window": 131072, |
888 | | - "default_max_tokens": 8192, |
| 901 | + "default_max_tokens": 13107, |
889 | 902 | "can_reason": false, |
890 | 903 | "supports_attachments": false, |
891 | 904 | "options": {} |
892 | 905 | }, |
893 | 906 | { |
894 | 907 | "id": "meta-llama/llama-3.1-8b-instruct", |
895 | 908 | "name": "Meta: Llama 3.1 8B Instruct", |
896 | | - "cost_per_1m_in": 0.03, |
897 | | - "cost_per_1m_out": 0.049999999999999996, |
| 909 | + "cost_per_1m_in": 0.02, |
| 910 | + "cost_per_1m_out": 0.03, |
898 | 911 | "cost_per_1m_in_cached": 0, |
899 | 912 | "cost_per_1m_out_cached": 0, |
900 | 913 | "context_window": 131072, |
|
919 | 932 | { |
920 | 933 | "id": "meta-llama/llama-3.3-70b-instruct", |
921 | 934 | "name": "Meta: Llama 3.3 70B Instruct", |
922 | | - "cost_per_1m_in": 0.25, |
923 | | - "cost_per_1m_out": 0.75, |
| 935 | + "cost_per_1m_in": 0.13, |
| 936 | + "cost_per_1m_out": 0.38, |
924 | 937 | "cost_per_1m_in_cached": 0, |
925 | 938 | "cost_per_1m_out_cached": 0, |
926 | 939 | "context_window": 131072, |
927 | | - "default_max_tokens": 13107, |
| 940 | + "default_max_tokens": 8192, |
928 | 941 | "can_reason": false, |
929 | 942 | "supports_attachments": false, |
930 | 943 | "options": {} |
|
1411 | 1424 | { |
1412 | 1425 | "id": "moonshotai/kimi-k2", |
1413 | 1426 | "name": "MoonshotAI: Kimi K2 0711", |
1414 | | - "cost_per_1m_in": 0.7, |
1415 | | - "cost_per_1m_out": 2.5, |
| 1427 | + "cost_per_1m_in": 0.5, |
| 1428 | + "cost_per_1m_out": 2.4, |
1416 | 1429 | "cost_per_1m_in_cached": 0, |
1417 | 1430 | "cost_per_1m_out_cached": 0, |
1418 | 1431 | "context_window": 131072, |
1419 | | - "default_max_tokens": 65536, |
| 1432 | + "default_max_tokens": 13107, |
1420 | 1433 | "can_reason": false, |
1421 | 1434 | "supports_attachments": false, |
1422 | 1435 | "options": {} |
1423 | 1436 | }, |
1424 | 1437 | { |
1425 | 1438 | "id": "moonshotai/kimi-k2-0905", |
1426 | 1439 | "name": "MoonshotAI: Kimi K2 0905", |
1427 | | - "cost_per_1m_in": 0.39, |
1428 | | - "cost_per_1m_out": 1.9, |
| 1440 | + "cost_per_1m_in": 0.4, |
| 1441 | + "cost_per_1m_out": 2, |
1429 | 1442 | "cost_per_1m_in_cached": 0, |
1430 | 1443 | "cost_per_1m_out_cached": 0, |
1431 | 1444 | "context_window": 262144, |
|
1437 | 1450 | { |
1438 | 1451 | "id": "moonshotai/kimi-k2-0905:exacto", |
1439 | 1452 | "name": "MoonshotAI: Kimi K2 0905 (exacto)", |
1440 | | - "cost_per_1m_in": 1, |
1441 | | - "cost_per_1m_out": 3, |
| 1453 | + "cost_per_1m_in": 0.6, |
| 1454 | + "cost_per_1m_out": 2.5, |
1442 | 1455 | "cost_per_1m_in_cached": 0, |
1443 | 1456 | "cost_per_1m_out_cached": 0, |
1444 | 1457 | "context_window": 262144, |
1445 | | - "default_max_tokens": 8192, |
| 1458 | + "default_max_tokens": 26214, |
1446 | 1459 | "can_reason": false, |
1447 | 1460 | "supports_attachments": false, |
1448 | 1461 | "options": {} |
|
1659 | 1672 | "cost_per_1m_in_cached": 0, |
1660 | 1673 | "cost_per_1m_out_cached": 0.5, |
1661 | 1674 | "context_window": 1047576, |
1662 | | - "default_max_tokens": 104757, |
| 1675 | + "default_max_tokens": 16384, |
1663 | 1676 | "can_reason": false, |
1664 | 1677 | "supports_attachments": true, |
1665 | 1678 | "options": {} |
|
2277 | 2290 | "supports_attachments": false, |
2278 | 2291 | "options": {} |
2279 | 2292 | }, |
2280 | | - { |
2281 | | - "id": "qwen/qwq-32b", |
2282 | | - "name": "Qwen: QwQ 32B", |
2283 | | - "cost_per_1m_in": 0.15, |
2284 | | - "cost_per_1m_out": 0.58, |
2285 | | - "cost_per_1m_in_cached": 0, |
2286 | | - "cost_per_1m_out_cached": 0, |
2287 | | - "context_window": 131072, |
2288 | | - "default_max_tokens": 65536, |
2289 | | - "can_reason": true, |
2290 | | - "reasoning_levels": [ |
2291 | | - "low", |
2292 | | - "medium", |
2293 | | - "high" |
2294 | | - ], |
2295 | | - "default_reasoning_effort": "medium", |
2296 | | - "supports_attachments": false, |
2297 | | - "options": {} |
2298 | | - }, |
2299 | 2293 | { |
2300 | 2294 | "id": "qwen/qwen-plus-2025-07-28", |
2301 | 2295 | "name": "Qwen: Qwen Plus 0728", |
|
2402 | 2396 | { |
2403 | 2397 | "id": "qwen/qwen3-235b-a22b", |
2404 | 2398 | "name": "Qwen: Qwen3 235B A22B", |
2405 | | - "cost_per_1m_in": 0.22, |
2406 | | - "cost_per_1m_out": 0.88, |
| 2399 | + "cost_per_1m_in": 0.18, |
| 2400 | + "cost_per_1m_out": 0.54, |
2407 | 2401 | "cost_per_1m_in_cached": 0, |
2408 | 2402 | "cost_per_1m_out_cached": 0, |
2409 | | - "context_window": 131072, |
2410 | | - "default_max_tokens": 13107, |
| 2403 | + "context_window": 40960, |
| 2404 | + "default_max_tokens": 20480, |
2411 | 2405 | "can_reason": true, |
2412 | 2406 | "reasoning_levels": [ |
2413 | 2407 | "low", |
|
2440 | 2434 | { |
2441 | 2435 | "id": "qwen/qwen3-235b-a22b-2507", |
2442 | 2436 | "name": "Qwen: Qwen3 235B A22B Instruct 2507", |
2443 | | - "cost_per_1m_in": 0.25, |
2444 | | - "cost_per_1m_out": 1, |
| 2437 | + "cost_per_1m_in": 0.09, |
| 2438 | + "cost_per_1m_out": 0.6, |
2445 | 2439 | "cost_per_1m_in_cached": 0, |
2446 | 2440 | "cost_per_1m_out_cached": 0, |
2447 | 2441 | "context_window": 262144, |
2448 | | - "default_max_tokens": 8192, |
| 2442 | + "default_max_tokens": 131072, |
2449 | 2443 | "can_reason": false, |
2450 | 2444 | "supports_attachments": false, |
2451 | 2445 | "options": {} |
2452 | 2446 | }, |
2453 | 2447 | { |
2454 | 2448 | "id": "qwen/qwen3-235b-a22b-thinking-2507", |
2455 | 2449 | "name": "Qwen: Qwen3 235B A22B Thinking 2507", |
2456 | | - "cost_per_1m_in": 0.11, |
| 2450 | + "cost_per_1m_in": 0.13, |
2457 | 2451 | "cost_per_1m_out": 0.6, |
2458 | 2452 | "cost_per_1m_in_cached": 0, |
2459 | 2453 | "cost_per_1m_out_cached": 0, |
|
2491 | 2485 | { |
2492 | 2486 | "id": "qwen/qwen3-30b-a3b-instruct-2507", |
2493 | 2487 | "name": "Qwen: Qwen3 30B A3B Instruct 2507", |
2494 | | - "cost_per_1m_in": 0.08, |
2495 | | - "cost_per_1m_out": 0.33, |
| 2488 | + "cost_per_1m_in": 0.09, |
| 2489 | + "cost_per_1m_out": 0.3, |
2496 | 2490 | "cost_per_1m_in_cached": 0, |
2497 | 2491 | "cost_per_1m_out_cached": 0, |
2498 | 2492 | "context_window": 262144, |
|
2561 | 2555 | { |
2562 | 2556 | "id": "qwen/qwen3-coder-30b-a3b-instruct", |
2563 | 2557 | "name": "Qwen: Qwen3 Coder 30B A3B Instruct", |
2564 | | - "cost_per_1m_in": 0.06, |
2565 | | - "cost_per_1m_out": 0.25, |
| 2558 | + "cost_per_1m_in": 0.1, |
| 2559 | + "cost_per_1m_out": 0.3, |
2566 | 2560 | "cost_per_1m_in_cached": 0, |
2567 | 2561 | "cost_per_1m_out_cached": 0, |
2568 | 2562 | "context_window": 262144, |
2569 | | - "default_max_tokens": 131072, |
| 2563 | + "default_max_tokens": 26214, |
2570 | 2564 | "can_reason": false, |
2571 | 2565 | "supports_attachments": false, |
2572 | 2566 | "options": {} |
2573 | 2567 | }, |
2574 | 2568 | { |
2575 | 2569 | "id": "qwen/qwen3-coder", |
2576 | 2570 | "name": "Qwen: Qwen3 Coder 480B A35B", |
2577 | | - "cost_per_1m_in": 0.25, |
2578 | | - "cost_per_1m_out": 1, |
| 2571 | + "cost_per_1m_in": 0.29, |
| 2572 | + "cost_per_1m_out": 1.2, |
2579 | 2573 | "cost_per_1m_in_cached": 0, |
2580 | 2574 | "cost_per_1m_out_cached": 0, |
2581 | 2575 | "context_window": 262144, |
2582 | | - "default_max_tokens": 131072, |
| 2576 | + "default_max_tokens": 32768, |
2583 | 2577 | "can_reason": false, |
2584 | 2578 | "supports_attachments": false, |
2585 | 2579 | "options": {} |
|
2658 | 2652 | { |
2659 | 2653 | "id": "qwen/qwen3-next-80b-a3b-instruct", |
2660 | 2654 | "name": "Qwen: Qwen3 Next 80B A3B Instruct", |
2661 | | - "cost_per_1m_in": 0.09999999999999999, |
2662 | | - "cost_per_1m_out": 0.7999999999999999, |
| 2655 | + "cost_per_1m_in": 0.14, |
| 2656 | + "cost_per_1m_out": 1.4, |
2663 | 2657 | "cost_per_1m_in_cached": 0, |
2664 | 2658 | "cost_per_1m_out_cached": 0, |
2665 | 2659 | "context_window": 262144, |
|
2690 | 2684 | { |
2691 | 2685 | "id": "qwen/qwen3-vl-235b-a22b-instruct", |
2692 | 2686 | "name": "Qwen: Qwen3 VL 235B A22B Instruct", |
2693 | | - "cost_per_1m_in": 0.22, |
2694 | | - "cost_per_1m_out": 0.88, |
| 2687 | + "cost_per_1m_in": 0.3, |
| 2688 | + "cost_per_1m_out": 1.5, |
2695 | 2689 | "cost_per_1m_in_cached": 0, |
2696 | 2690 | "cost_per_1m_out_cached": 0, |
2697 | 2691 | "context_window": 262144, |
2698 | | - "default_max_tokens": 26214, |
| 2692 | + "default_max_tokens": 131072, |
2699 | 2693 | "can_reason": false, |
2700 | 2694 | "supports_attachments": true, |
2701 | 2695 | "options": {} |
|
2722 | 2716 | { |
2723 | 2717 | "id": "qwen/qwen3-vl-30b-a3b-instruct", |
2724 | 2718 | "name": "Qwen: Qwen3 VL 30B A3B Instruct", |
2725 | | - "cost_per_1m_in": 0.29, |
2726 | | - "cost_per_1m_out": 1, |
| 2719 | + "cost_per_1m_in": 0.15, |
| 2720 | + "cost_per_1m_out": 0.6, |
2727 | 2721 | "cost_per_1m_in_cached": 0, |
2728 | 2722 | "cost_per_1m_out_cached": 0, |
2729 | 2723 | "context_window": 262144, |
2730 | | - "default_max_tokens": 131072, |
| 2724 | + "default_max_tokens": 26214, |
2731 | 2725 | "can_reason": false, |
2732 | 2726 | "supports_attachments": true, |
2733 | 2727 | "options": {} |
|
2889 | 2883 | "id": "alibaba/tongyi-deepresearch-30b-a3b", |
2890 | 2884 | "name": "Tongyi DeepResearch 30B A3B", |
2891 | 2885 | "cost_per_1m_in": 0.09, |
2892 | | - "cost_per_1m_out": 0.39999999999999997, |
| 2886 | + "cost_per_1m_out": 0.45, |
2893 | 2887 | "cost_per_1m_in_cached": 0, |
2894 | 2888 | "cost_per_1m_out_cached": 0, |
2895 | 2889 | "context_window": 131072, |
|
2939 | 2933 | { |
2940 | 2934 | "id": "z-ai/glm-4.5", |
2941 | 2935 | "name": "Z.AI: GLM 4.5", |
2942 | | - "cost_per_1m_in": 0.35, |
2943 | | - "cost_per_1m_out": 1.5, |
| 2936 | + "cost_per_1m_in": 0.6, |
| 2937 | + "cost_per_1m_out": 2.2, |
2944 | 2938 | "cost_per_1m_in_cached": 0, |
2945 | | - "cost_per_1m_out_cached": 0, |
| 2939 | + "cost_per_1m_out_cached": 0.11, |
2946 | 2940 | "context_window": 131072, |
2947 | | - "default_max_tokens": 65536, |
| 2941 | + "default_max_tokens": 48000, |
2948 | 2942 | "can_reason": true, |
2949 | 2943 | "reasoning_levels": [ |
2950 | 2944 | "low", |
|
2996 | 2990 | { |
2997 | 2991 | "id": "z-ai/glm-4.5v", |
2998 | 2992 | "name": "Z.AI: GLM 4.5V", |
2999 | | - "cost_per_1m_in": 0.6, |
3000 | | - "cost_per_1m_out": 1.7999999999999998, |
| 2993 | + "cost_per_1m_in": 0.48, |
| 2994 | + "cost_per_1m_out": 1.44, |
3001 | 2995 | "cost_per_1m_in_cached": 0, |
3002 | 2996 | "cost_per_1m_out_cached": 0.11, |
3003 | 2997 | "context_window": 65536, |
|
3015 | 3009 | { |
3016 | 3010 | "id": "z-ai/glm-4.6", |
3017 | 3011 | "name": "Z.AI: GLM 4.6", |
3018 | | - "cost_per_1m_in": 0.48, |
| 3012 | + "cost_per_1m_in": 0.44, |
3019 | 3013 | "cost_per_1m_out": 1.76, |
3020 | 3014 | "cost_per_1m_in_cached": 0, |
3021 | 3015 | "cost_per_1m_out_cached": 0.11, |
|
3034 | 3028 | { |
3035 | 3029 | "id": "z-ai/glm-4.6:exacto", |
3036 | 3030 | "name": "Z.AI: GLM 4.6 (exacto)", |
3037 | | - "cost_per_1m_in": 0.48, |
| 3031 | + "cost_per_1m_in": 0.44, |
3038 | 3032 | "cost_per_1m_out": 1.76, |
3039 | 3033 | "cost_per_1m_in_cached": 0, |
3040 | 3034 | "cost_per_1m_out_cached": 0, |
|
3053 | 3047 | { |
3054 | 3048 | "id": "x-ai/grok-3", |
3055 | 3049 | "name": "xAI: Grok 3", |
3056 | | - "cost_per_1m_in": 5, |
3057 | | - "cost_per_1m_out": 25, |
| 3050 | + "cost_per_1m_in": 3, |
| 3051 | + "cost_per_1m_out": 15, |
3058 | 3052 | "cost_per_1m_in_cached": 0, |
3059 | | - "cost_per_1m_out_cached": 1.25, |
| 3053 | + "cost_per_1m_out_cached": 0.75, |
3060 | 3054 | "context_window": 131072, |
3061 | 3055 | "default_max_tokens": 13107, |
3062 | 3056 | "can_reason": false, |
|
3066 | 3060 | { |
3067 | 3061 | "id": "x-ai/grok-3-beta", |
3068 | 3062 | "name": "xAI: Grok 3 Beta", |
3069 | | - "cost_per_1m_in": 5, |
3070 | | - "cost_per_1m_out": 25, |
| 3063 | + "cost_per_1m_in": 3, |
| 3064 | + "cost_per_1m_out": 15, |
3071 | 3065 | "cost_per_1m_in_cached": 0, |
3072 | | - "cost_per_1m_out_cached": 1.25, |
| 3066 | + "cost_per_1m_out_cached": 0.75, |
3073 | 3067 | "context_window": 131072, |
3074 | 3068 | "default_max_tokens": 13107, |
3075 | 3069 | "can_reason": false, |
|
0 commit comments