
Commit 5e2df8e

chore: auto-update generated files
1 parent 607a9df commit 5e2df8e

File tree: 1 file changed (+69, -95 lines)


internal/providers/configs/openrouter.json

Lines changed: 69 additions & 95 deletions
@@ -10,8 +10,8 @@
     {
       "id": "qwen/qwen3-next-80b-a3b-thinking",
       "name": "Qwen: Qwen3 Next 80B A3B Thinking",
-      "cost_per_1m_in": 0.14673906,
-      "cost_per_1m_out": 0.586956456,
+      "cost_per_1m_in": 0.14,
+      "cost_per_1m_out": 1.4,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
@@ -23,25 +23,12 @@
     {
       "id": "qwen/qwen3-next-80b-a3b-instruct",
       "name": "Qwen: Qwen3 Next 80B A3B Instruct",
-      "cost_per_1m_in": 0.3,
-      "cost_per_1m_out": 0.3,
+      "cost_per_1m_in": 0.14,
+      "cost_per_1m_out": 1.4,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 131072,
-      "can_reason": false,
-      "has_reasoning_efforts": false,
-      "supports_attachments": false
-    },
-    {
-      "id": "meituan/longcat-flash-chat",
-      "name": "Meituan: LongCat Flash Chat",
-      "cost_per_1m_in": 0.24999987999999998,
-      "cost_per_1m_out": 0.999999888,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131072,
-      "default_max_tokens": 13107,
+      "default_max_tokens": 26214,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -140,12 +127,12 @@
     {
       "id": "moonshotai/kimi-k2-0905",
       "name": "MoonshotAI: Kimi K2 0905",
-      "cost_per_1m_in": 0.58,
-      "cost_per_1m_out": 2.29,
+      "cost_per_1m_in": 0.5,
+      "cost_per_1m_out": 2,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 26214,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -244,8 +231,8 @@
     {
       "id": "deepseek/deepseek-chat-v3.1",
       "name": "DeepSeek: DeepSeek V3.1",
-      "cost_per_1m_in": 0.27,
-      "cost_per_1m_out": 1,
+      "cost_per_1m_in": 0.56,
+      "cost_per_1m_out": 1.68,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 163840,
@@ -283,12 +270,12 @@
     {
       "id": "z-ai/glm-4.5v",
       "name": "Z.AI: GLM 4.5V",
-      "cost_per_1m_in": 0.5,
+      "cost_per_1m_in": 0.6,
       "cost_per_1m_out": 1.7999999999999998,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 0.11,
       "context_window": 65536,
-      "default_max_tokens": 32768,
+      "default_max_tokens": 8192,
       "can_reason": true,
       "has_reasoning_efforts": true,
       "supports_attachments": true
@@ -361,12 +348,12 @@
     {
       "id": "openai/gpt-oss-120b",
       "name": "OpenAI: gpt-oss-120b",
-      "cost_per_1m_in": 0.15,
-      "cost_per_1m_out": 0.6,
+      "cost_per_1m_in": 0.09999999999999999,
+      "cost_per_1m_out": 0.49,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 16384,
+      "default_max_tokens": 13107,
       "can_reason": true,
       "has_reasoning_efforts": true,
       "supports_attachments": false
@@ -439,8 +426,8 @@
     {
       "id": "z-ai/glm-4.5",
       "name": "Z.AI: GLM 4.5",
-      "cost_per_1m_in": 0.6,
-      "cost_per_1m_out": 2.2,
+      "cost_per_1m_in": 0.41249980199999997,
+      "cost_per_1m_out": 1.6499998152000002,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
@@ -517,12 +504,12 @@
     {
       "id": "qwen/qwen3-coder",
       "name": "Qwen: Qwen3 Coder 480B A35B",
-      "cost_per_1m_in": 0.39999999999999997,
-      "cost_per_1m_out": 1.7999999999999998,
+      "cost_per_1m_in": 0.29,
+      "cost_per_1m_out": 1.2,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 32768,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -543,12 +530,12 @@
     {
       "id": "qwen/qwen3-235b-a22b-2507",
       "name": "Qwen: Qwen3 235B A22B Instruct 2507",
-      "cost_per_1m_in": 0.22,
-      "cost_per_1m_out": 0.7999999999999999,
+      "cost_per_1m_in": 0.13,
+      "cost_per_1m_out": 0.6,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 262144,
-      "default_max_tokens": 131072,
+      "default_max_tokens": 26214,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -644,19 +631,6 @@
       "has_reasoning_efforts": false,
       "supports_attachments": true
     },
-    {
-      "id": "minimax/minimax-m1",
-      "name": "MiniMax: MiniMax M1",
-      "cost_per_1m_in": 0.55,
-      "cost_per_1m_out": 2.2,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 1000000,
-      "default_max_tokens": 20000,
-      "can_reason": true,
-      "has_reasoning_efforts": true,
-      "supports_attachments": false
-    },
     {
       "id": "google/gemini-2.5-flash-lite-preview-06-17",
       "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
@@ -742,7 +716,7 @@
       "cost_per_1m_out": 1.5,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 40960,
+      "context_window": 40000,
       "default_max_tokens": 20000,
       "can_reason": true,
       "has_reasoning_efforts": true,
@@ -946,12 +920,12 @@
     {
       "id": "qwen/qwen3-30b-a3b",
       "name": "Qwen: Qwen3 30B A3B",
-      "cost_per_1m_in": 0.09,
-      "cost_per_1m_out": 0.44999999999999996,
+      "cost_per_1m_in": 0.15,
+      "cost_per_1m_out": 0.6,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 65536,
+      "default_max_tokens": 4000,
       "can_reason": true,
       "has_reasoning_efforts": true,
       "supports_attachments": false
@@ -982,19 +956,6 @@
       "has_reasoning_efforts": true,
       "supports_attachments": false
     },
-    {
-      "id": "qwen/qwen3-235b-a22b:free",
-      "name": "Qwen: Qwen3 235B A22B (free)",
-      "cost_per_1m_in": 0,
-      "cost_per_1m_out": 0,
-      "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
-      "context_window": 131072,
-      "default_max_tokens": 13107,
-      "can_reason": false,
-      "has_reasoning_efforts": false,
-      "supports_attachments": false
-    },
     {
       "id": "qwen/qwen3-235b-a22b",
       "name": "Qwen: Qwen3 235B A22B",
@@ -1128,12 +1089,12 @@
     {
       "id": "meta-llama/llama-4-maverick",
       "name": "Meta: Llama 4 Maverick",
-      "cost_per_1m_in": 0.18,
-      "cost_per_1m_out": 0.6,
+      "cost_per_1m_in": 0.22,
+      "cost_per_1m_out": 0.88,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 1048576,
-      "default_max_tokens": 524288,
+      "default_max_tokens": 104857,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": true
@@ -1180,8 +1141,8 @@
     {
       "id": "deepseek/deepseek-chat-v3-0324",
       "name": "DeepSeek: DeepSeek V3 0324",
-      "cost_per_1m_in": 0.28,
-      "cost_per_1m_out": 1.1400000000000001,
+      "cost_per_1m_in": 0.27,
+      "cost_per_1m_out": 1.13,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 163840,
@@ -1250,7 +1211,7 @@
       "cost_per_1m_in_cached": 3.75,
       "cost_per_1m_out_cached": 0.3,
       "context_window": 200000,
-      "default_max_tokens": 64000,
+      "default_max_tokens": 32000,
       "can_reason": true,
       "has_reasoning_efforts": true,
       "supports_attachments": true
@@ -1457,21 +1418,21 @@
       "cost_per_1m_out": 0,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 65536,
-      "default_max_tokens": 6553,
+      "context_window": 128000,
+      "default_max_tokens": 2014,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "meta-llama/llama-3.3-70b-instruct",
       "name": "Meta: Llama 3.3 70B Instruct",
-      "cost_per_1m_in": 0.039,
-      "cost_per_1m_out": 0.12,
+      "cost_per_1m_in": 0.09999999999999999,
+      "cost_per_1m_out": 0.25,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 4096,
+      "default_max_tokens": 13107,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -1661,12 +1622,12 @@
     {
       "id": "thedrummer/rocinante-12b",
       "name": "TheDrummer: Rocinante 12B",
-      "cost_per_1m_in": 0.16999999999999998,
-      "cost_per_1m_out": 0.43,
+      "cost_per_1m_in": 0.24,
+      "cost_per_1m_out": 0.44999999999999996,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 32768,
-      "default_max_tokens": 3276,
+      "default_max_tokens": 16384,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -1736,6 +1697,19 @@
       "has_reasoning_efforts": false,
       "supports_attachments": false
     },
+    {
+      "id": "nousresearch/hermes-3-llama-3.1-70b",
+      "name": "Nous: Hermes 3 70B Instruct",
+      "cost_per_1m_in": 0.39999999999999997,
+      "cost_per_1m_out": 0.39999999999999997,
+      "cost_per_1m_in_cached": 0,
+      "cost_per_1m_out_cached": 0,
+      "context_window": 12288,
+      "default_max_tokens": 1228,
+      "can_reason": false,
+      "has_reasoning_efforts": false,
+      "supports_attachments": false
+    },
     {
       "id": "openai/gpt-4o-2024-08-06",
       "name": "OpenAI: GPT-4o (2024-08-06)",
@@ -1752,51 +1726,51 @@
     {
       "id": "meta-llama/llama-3.1-8b-instruct",
       "name": "Meta: Llama 3.1 8B Instruct",
-      "cost_per_1m_in": 0.09999999999999999,
-      "cost_per_1m_out": 0.09999999999999999,
+      "cost_per_1m_in": 0.03,
+      "cost_per_1m_out": 0.049999999999999996,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 4000,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "meta-llama/llama-3.1-405b-instruct",
       "name": "Meta: Llama 3.1 405B Instruct",
-      "cost_per_1m_in": 3,
-      "cost_per_1m_out": 3,
+      "cost_per_1m_in": 0.7999999999999999,
+      "cost_per_1m_out": 0.7999999999999999,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
-      "context_window": 131072,
-      "default_max_tokens": 13107,
+      "context_window": 32768,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "meta-llama/llama-3.1-70b-instruct",
       "name": "Meta: Llama 3.1 70B Instruct",
-      "cost_per_1m_in": 0.88,
-      "cost_per_1m_out": 0.88,
+      "cost_per_1m_in": 0.09999999999999999,
+      "cost_per_1m_out": 0.28,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 13107,
+      "default_max_tokens": 8192,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
     },
     {
       "id": "mistralai/mistral-nemo",
       "name": "Mistral: Mistral Nemo",
-      "cost_per_1m_in": 0.15,
-      "cost_per_1m_out": 0.15,
+      "cost_per_1m_in": 0.08,
+      "cost_per_1m_out": 0.14,
       "cost_per_1m_in_cached": 0,
       "cost_per_1m_out_cached": 0,
       "context_window": 131072,
-      "default_max_tokens": 13107,
+      "default_max_tokens": 65536,
       "can_reason": false,
       "has_reasoning_efforts": false,
       "supports_attachments": false
@@ -1937,7 +1911,7 @@
       "cost_per_1m_in": 2.5,
       "cost_per_1m_out": 10,
       "cost_per_1m_in_cached": 0,
-      "cost_per_1m_out_cached": 0,
+      "cost_per_1m_out_cached": 1.25,
       "context_window": 128000,
       "default_max_tokens": 8192,
       "can_reason": false,
