Skip to content

Commit 7b5e879

Browse files
chore: add gpt-5.1 (#102)
1 parent e529ec6 commit 7b5e879

File tree

1 file changed

+59
-3
lines changed

1 file changed

+59
-3
lines changed

internal/providers/configs/openai.json

Lines changed: 59 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,65 @@
44
"type": "openai",
55
"api_key": "$OPENAI_API_KEY",
66
"api_endpoint": "$OPENAI_API_ENDPOINT",
7-
"default_large_model_id": "gpt-5-codex",
7+
"default_large_model_id": "gpt-5.1-codex",
88
"default_small_model_id": "gpt-4o",
99
"models": [
10+
{
11+
"id": "gpt-5.1",
12+
"name": "GPT-5.1",
13+
"cost_per_1m_in": 1.25,
14+
"cost_per_1m_out": 10,
15+
"cost_per_1m_in_cached": 0.13,
16+
"cost_per_1m_out_cached": 0.13,
17+
"context_window": 400000,
18+
"default_max_tokens": 128000,
19+
"can_reason": true,
20+
"reasoning_levels": [
21+
"minimal",
22+
"low",
23+
"medium",
24+
"high"
25+
],
26+
"default_reasoning_effort": "medium",
27+
"supports_attachments": true
28+
},
29+
{
30+
"id": "gpt-5.1-codex",
31+
"name": "GPT-5.1 Codex",
32+
"cost_per_1m_in": 1.25,
33+
"cost_per_1m_out": 10,
34+
"cost_per_1m_in_cached": 0.25,
35+
"cost_per_1m_out_cached": 0.25,
36+
"context_window": 400000,
37+
"default_max_tokens": 128000,
38+
"can_reason": true,
39+
"reasoning_levels": [
40+
"minimal",
41+
"low",
42+
"medium",
43+
"high"
44+
],
45+
"default_reasoning_effort": "medium",
46+
"supports_attachments": true
47+
},
48+
{
49+
"id": "gpt-5.1-codex-mini",
50+
"name": "GPT-5.1 Codex Mini",
51+
"cost_per_1m_in": 0.25,
52+
"cost_per_1m_out": 2,
53+
"cost_per_1m_in_cached": 0.03,
54+
"cost_per_1m_out_cached": 0.03,
55+
"context_window": 400000,
56+
"default_max_tokens": 128000,
57+
"can_reason": true,
58+
"reasoning_levels": [
59+
"low",
60+
"medium",
61+
"high"
62+
],
63+
"default_reasoning_effort": "medium",
64+
"supports_attachments": true
65+
},
1066
{
1167
"id": "gpt-5-codex",
1268
"name": "GPT-5 Codex",
@@ -31,8 +87,8 @@
3187
"name": "GPT-5",
3288
"cost_per_1m_in": 1.25,
3389
"cost_per_1m_out": 10,
34-
"cost_per_1m_in_cached": 0.25,
35-
"cost_per_1m_out_cached": 0.25,
90+
"cost_per_1m_in_cached": 0.13,
91+
"cost_per_1m_out_cached": 0.13,
3692
"context_window": 400000,
3793
"default_max_tokens": 128000,
3894
"can_reason": true,

0 commit comments

Comments
 (0)