Skip to content

Commit 20ceee6

Browse files
committed
proxy
1 parent c409f99 commit 20ceee6

39 files changed

+7609
-120
lines changed

Cargo.lock

Lines changed: 2 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ eventsource-stream = "0.2.3"
3636
fast-glob = "1.0.0"
3737
indexmap = "2.7.0"
3838
serde_with = "3"
39+
strum = { version = "0.27", features = ["derive"] }
3940
# TODO: use crates.io again when this one is released: https://github.com/fast/fastrace/pull/134
4041
# fastrace = "0.7.14"
4142
# fastrace-futures = "0.7.5"
@@ -50,6 +51,7 @@ governor = { version = "0.10", features = ["std", "jitter"] }
5051
header-rules = { path = "crates/header-rules" }
5152
http = "1.3.1"
5253
http-body = "1.0.1"
54+
http-body-util = "0.1"
5355
indoc = "2.0.6"
5456
insta = "1.43.1"
5557
integration-test-macros = { path = "crates/integration-test-macros" }

crates/config/src/lib.rs

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ mod llm;
1313
mod loader;
1414
mod mcp;
1515
mod oauth;
16+
mod proxy;
1617
mod rate_limit;
1718
mod server;
1819
mod telemetry;
@@ -136,6 +137,12 @@ mod tests {
136137
},
137138
llm: LlmConfig {
138139
enabled: true,
140+
proxy: ProxyConfig {
141+
anthropic: AnthropicProxyConfig {
142+
enabled: false,
143+
path: "/proxy/anthropic",
144+
},
145+
},
139146
protocols: LlmProtocolsConfig {
140147
openai: OpenAIProtocolConfig {
141148
enabled: true,

crates/config/src/llm.rs

Lines changed: 90 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@ use std::{borrow::Cow, collections::BTreeMap, fmt};
44

55
use indexmap::IndexMap;
66

7-
use crate::headers::HeaderRule;
87
use crate::rate_limit::TokenRateLimitsConfig;
8+
use crate::{headers::HeaderRule, proxy::ProxyConfig};
99
use regex::{Regex, RegexBuilder};
1010
use secrecy::SecretString;
1111
use serde::{Deserialize, Deserializer};
@@ -195,7 +195,10 @@ pub struct LlmProtocolsConfig {
195195
#[serde(default, deny_unknown_fields)]
196196
pub struct LlmConfig {
197197
/// Whether the LLM functionality is enabled.
198-
enabled: bool,
198+
pub enabled: bool,
199+
200+
/// Proxy configuration for LLM requests.
201+
pub proxy: ProxyConfig,
199202

200203
/// Protocol-specific endpoint configurations.
201204
pub protocols: LlmProtocolsConfig,
@@ -210,16 +213,12 @@ impl Default for LlmConfig {
210213
enabled: true,
211214
protocols: LlmProtocolsConfig::default(),
212215
providers: IndexMap::new(),
216+
proxy: Default::default(),
213217
}
214218
}
215219
}
216220

217221
impl LlmConfig {
218-
/// Whether the LLM functionality is enabled.
219-
pub fn enabled(&self) -> bool {
220-
self.enabled
221-
}
222-
223222
/// Whether there are any LLM providers configured.
224223
pub fn has_providers(&self) -> bool {
225224
!self.providers.is_empty()
@@ -494,6 +493,12 @@ mod tests {
494493
assert_debug_snapshot!(&config, @r#"
495494
LlmConfig {
496495
enabled: true,
496+
proxy: ProxyConfig {
497+
anthropic: AnthropicProxyConfig {
498+
enabled: false,
499+
path: "/proxy/anthropic",
500+
},
501+
},
497502
protocols: LlmProtocolsConfig {
498503
openai: OpenAIProtocolConfig {
499504
enabled: true,
@@ -532,6 +537,12 @@ mod tests {
532537
assert_debug_snapshot!(&config, @r#"
533538
LlmConfig {
534539
enabled: true,
540+
proxy: ProxyConfig {
541+
anthropic: AnthropicProxyConfig {
542+
enabled: false,
543+
path: "/proxy/anthropic",
544+
},
545+
},
535546
protocols: LlmProtocolsConfig {
536547
openai: OpenAIProtocolConfig {
537548
enabled: true,
@@ -595,6 +606,12 @@ mod tests {
595606
assert_debug_snapshot!(&config, @r#"
596607
LlmConfig {
597608
enabled: true,
609+
proxy: ProxyConfig {
610+
anthropic: AnthropicProxyConfig {
611+
enabled: false,
612+
path: "/proxy/anthropic",
613+
},
614+
},
598615
protocols: LlmProtocolsConfig {
599616
openai: OpenAIProtocolConfig {
600617
enabled: true,
@@ -656,6 +673,12 @@ mod tests {
656673
assert_debug_snapshot!(&config, @r#"
657674
LlmConfig {
658675
enabled: true,
676+
proxy: ProxyConfig {
677+
anthropic: AnthropicProxyConfig {
678+
enabled: false,
679+
path: "/proxy/anthropic",
680+
},
681+
},
659682
protocols: LlmProtocolsConfig {
660683
openai: OpenAIProtocolConfig {
661684
enabled: true,
@@ -728,6 +751,12 @@ path = "/ai"
728751
assert_debug_snapshot!(&config, @r#"
729752
LlmConfig {
730753
enabled: true,
754+
proxy: ProxyConfig {
755+
anthropic: AnthropicProxyConfig {
756+
enabled: false,
757+
path: "/proxy/anthropic",
758+
},
759+
},
731760
protocols: LlmProtocolsConfig {
732761
openai: OpenAIProtocolConfig {
733762
enabled: true,
@@ -926,6 +955,12 @@ path = "/ai"
926955
assert_debug_snapshot!(&config, @r#"
927956
LlmConfig {
928957
enabled: false,
958+
proxy: ProxyConfig {
959+
anthropic: AnthropicProxyConfig {
960+
enabled: false,
961+
path: "/proxy/anthropic",
962+
},
963+
},
929964
protocols: LlmProtocolsConfig {
930965
openai: OpenAIProtocolConfig {
931966
enabled: true,
@@ -954,6 +989,12 @@ path = "/models"
954989
assert_debug_snapshot!(&config, @r#"
955990
LlmConfig {
956991
enabled: true,
992+
proxy: ProxyConfig {
993+
anthropic: AnthropicProxyConfig {
994+
enabled: false,
995+
path: "/proxy/anthropic",
996+
},
997+
},
957998
protocols: LlmProtocolsConfig {
958999
openai: OpenAIProtocolConfig {
9591000
enabled: true,
@@ -1002,6 +1043,12 @@ path = "/llm"
10021043
assert_debug_snapshot!(&config, @r#"
10031044
LlmConfig {
10041045
enabled: true,
1046+
proxy: ProxyConfig {
1047+
anthropic: AnthropicProxyConfig {
1048+
enabled: false,
1049+
path: "/proxy/anthropic",
1050+
},
1051+
},
10051052
protocols: LlmProtocolsConfig {
10061053
openai: OpenAIProtocolConfig {
10071054
enabled: true,
@@ -1059,6 +1106,12 @@ path = "/llm"
10591106
assert_debug_snapshot!(&config, @r#"
10601107
LlmConfig {
10611108
enabled: true,
1109+
proxy: ProxyConfig {
1110+
anthropic: AnthropicProxyConfig {
1111+
enabled: false,
1112+
path: "/proxy/anthropic",
1113+
},
1114+
},
10621115
protocols: LlmProtocolsConfig {
10631116
openai: OpenAIProtocolConfig {
10641117
enabled: true,
@@ -1125,6 +1178,12 @@ path = "/llm"
11251178
assert_debug_snapshot!(&config, @r#"
11261179
LlmConfig {
11271180
enabled: true,
1181+
proxy: ProxyConfig {
1182+
anthropic: AnthropicProxyConfig {
1183+
enabled: false,
1184+
path: "/proxy/anthropic",
1185+
},
1186+
},
11281187
protocols: LlmProtocolsConfig {
11291188
openai: OpenAIProtocolConfig {
11301189
enabled: true,
@@ -1194,6 +1253,12 @@ path = "/llm"
11941253
assert_debug_snapshot!(&config, @r#"
11951254
LlmConfig {
11961255
enabled: true,
1256+
proxy: ProxyConfig {
1257+
anthropic: AnthropicProxyConfig {
1258+
enabled: false,
1259+
path: "/proxy/anthropic",
1260+
},
1261+
},
11971262
protocols: LlmProtocolsConfig {
11981263
openai: OpenAIProtocolConfig {
11991264
enabled: true,
@@ -1382,6 +1447,12 @@ path = "/llm"
13821447
assert_debug_snapshot!(&config, @r#"
13831448
LlmConfig {
13841449
enabled: true,
1450+
proxy: ProxyConfig {
1451+
anthropic: AnthropicProxyConfig {
1452+
enabled: false,
1453+
path: "/proxy/anthropic",
1454+
},
1455+
},
13851456
protocols: LlmProtocolsConfig {
13861457
openai: OpenAIProtocolConfig {
13871458
enabled: true,
@@ -1476,6 +1547,12 @@ path = "/llm"
14761547
assert_debug_snapshot!(&config, @r#"
14771548
LlmConfig {
14781549
enabled: true,
1550+
proxy: ProxyConfig {
1551+
anthropic: AnthropicProxyConfig {
1552+
enabled: false,
1553+
path: "/proxy/anthropic",
1554+
},
1555+
},
14791556
protocols: LlmProtocolsConfig {
14801557
openai: OpenAIProtocolConfig {
14811558
enabled: true,
@@ -1530,6 +1607,12 @@ path = "/llm"
15301607
assert_debug_snapshot!(&config, @r#"
15311608
LlmConfig {
15321609
enabled: true,
1610+
proxy: ProxyConfig {
1611+
anthropic: AnthropicProxyConfig {
1612+
enabled: false,
1613+
path: "/proxy/anthropic",
1614+
},
1615+
},
15331616
protocols: LlmProtocolsConfig {
15341617
openai: OpenAIProtocolConfig {
15351618
enabled: true,

crates/config/src/loader.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -35,9 +35,9 @@ pub fn load<P: AsRef<Path>>(path: P) -> anyhow::Result<Config> {
3535
pub(crate) fn validate_has_downstreams(config: &Config) -> anyhow::Result<()> {
3636
// Check if any downstreams are actually configured (not just enabled)
3737
let has_mcp_servers = config.mcp.enabled() && config.mcp.has_servers();
38-
let has_llm_providers = config.llm.enabled() && config.llm.has_providers();
38+
let has_llm_providers = config.llm.enabled && config.llm.has_providers();
3939

40-
if !has_mcp_servers && !has_llm_providers {
40+
if !has_mcp_servers && !has_llm_providers && !config.llm.proxy.anthropic.enabled {
4141
bail!(indoc! {r#"
4242
No downstream servers configured. Nexus requires at least one MCP server or LLM provider to function.
4343

crates/config/src/proxy.rs

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
use serde::Deserialize;
2+
3+
#[derive(Default, Debug, Clone, Deserialize)]
4+
#[serde(default, deny_unknown_fields)]
5+
pub struct ProxyConfig {
6+
pub anthropic: AnthropicProxyConfig,
7+
}
8+
9+
#[derive(Debug, Clone, Deserialize)]
10+
#[serde(default, deny_unknown_fields)]
11+
pub struct AnthropicProxyConfig {
12+
pub enabled: bool,
13+
pub path: String,
14+
}
15+
16+
impl Default for AnthropicProxyConfig {
17+
fn default() -> Self {
18+
Self {
19+
enabled: false,
20+
path: "/proxy/anthropic".to_string(),
21+
}
22+
}
23+
}

crates/llm/Cargo.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ fastrace-utils.workspace = true
2727
futures.workspace = true
2828
header-rules.workspace = true
2929
http.workspace = true
30+
http-body-util.workspace = true
3031
itertools.workspace = true
3132
log.workspace = true
3233
opentelemetry.workspace = true
@@ -41,6 +42,7 @@ telemetry.workspace = true
4142
thiserror.workspace = true
4243
tiktoken-rs.workspace = true
4344
tokio = { workspace = true, features = ["sync"] }
45+
tower.workspace = true
4446
uuid = { workspace = true, features = ["v4"] }
4547

4648
[dev-dependencies]

crates/llm/src/error.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -150,13 +150,13 @@ impl IntoResponse for LlmError {
150150

151151
pub struct AnthropicErrorResponse {
152152
status: StatusCode,
153-
body: anthropic::AnthropicError,
153+
body: anthropic::ErrorResponse,
154154
}
155155

156156
impl From<LlmError> for AnthropicErrorResponse {
157157
fn from(error: LlmError) -> Self {
158158
let status = error.status_code();
159-
let body = anthropic::AnthropicError::from(error);
159+
let body = anthropic::ErrorResponse::from(error);
160160

161161
Self { status, body }
162162
}
Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
1-
use std::time::Duration;
1+
use std::{sync::OnceLock, time::Duration};
22

33
use axum::http;
44
use reqwest::Client;
55

6-
pub(super) fn default_http_client_builder(mut headers: http::HeaderMap) -> reqwest::ClientBuilder {
6+
pub(crate) fn default_http_client_builder() -> reqwest::ClientBuilder {
7+
let mut headers = http::HeaderMap::new();
78
headers.insert(http::header::CONNECTION, http::HeaderValue::from_static("keep-alive"));
89

910
Client::builder()
@@ -21,3 +22,16 @@ pub(super) fn default_http_client_builder(mut headers: http::HeaderMap) -> reqwe
2122
.tcp_keepalive(Some(Duration::from_secs(60)))
2223
.default_headers(headers)
2324
}
25+
26+
/// Common HTTP client to re-use as much as possible the same connections.
27+
pub(crate) fn http_client() -> reqwest::Client {
28+
static CLIENT: OnceLock<Client> = OnceLock::new();
29+
30+
CLIENT
31+
.get_or_init(|| {
32+
default_http_client_builder()
33+
.build()
34+
.expect("Failed to build default HTTP client")
35+
})
36+
.clone()
37+
}

0 commit comments

Comments (0)