
Commit 6e8235b

jamadeo and Douwe Osinga authored and committed
improve provider request logging a bit (#5236)
1 parent ad79be7 commit 6e8235b

19 files changed: +201 additions, -101 deletions
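
The same pattern is applied in every provider touched by this commit: the free function emit_debug_trace(model, payload, response, usage) is replaced by a RequestLog value that is started before the request goes out and is then fed the response (or the error) afterwards. As orientation before the per-file hunks, here is a minimal sketch of that call pattern. It only restates the call sites visible below; the RequestLog type itself lives in crates/goose/src/providers/utils.rs and is not part of this diff, so its exact signatures and error type are assumptions.

use crate::model::ModelConfig;
use crate::providers::base::Usage;
use crate::providers::errors::ProviderError;
use crate::providers::utils::RequestLog;
use serde_json::Value;

// Sketch only: the call pattern this commit introduces, with signatures
// inferred from the hunks below (assumption, not a documented API).
fn log_one_request(
    model: &ModelConfig,
    payload: &Value,
    response: &Value,
    usage: &Usage,
) -> Result<(), ProviderError> {
    // Open the log entry before sending, capturing the model and request payload.
    let mut log = RequestLog::start(model, payload)?;

    // If the request itself fails, the providers record it on the same entry instead:
    //     let _ = log.error(&provider_error);

    // On success, append the response and optional token usage to the same entry.
    log.write(response, Some(usage))?;
    Ok(())
}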

crates/goose/src/providers/anthropic.rs

Lines changed: 12 additions & 6 deletions
@@ -14,11 +14,12 @@ use super::errors::ProviderError;
 use super::formats::anthropic::{
     create_request, get_usage, response_to_message, response_to_streaming_message,
 };
-use super::utils::{emit_debug_trace, get_model, map_http_error_to_provider_error};
+use super::utils::{get_model, map_http_error_to_provider_error};
 use crate::config::declarative_providers::DeclarativeProviderConfig;
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
 use crate::providers::retry::ProviderRetry;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 pub const ANTHROPIC_DEFAULT_MODEL: &str = "claude-sonnet-4-0";
@@ -204,7 +205,8 @@ impl Provider for AnthropicProvider {
             usage.input_tokens, usage.output_tokens, usage.total_tokens);
 
         let response_model = get_model(&json_response);
-        emit_debug_trace(&self.model, &payload, &json_response, &usage);
+        let mut log = RequestLog::start(&self.model, &payload)?;
+        log.write(&json_response, Some(&usage))?;
         let provider_usage = ProviderUsage::new(response_model, usage);
         tracing::debug!(
             "🔍 Anthropic non-streaming returning ProviderUsage: {:?}",
@@ -258,22 +260,26 @@ impl Provider for AnthropicProvider {
             .insert("stream".to_string(), Value::Bool(true));
 
         let mut request = self.api_client.request("v1/messages");
+        let mut log = RequestLog::start(&self.model, &payload)?;
 
         for (key, value) in self.get_conditional_headers() {
            request = request.header(key, value)?;
        }
 
-        let response = request.response_post(&payload).await?;
+        let response = request.response_post(&payload).await.inspect_err(|e| {
+            let _ = log.error(e);
+        })?;
         if !response.status().is_success() {
             let status = response.status();
             let error_text = response.text().await.unwrap_or_default();
             let error_json = serde_json::from_str::<Value>(&error_text).ok();
-            return Err(map_http_error_to_provider_error(status, error_json));
+            let error = map_http_error_to_provider_error(status, error_json);
+            let _ = log.error(&error);
+            return Err(error);
         }
 
         let stream = response.bytes_stream().map_err(io::Error::other);
 
-        let model = self.model.clone();
         Ok(Box::pin(try_stream! {
             let stream_reader = StreamReader::new(stream);
             let framed = tokio_util::codec::FramedRead::new(stream_reader, tokio_util::codec::LinesCodec::new()).map_err(anyhow::Error::from);
@@ -282,7 +288,7 @@ impl Provider for AnthropicProvider {
             pin!(message_stream);
             while let Some(message) = futures::StreamExt::next(&mut message_stream).await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                emit_debug_trace(&model, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
                 yield (message, usage);
             }
         }))
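
For the streaming path the log is opened once, before the request is sent, and then reused for the whole stream: transport failures are recorded with log.error inside Result::inspect_err, non-2xx responses are mapped to a provider error and logged before being returned, and each decoded stream message is appended with log.write. Because the log (which already captured the model and payload) is moved into the stream closure, the separate let model = self.model.clone(); that emit_debug_trace needed is dropped. A condensed sketch of that flow follows; it reuses the locals from the hunk above (request, payload, self.model), elides the stream body, and assumes the RequestLog signatures as before.

// Condensed sketch of the streaming flow in the hunk above; not self-contained,
// and the RequestLog signatures are assumptions.
let mut log = RequestLog::start(&self.model, &payload)?;

// Record transport-level failures without masking the original error.
let response = request
    .response_post(&payload)
    .await
    .inspect_err(|e| {
        let _ = log.error(e);
    })?;

if !response.status().is_success() {
    // Body parsing elided here; the real code passes the parsed error JSON.
    let error = map_http_error_to_provider_error(response.status(), None);
    let _ = log.error(&error); // log it, then surface the same error
    return Err(error);
}

// Inside the response stream, each decoded message (and its usage, if any)
// is appended to the same log entry:
// log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;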

crates/goose/src/providers/azure.rs

Lines changed: 4 additions & 2 deletions
@@ -9,9 +9,10 @@ use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::formats::openai::{create_request, get_usage, response_to_message};
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
+use super::utils::{get_model, handle_response_openai_compat, ImageFormat};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 pub const AZURE_DEFAULT_MODEL: &str = "gpt-4o";
@@ -156,7 +157,8 @@ impl Provider for AzureProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        let mut log = RequestLog::start(model_config, &payload)?;
+        log.write(&response, Some(&usage))?;
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
 }

crates/goose/src/providers/bedrock.rs

Lines changed: 5 additions & 6 deletions
@@ -5,7 +5,7 @@ use super::errors::ProviderError;
 use super::retry::{ProviderRetry, RetryConfig};
 use crate::conversation::message::Message;
 use crate::model::ModelConfig;
-use crate::providers::utils::emit_debug_trace;
+use crate::providers::utils::RequestLog;
 use anyhow::Result;
 use async_trait::async_trait;
 use aws_sdk_bedrockruntime::config::ProvideCredentials;
@@ -222,12 +222,11 @@ impl Provider for BedrockProvider {
             "messages": messages,
             "tools": tools
         });
-        emit_debug_trace(
-            &self.model,
-            &debug_payload,
+        let mut log = RequestLog::start(&self.model, &debug_payload)?;
+        log.write(
             &serde_json::to_value(&message).unwrap_or_default(),
-            &usage,
-        );
+            Some(&usage),
+        )?;
 
         let provider_usage = ProviderUsage::new(model_name.to_string(), usage);
         Ok((message, provider_usage))

crates/goose/src/providers/claude_code.rs

Lines changed: 3 additions & 2 deletions
@@ -9,7 +9,7 @@ use tokio::process::Command;
 
 use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::config::Config;
 use crate::conversation::message::{Message, MessageContent};
 use crate::model::ModelConfig;
@@ -495,13 +495,14 @@ impl Provider for ClaudeCodeProvider {
             "system": system,
             "messages": messages.len()
         });
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         let response = json!({
             "lines": json_lines.len(),
             "usage": usage
         });
 
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
 
         Ok((
             message,

crates/goose/src/providers/cursor_agent.rs

Lines changed: 3 additions & 2 deletions
@@ -9,7 +9,7 @@ use tokio::process::Command;
 
 use super::base::{ConfigKey, Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::conversation::message::{Message, MessageContent};
 use crate::model::ModelConfig;
 use rmcp::model::Tool;
@@ -433,7 +433,8 @@ impl Provider for CursorAgentProvider {
             "usage": usage
         });
 
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        let mut log = RequestLog::start(&self.model, &payload)?;
+        log.write(&response, Some(&usage))?;
 
         Ok((
             message,

crates/goose/src/providers/databricks.rs

Lines changed: 10 additions & 4 deletions
@@ -18,6 +18,7 @@ use super::oauth;
 use super::retry::ProviderRetry;
 use super::utils::{
     get_model, handle_response_openai_compat, map_http_error_to_provider_error, ImageFormat,
+    RequestLog,
 };
 use crate::config::ConfigError;
 use crate::conversation::message::Message;
@@ -285,6 +286,8 @@ impl Provider for DatabricksProvider {
             .expect("payload should have model key")
             .remove("model");
 
+        let mut log = RequestLog::start(&self.model, &payload)?;
+
         let response = self
             .with_retry(|| self.post(payload.clone(), Some(&model_config.model_name)))
             .await?;
@@ -295,7 +298,7 @@ impl Provider for DatabricksProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        super::utils::emit_debug_trace(&self.model, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
 
         Ok((message, ProviderUsage::new(response_model, usage)))
     }
@@ -321,6 +324,7 @@ impl Provider for DatabricksProvider {
             .insert("stream".to_string(), Value::Bool(true));
 
         let path = self.get_endpoint_path(&model_config.model_name, false);
+        let mut log = RequestLog::start(&self.model, &payload)?;
         let response = self
             .with_retry(|| async {
                 let resp = self.api_client.response_post(&path, &payload).await?;
@@ -334,11 +338,13 @@ impl Provider for DatabricksProvider {
                 }
                 Ok(resp)
             })
-            .await?;
+            .await
+            .inspect_err(|e| {
+                let _ = log.error(e);
+            })?;
 
         let stream = response.bytes_stream().map_err(io::Error::other);
 
-        let model = self.model.clone();
         Ok(Box::pin(try_stream! {
             let stream_reader = StreamReader::new(stream);
             let framed = FramedRead::new(stream_reader, LinesCodec::new()).map_err(anyhow::Error::from);
@@ -347,7 +353,7 @@ impl Provider for DatabricksProvider {
             pin!(message_stream);
             while let Some(message) = message_stream.next().await {
                 let (message, usage) = message.map_err(|e| ProviderError::RequestFailed(format!("Stream decode error: {}", e)))?;
-                super::utils::emit_debug_trace(&model, &payload, &message, &usage.as_ref().map(|f| f.usage).unwrap_or_default());
+                log.write(&message, usage.as_ref().map(|f| f.usage).as_ref())?;
                 yield (message, usage);
             }
         }))

crates/goose/src/providers/gcpvertexai.rs

Lines changed: 3 additions & 2 deletions
@@ -21,7 +21,7 @@ use crate::providers::formats::gcpvertexai::{
 use crate::providers::formats::gcpvertexai::GcpLocation::Iowa;
 use crate::providers::gcpauth::GcpAuth;
 use crate::providers::retry::RetryConfig;
-use crate::providers::utils::emit_debug_trace;
+use crate::providers::utils::RequestLog;
 use rmcp::model::Tool;
 
 /// Base URL for GCP Vertex AI documentation
@@ -518,7 +518,8 @@ impl Provider for GcpVertexAIProvider {
         let response = self.post(&request, &context).await?;
         let usage = get_usage(&response, &context)?;
 
-        emit_debug_trace(model_config, &request, &response, &usage);
+        let mut log = RequestLog::start(model_config, &request)?;
+        log.write(&response, Some(&usage))?;
 
         // Convert response to message
         let message = response_to_message(response, context)?;

crates/goose/src/providers/gemini_cli.rs

Lines changed: 14 additions & 8 deletions
@@ -8,7 +8,7 @@ use tokio::process::Command;
 
 use super::base::{Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
-use super::utils::emit_debug_trace;
+use super::utils::RequestLog;
 use crate::conversation::message::{Message, MessageContent};
 
 use crate::model::ModelConfig;
@@ -317,12 +317,12 @@ impl Provider for GeminiCliProvider {
     }
 
     #[tracing::instrument(
-        skip(self, model_config, system, messages, tools),
+        skip(self, _model_config, system, messages, tools),
         fields(model_config, input, output, input_tokens, output_tokens, total_tokens)
     )]
     async fn complete_with_model(
         &self,
-        model_config: &ModelConfig,
+        _model_config: &ModelConfig,
         system: &str,
         messages: &[Message],
         tools: &[Tool],
@@ -332,10 +332,6 @@ impl Provider for GeminiCliProvider {
             return self.generate_simple_session_description(messages);
         }
 
-        let lines = self.execute_command(system, messages, tools).await?;
-
-        let (message, usage) = self.parse_response(&lines)?;
-
         // Create a dummy payload for debug tracing
         let payload = json!({
             "command": self.command,
@@ -344,12 +340,22 @@ impl Provider for GeminiCliProvider {
             "messages": messages.len()
         });
 
+        let mut log = RequestLog::start(&self.model, &payload).map_err(|e| {
+            ProviderError::RequestFailed(format!("Failed to start request log: {}", e))
+        })?;
+
+        let lines = self.execute_command(system, messages, tools).await?;
+
+        let (message, usage) = self.parse_response(&lines)?;
+
         let response = json!({
             "lines": lines.len(),
             "usage": usage
         });
 
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage)).map_err(|e| {
+            ProviderError::RequestFailed(format!("Failed to write request log: {}", e))
+        })?;
 
         Ok((
             message,

crates/goose/src/providers/githubcopilot.rs

Lines changed: 3 additions & 2 deletions
@@ -15,7 +15,7 @@ use super::base::{Provider, ProviderMetadata, ProviderUsage, Usage};
 use super::errors::ProviderError;
 use super::formats::openai::{create_request, get_usage, response_to_message};
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, get_model, handle_response_openai_compat, ImageFormat};
+use super::utils::{get_model, handle_response_openai_compat, ImageFormat, RequestLog};
 
 use crate::config::{Config, ConfigError};
 use crate::conversation::message::Message;
@@ -408,6 +408,7 @@ impl Provider for GithubCopilotProvider {
         tools: &[Tool],
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         let payload = create_request(model_config, system, messages, tools, &ImageFormat::OpenAi)?;
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         // Make request with retry
         let response = self
@@ -424,7 +425,7 @@ impl Provider for GithubCopilotProvider {
             Usage::default()
         });
         let response_model = get_model(&response);
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
         Ok((message, ProviderUsage::new(response_model, usage)))
     }

crates/goose/src/providers/google.rs

Lines changed: 3 additions & 2 deletions
@@ -1,7 +1,7 @@
 use super::api_client::{ApiClient, AuthMethod};
 use super::errors::ProviderError;
 use super::retry::ProviderRetry;
-use super::utils::{emit_debug_trace, handle_response_google_compat, unescape_json_values};
+use super::utils::{handle_response_google_compat, unescape_json_values, RequestLog};
 use crate::conversation::message::Message;
 
 use crate::model::ModelConfig;
@@ -102,6 +102,7 @@ impl Provider for GoogleProvider {
         tools: &[Tool],
     ) -> Result<(Message, ProviderUsage), ProviderError> {
         let payload = create_request(model_config, system, messages, tools)?;
+        let mut log = RequestLog::start(model_config, &payload)?;
 
         let response = self
             .with_retry(|| async {
@@ -116,7 +117,7 @@ impl Provider for GoogleProvider {
             Some(model_version) => model_version.as_str().unwrap_or_default().to_string(),
             None => model_config.model_name.clone(),
         };
-        emit_debug_trace(model_config, &payload, &response, &usage);
+        log.write(&response, Some(&usage))?;
         let provider_usage = ProviderUsage::new(response_model, usage);
         Ok((message, provider_usage))
     }
