diff --git a/src/crates/ai-adapters/src/stream/stream_handler/openai.rs b/src/crates/ai-adapters/src/stream/stream_handler/openai.rs
index ef670581e..bf0ac4ec3 100644
--- a/src/crates/ai-adapters/src/stream/stream_handler/openai.rs
+++ b/src/crates/ai-adapters/src/stream/stream_handler/openai.rs
@@ -51,6 +51,22 @@ fn is_valid_chat_completion_chunk_weak(event_json: &Value) -> bool {
 fn extract_sse_api_error_message(event_json: &Value) -> Option<String> {
     let error = event_json.get("error")?;
     if let Some(message) = error.get("message").and_then(|value| value.as_str()) {
+        if let Some(code) = error
+            .get("code")
+            .and_then(|value| value.as_str())
+            .map(str::to_string)
+            .or_else(|| error.get("code").map(|value| value.to_string()))
+        {
+            let mut formatted = format!("code={}, message={}", code, message);
+            if let Some(request_id) = event_json
+                .get("request_id")
+                .or_else(|| event_json.get("requestId"))
+                .and_then(|value| value.as_str())
+            {
+                formatted.push_str(&format!(", request_id={}", request_id));
+            }
+            return Some(formatted);
+        }
         return Some(message.to_string());
     }
     if let Some(message) = error.as_str() {
@@ -274,6 +290,24 @@ mod tests {
         );
     }
 
+    #[test]
+    fn extracts_api_error_code_message_and_request_id_from_object_shape() {
+        let event = serde_json::json!({
+            "error": {
+                "code": "1305",
+                "message": "该模型当前访问量过大,请您稍后再试"
+            },
+            "request_id": "20260410100425fab0cd73dea74cc3"
+        });
+
+        assert_eq!(
+            extract_sse_api_error_message(&event).as_deref(),
+            Some(
+                "code=1305, message=该模型当前访问量过大,请您稍后再试, request_id=20260410100425fab0cd73dea74cc3"
+            )
+        );
+    }
+
     #[test]
     fn extracts_api_error_message_from_string_shape() {
         let event = serde_json::json!({
diff --git a/src/crates/core/tests/fixtures/stream/openai/provider_error_with_code.sse b/src/crates/core/tests/fixtures/stream/openai/provider_error_with_code.sse
new file mode 100644
index 000000000..66a0c4d64
--- /dev/null
+++ b/src/crates/core/tests/fixtures/stream/openai/provider_error_with_code.sse
@@ -0,0 +1 @@
+data: {"error":{"code":"1305","message":"provider temporarily overloaded"},"request_id":"req_1305"}
diff --git a/src/crates/core/tests/stream_processor_openai.rs b/src/crates/core/tests/stream_processor_openai.rs
index 3cb6b5c83..076fd4bd5 100644
--- a/src/crates/core/tests/stream_processor_openai.rs
+++ b/src/crates/core/tests/stream_processor_openai.rs
@@ -180,6 +180,29 @@ async fn openai_fixture_parses_inline_think_tags_into_reasoning_content() {
     assert_eq!(text_chunks, vec!["Final answer."]);
 }
 
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn openai_fixture_preserves_provider_error_code_and_request_id() {
+    let output = run_stream_fixture(
+        StreamFixtureProvider::OpenAi,
+        "stream/openai/provider_error_with_code.sse",
+        FixtureSseServerOptions::default(),
+    )
+    .await;
+
+    let error = output
+        .result
+        .expect_err("provider error fixture should fail")
+        .error
+        .to_string();
+
+    assert!(error.contains("SSE API error"));
+    assert!(error.contains("code=1305"));
+    assert!(error.contains("message=provider temporarily overloaded"));
+    assert!(error.contains("request_id=req_1305"));
+    assert!(!error.contains("missing field"));
+    assert!(!error.contains("SSE data schema error"));
+}
+
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn openai_fixture_reattaches_id_only_prelude_to_following_payload_chunk() {
     let output = run_stream_fixture(