Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 14 additions & 2 deletions src-tauri/src/proxy/providers/streaming.rs
Original file line number Diff line number Diff line change
Expand Up @@ -450,7 +450,8 @@ pub fn create_anthropic_sse_stream<E: std::error::Error + Send + 'static>(
"content_block": {
"type": "tool_use",
"id": id,
"name": name
"name": name,
"input": {}
}
});
let sse_data = format!("event: content_block_start\ndata: {}\n\n",
Expand Down Expand Up @@ -558,7 +559,8 @@ pub fn create_anthropic_sse_stream<E: std::error::Error + Send + 'static>(
"content_block": {
"type": "tool_use",
"id": id,
"name": name
"name": name,
"input": {}
}
});
let sse_data = format!("event: content_block_start\ndata: {}\n\n",
Expand Down Expand Up @@ -777,6 +779,15 @@ mod tests {
}

assert_eq!(tool_index_by_call.len(), 2);
for event in events.iter().filter(|event| {
event.get("type").and_then(|v| v.as_str()) == Some("content_block_start")
&& event
.pointer("/content_block/type")
.and_then(|v| v.as_str())
== Some("tool_use")
}) {
assert_eq!(event["content_block"]["input"], json!({}));
}
assert_ne!(
tool_index_by_call.get("call_0"),
tool_index_by_call.get("call_1")
Expand Down Expand Up @@ -873,6 +884,7 @@ mod tests {
.unwrap_or(""),
"first_tool"
);
assert_eq!(starts[0]["content_block"]["input"], json!({}));

let deltas: Vec<&str> = events
.iter()
Expand Down
27 changes: 24 additions & 3 deletions src-tauri/src/proxy/providers/streaming_gemini.rs
Original file line number Diff line number Diff line change
Expand Up @@ -530,7 +530,8 @@ pub fn create_anthropic_sse_stream_from_gemini<E: std::error::Error + Send + 'st
"content_block": {
"type": "tool_use",
"id": tool_call.id.clone().unwrap_or_default(),
"name": tool_call.name
"name": tool_call.name,
"input": {}
}
});
yield Ok(encode_sse("content_block_start", &start_event));
Expand Down Expand Up @@ -650,8 +651,28 @@ mod tests {
"data: {\"responseId\":\"resp_2\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"finishReason\":\"STOP\",\"content\":{\"parts\":[{\"functionCall\":{\"id\":\"call_1\",\"name\":\"get_weather\",\"args\":{\"city\":\"Tokyo\"}},\"thoughtSignature\":\"sig-1\"}]}}],\"usageMetadata\":{\"promptTokenCount\":5,\"totalTokenCount\":8}}\n\n",
]);

assert!(output.contains("\"type\":\"tool_use\""));
assert!(output.contains("\"name\":\"get_weather\""));
let events: Vec<Value> = output
.split("\n\n")
.filter_map(|block| {
let data = block
.lines()
.find_map(|line| strip_sse_field(line, "data"))?;
serde_json::from_str::<Value>(data).ok()
})
.collect();
let tool_start = events
.iter()
.find(|event| {
event.get("type").and_then(|v| v.as_str()) == Some("content_block_start")
&& event
.pointer("/content_block/type")
.and_then(|v| v.as_str())
== Some("tool_use")
})
.unwrap();

assert_eq!(tool_start["content_block"]["name"], json!("get_weather"));
assert_eq!(tool_start["content_block"]["input"], json!({}));
assert!(output.contains("\"type\":\"input_json_delta\""));
assert!(output.contains("\"stop_reason\":\"tool_use\""));
}
Expand Down
102 changes: 100 additions & 2 deletions src-tauri/src/proxy/providers/streaming_responses.rs
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,8 @@ pub fn create_anthropic_sse_stream_from_responses<E: std::error::Error + Send +
"content_block": {
"type": "tool_use",
"id": call_id,
"name": name
"name": name,
"input": {}
}
});
let sse = format!("event: content_block_start\ndata: {}\n\n",
Expand Down Expand Up @@ -473,7 +474,8 @@ pub fn create_anthropic_sse_stream_from_responses<E: std::error::Error + Send +
"name": data
.get("name")
.and_then(|v| v.as_str())
.unwrap_or("")
.unwrap_or(""),
"input": {}
}
});
let start_sse = format!("event: content_block_start\ndata: {}\n\n",
Expand Down Expand Up @@ -1033,6 +1035,102 @@ mod tests {
assert_eq!(text_deltas, vec!["你".to_string(), "好".to_string()]);
}

#[tokio::test]
async fn test_streaming_responses_tool_start_normal_path_includes_empty_input() {
    // Upstream Responses-API stream: a function_call output item is added,
    // receives an arguments delta, finishes, and the response completes.
    // The converted Anthropic stream must emit a `content_block_start` for
    // the tool whose `content_block.input` is an empty JSON object.
    let input = concat!(
        "event: response.created\n",
        "data: {\"type\":\"response.created\",\"response\":{\"id\":\"resp_tool\",\"model\":\"gpt-4o\"}}\n\n",
        "event: response.output_item.added\n",
        "data: {\"type\":\"response.output_item.added\",\"item\":{\"id\":\"fc_1\",\"type\":\"function_call\",\"call_id\":\"call_1\",\"name\":\"get_weather\"}}\n\n",
        "event: response.function_call_arguments.delta\n",
        "data: {\"type\":\"response.function_call_arguments.delta\",\"item_id\":\"fc_1\",\"delta\":\"{\\\"city\\\":\\\"Tokyo\\\"}\"}\n\n",
        "event: response.function_call_arguments.done\n",
        "data: {\"type\":\"response.function_call_arguments.done\",\"item_id\":\"fc_1\"}\n\n",
        "event: response.completed\n",
        "data: {\"type\":\"response.completed\",\"response\":{\"status\":\"completed\"}}\n\n"
    );

    // Feed the whole payload as a single upstream chunk.
    let upstream = stream::iter(vec![Ok::<_, std::io::Error>(Bytes::from(
        input.as_bytes().to_vec(),
    ))]);
    let converted = create_anthropic_sse_stream_from_responses(upstream);
    let chunks: Vec<_> = converted.collect().await;

    // Decode every emitted SSE block's `data:` payload into JSON values.
    let mut events: Vec<Value> = Vec::new();
    for chunk in chunks {
        let bytes = chunk.unwrap();
        let text = String::from_utf8_lossy(bytes.as_ref()).to_string();
        for block in text.split("\n\n") {
            let parsed = block.lines().find_map(|line| {
                strip_sse_field(line, "data")
                    .and_then(|payload| serde_json::from_str::<Value>(payload).ok())
            });
            if let Some(value) = parsed {
                events.push(value);
            }
        }
    }

    // Locate the tool_use content_block_start and verify the empty input.
    let tool_start = events
        .iter()
        .find(|event| {
            let is_start =
                event.get("type").and_then(|v| v.as_str()) == Some("content_block_start");
            let is_tool = event
                .pointer("/content_block/type")
                .and_then(|v| v.as_str())
                == Some("tool_use");
            is_start && is_tool
        })
        .unwrap();
    assert_eq!(tool_start["content_block"]["input"], json!({}));
}

#[tokio::test]
async fn test_streaming_responses_tool_start_fallback_path_includes_empty_input() {
    // Fallback path: no `response.output_item.added` event precedes the
    // arguments delta, so the converter must synthesize the tool's
    // `content_block_start` from the delta itself — and that synthesized
    // event must also carry an empty `content_block.input` object.
    let input = concat!(
        "event: response.created\n",
        "data: {\"type\":\"response.created\",\"response\":{\"id\":\"resp_tool\",\"model\":\"gpt-4o\"}}\n\n",
        "event: response.function_call_arguments.delta\n",
        "data: {\"type\":\"response.function_call_arguments.delta\",\"item_id\":\"fc_1\",\"call_id\":\"call_1\",\"name\":\"get_weather\",\"delta\":\"{\\\"city\\\":\"}\n\n",
        "event: response.function_call_arguments.done\n",
        "data: {\"type\":\"response.function_call_arguments.done\",\"item_id\":\"fc_1\"}\n\n",
        "event: response.completed\n",
        "data: {\"type\":\"response.completed\",\"response\":{\"status\":\"completed\"}}\n\n"
    );

    // Deliver the full payload in one upstream chunk.
    let upstream = stream::iter(vec![Ok::<_, std::io::Error>(Bytes::from(
        input.as_bytes().to_vec(),
    ))]);
    let converted = create_anthropic_sse_stream_from_responses(upstream);
    let chunks: Vec<_> = converted.collect().await;

    // Parse each SSE block's `data:` field into a JSON event.
    let mut events: Vec<Value> = Vec::new();
    for chunk in chunks {
        let bytes = chunk.unwrap();
        let text = String::from_utf8_lossy(bytes.as_ref()).to_string();
        for block in text.split("\n\n") {
            let parsed = block.lines().find_map(|line| {
                strip_sse_field(line, "data")
                    .and_then(|payload| serde_json::from_str::<Value>(payload).ok())
            });
            if let Some(value) = parsed {
                events.push(value);
            }
        }
    }

    // The synthesized tool_use start block must expose `"input": {}`.
    let tool_start = events
        .iter()
        .find(|event| {
            let is_start =
                event.get("type").and_then(|v| v.as_str()) == Some("content_block_start");
            let is_tool = event
                .pointer("/content_block/type")
                .and_then(|v| v.as_str())
                == Some("tool_use");
            is_start && is_tool
        })
        .unwrap();
    assert_eq!(tool_start["content_block"]["input"], json!({}));
}

#[tokio::test]
async fn test_streaming_responses_chinese_split_across_chunks_no_replacement_chars() {
// Chinese text delta split across two TCP chunks.
Expand Down