Mirror of https://github.com/openclaw/openclaw.git (synced 2026-02-09 05:19:32 +08:00)
fix: preserve reasoning on tool-only turns
@@ -1,5 +1,5 @@
diff --git a/dist/providers/google-gemini-cli.js b/dist/providers/google-gemini-cli.js
-index 93aa26c395e9bd0df64376408a13d15ee9e7cce7..41a439e5fc370038a5febef9e8f021ee279cf8aa 100644
+index 93aa26c395e9bd0df64376408a13d15ee9e7cce7..beb585e2f2c13eec3bca98acade761101e4572ff 100644
--- a/dist/providers/google-gemini-cli.js
+++ b/dist/providers/google-gemini-cli.js
@@ -248,6 +248,11 @@ export const streamGoogleGeminiCli = (model, context, options) => {
@@ -15,7 +15,7 @@ index 93aa26c395e9bd0df64376408a13d15ee9e7cce7..41a439e5fc370038a5febef9e8f021ee
        if (attempt < MAX_RETRIES && isRetryableError(response.status, errorText)) {
            // Use server-provided delay or exponential backoff
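The hunk above only surfaces two context lines, a retry guard and a comment about using the server-provided delay or exponential backoff. A minimal sketch of that pattern is below; it is an illustration under assumptions, not the actual google-gemini-cli.js code, and the names getRetryDelayMs, fetchWithRetry, and the 500 ms base delay are invented for the example (only MAX_RETRIES and isRetryableError appear in the diff).

// Sketch only: retry a failed request using the server-provided Retry-After
// delay when present, otherwise fall back to exponential backoff.
const MAX_RETRIES = 3;

function isRetryableError(status, errorText) {
    // 429 and 5xx responses are typically safe to retry; others are treated as fatal.
    return status === 429 || status >= 500;
}

function getRetryDelayMs(response, attempt) {
    const retryAfter = response.headers.get("retry-after");
    if (retryAfter) {
        const seconds = Number(retryAfter);
        if (Number.isFinite(seconds)) return seconds * 1000; // server-provided delay
    }
    return 500 * 2 ** attempt; // exponential backoff fallback
}

async function fetchWithRetry(url, init) {
    for (let attempt = 0; ; attempt += 1) {
        const response = await fetch(url, init);
        if (response.ok) return response;
        const errorText = await response.text();
        if (attempt < MAX_RETRIES && isRetryableError(response.status, errorText)) {
            await new Promise((resolve) => setTimeout(resolve, getRetryDelayMs(response, attempt)));
            continue;
        }
        throw new Error(`Request failed (${response.status}): ${errorText}`);
    }
}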
diff --git a/dist/providers/openai-codex-responses.js b/dist/providers/openai-codex-responses.js
-index 188a8294f26fe1bfe3fb298a7f58e4d8eaf2a529..2f543a3ee9ea7603519562ac52ee00b6a7d0432c 100644
+index 188a8294f26fe1bfe3fb298a7f58e4d8eaf2a529..ccfe2e835918530ddf9d2ce17b44b0069b41648e 100644
--- a/dist/providers/openai-codex-responses.js
+++ b/dist/providers/openai-codex-responses.js
@@ -515,7 +515,7 @@ function convertTools(tools) {
@@ -28,7 +28,7 @@ index 188a8294f26fe1bfe3fb298a7f58e4d8eaf2a529..2f543a3ee9ea7603519562ac52ee00b6
}
function mapStopReason(status) {
diff --git a/dist/providers/openai-responses.js b/dist/providers/openai-responses.js
-index f07085c64390b211340d6a826b28ea9c2e77302f..71509b70c5aa762872eab3b5ffc7a42579aa881f 100644
+index f07085c64390b211340d6a826b28ea9c2e77302f..7f758532246cc7b062df48e9cec4e6c904b76a99 100644
--- a/dist/providers/openai-responses.js
+++ b/dist/providers/openai-responses.js
@@ -396,10 +396,16 @@ function convertMessages(model, context) {
@@ -48,14 +48,20 @@ index f07085c64390b211340d6a826b28ea9c2e77302f..71509b70c5aa762872eab3b5ffc7a425
            const reasoningItem = JSON.parse(block.thinkingSignature);
            output.push(reasoningItem);
        }
@@ -430,7 +431,7 @@ export const streamOpenAIResponses = (model, context, options) => {
        const hasAssistantMessage = output.some((item) => item.type === "message");
        const hasFunctionCall = output.some((item) => item.type === "function_call");
-        if (!hasAssistantMessage && hasFunctionCall) {
@@ -434,6 +440,16 @@ function convertMessages(model, context) {
            });
        }
    }
+        const hasAssistantMessage = output.some((item) => item.type === "message");
+        const hasFunctionCall = output.some((item) => item.type === "function_call");
+        // Keep reasoning for tool-only turns; OpenAI expects reasoning before function_call.
+        if (!hasAssistantMessage && !hasFunctionCall) {
        for (let i = output.length - 1; i >= 0; i -= 1) {
            if (output[i].type === "reasoning") {
                output.splice(i, 1);
            }
        }
+            for (let i = output.length - 1; i >= 0; i -= 1) {
+                if (output[i].type === "reasoning") {
+                    output.splice(i, 1);
+                }
+            }
+        }
        if (output.length === 0)
            continue;
        messages.push(...output);
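Read together, the removed and added lines suggest the change in behavior: reasoning items used to be stripped whenever a turn had no assistant message even if it carried a function call, and now they are stripped only when the turn has neither, so a tool-only turn keeps its reasoning ahead of the function_call, as the added comment notes. A minimal sketch of that before/after condition follows; pruneReasoning is a hypothetical name, the item shapes in the usage example are assumptions, and this is an illustration of the condition shown in the diff, not the full convertMessages implementation.

// Sketch only: the pruning condition from the diff, applied to a stand-alone output array.
function pruneReasoning(output) {
    const hasAssistantMessage = output.some((item) => item.type === "message");
    const hasFunctionCall = output.some((item) => item.type === "function_call");
    // Old condition (per the removed line): !hasAssistantMessage && hasFunctionCall,
    // which dropped reasoning on tool-only turns. New condition: drop reasoning only
    // when neither a message nor a function_call follows it.
    if (!hasAssistantMessage && !hasFunctionCall) {
        for (let i = output.length - 1; i >= 0; i -= 1) {
            if (output[i].type === "reasoning") {
                output.splice(i, 1);
            }
        }
    }
    return output;
}

// Usage (assumed item shapes): a tool-only turn keeps its reasoning item,
// so the reasoning still precedes the function_call when the turn is replayed.
pruneReasoning([
    { type: "reasoning", summary: [] },
    { type: "function_call", name: "read_file", arguments: "{}" },
]); // both items preserved

pruneReasoning([
    { type: "reasoning", summary: [] },
]); // reasoning removed; nothing left to send for this turn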