Deps: update Pi + Vitest and add Bedrock docs
@@ -1,5 +1,12 @@
 # Changelog
 
+## 2026.1.13-2
+
+### Changes
+
+- Dependencies: bump Pi packages to 0.45.3 and refresh patched pi-ai.
+- Testing: update Vitest + browser-playwright to 4.0.17.
+- Docs: add Amazon Bedrock provider notes and link from models/FAQ.
 
 ## 2026.1.13-1
 
 ### Changes
docs/bedrock.md (new file, 71 lines)
@@ -0,0 +1,71 @@
---
summary: "Use Amazon Bedrock (Converse API) models with Clawdbot"
read_when:
  - You want to use Amazon Bedrock models with Clawdbot
  - You need AWS credential/region setup for model calls
---

# Amazon Bedrock

Clawdbot can use **Amazon Bedrock** models via pi‑ai’s **Bedrock Converse**
streaming provider. Bedrock auth uses the **AWS SDK default credential chain**,
not an API key.

## What pi‑ai supports

- Provider: `amazon-bedrock`
- API: `bedrock-converse-stream`
- Auth: AWS credentials (env vars, shared config, or instance role)
- Region: `AWS_REGION` or `AWS_DEFAULT_REGION` (default: `us-east-1`)

## Setup (manual)

1) Ensure AWS credentials are available on the **gateway host**:

```bash
export AWS_ACCESS_KEY_ID="AKIA..."
export AWS_SECRET_ACCESS_KEY="..."
export AWS_REGION="us-east-1"
# Optional:
export AWS_SESSION_TOKEN="..."
export AWS_PROFILE="your-profile"
```
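
To sanity-check the credentials before wiring up the provider, the AWS CLI calls below should succeed from the gateway host (a sketch, assuming AWS CLI v2 is installed; the second call also needs the `bedrock:ListFoundationModels` permission):

```bash
# Which identity does the default credential chain resolve to?
aws sts get-caller-identity

# Is Bedrock reachable in this region, and which model IDs does it offer?
aws bedrock list-foundation-models \
  --region "${AWS_REGION:-us-east-1}" \
  --query 'modelSummaries[].modelId' \
  --output text
```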

2) Add a Bedrock provider and model to your config:

```json5
{
  models: {
    providers: {
      "amazon-bedrock": {
        baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com",
        api: "bedrock-converse-stream",
        models: [
          {
            id: "anthropic.claude-3-7-sonnet-20250219-v1:0",
            name: "Claude 3.7 Sonnet (Bedrock)",
            reasoning: true,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 200000,
            maxTokens: 8192
          }
        ]
      }
    }
  },
  agents: {
    defaults: {
      model: { primary: "amazon-bedrock/anthropic.claude-3-7-sonnet-20250219-v1:0" }
    }
  }
}
```

## Notes

- Bedrock requires **model access** enabled in your AWS account/region.
- If you use profiles, set `AWS_PROFILE` on the gateway host.
- Reasoning support depends on the model; check the Bedrock model card for
  current capabilities.
- If you prefer a managed key flow, you can also place an OpenAI‑compatible
  proxy in front of Bedrock and configure it as an OpenAI provider instead.
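
As the notes above mention, Bedrock model access is granted per account and region. To confirm the exact model ID from the config is callable end to end, recent AWS CLI builds expose the Converse API directly; a one-off smoke test might look like this (a sketch, assuming your CLI version ships the `bedrock-runtime converse` command and the model is already enabled):

```bash
aws bedrock-runtime converse \
  --region "${AWS_REGION:-us-east-1}" \
  --model-id "anthropic.claude-3-7-sonnet-20250219-v1:0" \
  --messages '[{"role":"user","content":[{"text":"Reply with the single word: pong"}]}]'
```

If this call is rejected, enable model access in the Bedrock console before debugging the Clawdbot config.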
@@ -31,6 +31,7 @@ model as `provider/model`.
 - [Z.AI](/providers/zai)
 - [GLM models](/providers/glm)
 - [MiniMax](/providers/minimax)
+- [Amazon Bedrock](/bedrock)
 
 For the full provider catalog (xAI, Groq, Mistral, etc.) and advanced configuration,
 see [Model providers](/concepts/model-providers).
@@ -212,7 +212,7 @@ Yes. Clawdbot can **reuse Claude Code CLI credentials** (OAuth) and also support
 
 ### Is AWS Bedrock supported?
 
-Not currently. Clawdbot doesn’t ship a Bedrock provider today. If you must use Bedrock, the common workaround is an OpenAI‑compatible proxy in front of Bedrock, then point Clawdbot at that endpoint. See [Model providers](/providers/models) and [Model providers (full list)](/concepts/model-providers).
+Yes — via pi‑ai’s **Amazon Bedrock (Converse)** provider with **manual config**. You must supply AWS credentials/region on the gateway host and add a Bedrock provider entry in your models config. See [Amazon Bedrock](/bedrock) and [Model providers](/providers/models). If you prefer a managed key flow, an OpenAI‑compatible proxy in front of Bedrock is still a valid option.
 
 ### How does Codex auth work?
package.json (10 lines changed)
@@ -132,10 +132,10 @@
     "@grammyjs/runner": "^2.0.3",
     "@grammyjs/transformer-throttler": "^1.2.1",
     "@homebridge/ciao": "^1.3.4",
-    "@mariozechner/pi-agent-core": "^0.43.0",
-    "@mariozechner/pi-ai": "^0.43.0",
-    "@mariozechner/pi-coding-agent": "^0.43.0",
-    "@mariozechner/pi-tui": "^0.43.0",
+    "@mariozechner/pi-agent-core": "^0.45.3",
+    "@mariozechner/pi-ai": "^0.45.3",
+    "@mariozechner/pi-coding-agent": "^0.45.3",
+    "@mariozechner/pi-tui": "^0.45.3",
     "@microsoft/agents-hosting": "^1.1.1",
     "@microsoft/agents-hosting-express": "^1.1.1",
     "@microsoft/agents-hosting-extensions-teams": "^1.1.1",
@@ -207,7 +207,7 @@
       "@sinclair/typebox": "0.34.47"
     },
     "patchedDependencies": {
-      "@mariozechner/pi-ai@0.43.0": "patches/@mariozechner__pi-ai@0.43.0.patch"
+      "@mariozechner/pi-ai@0.45.3": "patches/@mariozechner__pi-ai@0.45.3.patch"
     }
   },
   "vitest": {
patches/@mariozechner__pi-ai@0.43.0.patch → patches/@mariozechner__pi-ai@0.45.3.patch
@@ -1,8 +1,8 @@
 diff --git a/dist/providers/google-gemini-cli.js b/dist/providers/google-gemini-cli.js
-index 12540bb1069087a0d0a2967f792008627b9f79d9..f30b525620e6d8e45146b439ec3733e4053c9d2a 100644
+index cc9e0cb..2b18ec4 100644
 --- a/dist/providers/google-gemini-cli.js
 +++ b/dist/providers/google-gemini-cli.js
-@@ -248,6 +248,11 @@ export const streamGoogleGeminiCli = (model, context, options) => {
+@@ -329,6 +329,11 @@ export const streamGoogleGeminiCli = (model, context, options) => {
 break; // Success, exit retry loop
 }
 const errorText = await response.text();
@@ -14,41 +14,8 @@ index 12540bb1069087a0d0a2967f792008627b9f79d9..f30b525620e6d8e45146b439ec3733e4
 // Check if retryable
 if (attempt < MAX_RETRIES && isRetryableError(response.status, errorText)) {
 // Use server-provided delay or exponential backoff
-diff --git a/dist/providers/google-shared.js b/dist/providers/google-shared.js
-index ae4710b0f134ac4a48f5b7053f454d1068bee71f..b1b5bd94586f68461ccc44e4a9cdf3acb4e0d084 100644
---- a/dist/providers/google-shared.js
-+++ b/dist/providers/google-shared.js
-@@ -42,6 +42,8 @@ export function retainThoughtSignature(existing, incoming) {
- export function convertMessages(model, context) {
- const contents = [];
- const transformedMessages = transformMessages(context.messages, model);
-+ const shouldStripFunctionId = typeof model.provider === "string" &&
-+ model.provider.startsWith("google");
- for (const msg of transformedMessages) {
- if (msg.role === "user") {
- if (typeof msg.content === "string") {
-@@ -113,6 +115,9 @@ export function convertMessages(model, context) {
- args: block.arguments,
- },
- };
-+ if (shouldStripFunctionId && part?.functionCall?.id) {
-+ delete part.functionCall.id; // Google Gemini/Vertex do not support 'id' in functionCall
-+ }
- if (block.thoughtSignature) {
- part.thoughtSignature = block.thoughtSignature;
- }
-@@ -155,6 +160,9 @@ export function convertMessages(model, context) {
- ...(hasImages && supportsMultimodalFunctionResponse && { parts: imageParts }),
- },
- };
-+ if (shouldStripFunctionId && functionResponsePart.functionResponse?.id) {
-+ delete functionResponsePart.functionResponse.id; // Google Gemini/Vertex do not support 'id' in functionResponse
-+ }
- // Cloud Code Assist API requires all function responses to be in a single user turn.
- // Check if the last content is already a user turn with function responses and merge.
- const lastContent = contents[contents.length - 1];
 diff --git a/dist/providers/openai-codex-responses.js b/dist/providers/openai-codex-responses.js
-index ad0a2aabbe10382cee4e463b68a02864dd235e57..8c001acfd0b4e0743181c246f1bedcf8cd2ffb02 100644
+index 7488c79..4c34587 100644
 --- a/dist/providers/openai-codex-responses.js
 +++ b/dist/providers/openai-codex-responses.js
 @@ -517,7 +517,7 @@ function convertTools(tools) {
@@ -61,10 +28,10 @@ index ad0a2aabbe10382cee4e463b68a02864dd235e57..8c001acfd0b4e0743181c246f1bedcf8
 }
 function mapStopReason(status) {
 diff --git a/dist/providers/openai-responses.js b/dist/providers/openai-responses.js
-index f07085c64390b211340d6a826b28ea9c2e77302f..7f758532246cc7b062df48e9cec4e6c904b76a99 100644
+index c4714f4..4d1e6b0 100644
 --- a/dist/providers/openai-responses.js
 +++ b/dist/providers/openai-responses.js
-@@ -396,10 +396,16 @@ function convertMessages(model, context) {
+@@ -400,10 +400,16 @@ function convertMessages(model, context) {
 }
 else if (msg.role === "assistant") {
 const output = [];
@@ -81,7 +48,7 @@ index f07085c64390b211340d6a826b28ea9c2e77302f..7f758532246cc7b062df48e9cec4e6c9
 const reasoningItem = JSON.parse(block.thinkingSignature);
 output.push(reasoningItem);
 }
-@@ -434,6 +440,16 @@ function convertMessages(model, context) {
+@@ -438,6 +444,16 @@ function convertMessages(model, context) {
 });
 }
 }
pnpm-lock.yaml (generated, 1573 lines changed; diff suppressed because it is too large)
@@ -10,4 +10,4 @@ onlyBuiltDependencies:
   - sharp
 
 patchedDependencies:
-  '@mariozechner/pi-ai@0.43.0': patches/@mariozechner__pi-ai@0.43.0.patch
+  '@mariozechner/pi-ai@0.45.3': patches/@mariozechner__pi-ai@0.45.3.patch
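
For context, a `patchedDependencies` entry like the one above is normally maintained with pnpm's patching workflow rather than edited by hand; a rough sketch of regenerating the patch for the new version (package name and version taken from this diff, details may vary by pnpm version):

```bash
# Check out an editable copy of the published package
pnpm patch @mariozechner/pi-ai@0.45.3
# ...re-apply the local edits in the directory pnpm prints, then persist them:
pnpm patch-commit <directory-printed-by-pnpm-patch>
```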
@@ -171,7 +171,7 @@ describe("google-shared convertMessages", () => {
         {
             type: "thinking",
             thinking: "hidden",
-            thinkingSignature: "sig",
+            thinkingSignature: "c2ln",
         },
     ],
     api: "google-generative-ai",
@@ -202,7 +202,7 @@ describe("google-shared convertMessages", () => {
     expect(contents[0].role).toBe("model");
     expect(contents[0].parts?.[0]).toMatchObject({
         thought: true,
-        thoughtSignature: "sig",
+        thoughtSignature: "c2ln",
     });
 });
 
@@ -216,7 +216,7 @@ describe("google-shared convertMessages", () => {
         {
             type: "thinking",
             thinking: "structured",
-            thinkingSignature: "sig",
+            thinkingSignature: "c2ln",
         },
     ],
     api: "google-generative-ai",
@@ -247,7 +247,7 @@ describe("google-shared convertMessages", () => {
     expect(parts).toHaveLength(1);
     expect(parts[0]).toMatchObject({
         thought: true,
-        thoughtSignature: "sig",
+        thoughtSignature: "c2ln",
     });
 });
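
One note on the fixture change above: `c2ln` is the base64 encoding of the previous literal `sig`, so the tests appear to track a pi-ai change to base64-encoded thought signatures (an inference from the fixture values, not stated in the diff):

```bash
printf 'sig' | base64   # prints: c2ln
```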
@@ -15,10 +15,10 @@
     "vite": "7.3.1"
   },
   "devDependencies": {
-    "@vitest/browser-playwright": "4.0.16",
+    "@vitest/browser-playwright": "4.0.17",
     "playwright": "^1.57.0",
     "typescript": "^5.9.3",
-    "vitest": "4.0.16"
+    "vitest": "4.0.17"
   },
   "pnpm": {
     "minimumReleaseAge": 2880