feat: add auth-aware cache defaults

This commit is contained in:
Peter Steinberger
2026-01-21 20:23:30 +00:00
parent e4b3c8b98d
commit 6492e90c1b
6 changed files with 245 additions and 4 deletions

View File

@@ -15,6 +15,17 @@ Session pruning trims **old tool results** from the in-memory context right befo
- For best results, match `ttl` to your model `cacheControlTtl`.
- After a prune, the TTL window resets so subsequent requests keep cache until `ttl` expires again.
## Smart defaults (Anthropic)
- **OAuth or setup-token** profiles: enable `cache-ttl` pruning and set heartbeat to `1h`.
- **API key** profiles: enable `cache-ttl` pruning, set heartbeat to `30m`, and default `cacheControlTtl` to `1h` on Anthropic models.
- If you set any of these values explicitly, Clawdbot does **not** override them.
## What this improves (cost + cache behavior)
- **Why prune:** Anthropic prompt caching only applies within the TTL. If a session goes idle past the TTL, the next request re-caches the full prompt unless you trim it first.
- **What gets cheaper:** pruning reduces the **cacheWrite** size for that first request after the TTL expires.
- **Why the TTL reset matters:** once pruning runs, the cache window resets, so follow-up requests can reuse the freshly cached prompt instead of re-caching the full history again.
- **What it does not do:** pruning doesn't add tokens or “double” costs; it only changes what gets cached on that first post-TTL request.
## What can be pruned
- Only `toolResult` messages.
- User + assistant messages are **never** modified.

View File

@@ -1600,7 +1600,7 @@ Notes / current limitations:
- After a prune, the TTL window resets so subsequent requests keep cache until `ttl` expires again.
- For best results, match `contextPruning.ttl` to the model `cacheControlTtl` you set in `agents.defaults.models.*.params`.
Default (off):
Default (off, unless Anthropic auth profiles are detected):
```json5
{
agents: { defaults: { contextPruning: { mode: "off" } } }

View File

@@ -10,7 +10,7 @@ surface anything that needs attention without spamming you.
## Quick start (beginner)
1. Leave heartbeats enabled (default is `30m`) or set your own cadence.
1. Leave heartbeats enabled (default is `30m`, or `1h` for Anthropic OAuth/setup-token) or set your own cadence.
2. Create a tiny `HEARTBEAT.md` checklist in the agent workspace (optional but recommended).
3. Decide where heartbeat messages should go (`target: "last"` is the default).
4. Optional: enable heartbeat reasoning delivery for transparency.
@@ -33,7 +33,7 @@ Example config:
## Defaults
- Interval: `30m` (set `agents.defaults.heartbeat.every` or per-agent `agents.list[].heartbeat.every`; use `0m` to disable).
- Interval: `30m` (or `1h` when Anthropic OAuth/setup-token is the detected auth mode). Set `agents.defaults.heartbeat.every` or per-agent `agents.list[].heartbeat.every`; use `0m` to disable.
- Prompt body (configurable via `agents.defaults.heartbeat.prompt`):
`Read HEARTBEAT.md if it exists (workspace context). Follow it strictly. Do not infer or repeat old tasks from prior chats. If nothing needs attention, reply HEARTBEAT_OK.`
- The heartbeat prompt is sent **verbatim** as the user message. The system

View File

@@ -65,6 +65,41 @@ These are **USD per 1M tokens** for `input`, `output`, `cacheRead`, and
`cacheWrite`. If pricing is missing, Clawdbot shows tokens only. OAuth tokens
never show dollar cost.
## Cache TTL and pruning impact
Provider prompt caching only applies within the cache TTL window. Clawdbot can
optionally run **cache-ttl pruning**: it prunes the session once the cache TTL
has expired, then resets the cache window so subsequent requests can re-use the
freshly cached context instead of re-caching the full history. This keeps cache
write costs lower when a session goes idle past the TTL.
Configure it in [Gateway configuration](/gateway/configuration) and see the
behavior details in [Session pruning](/concepts/session-pruning).
Heartbeat can keep the cache **warm** across idle gaps. If your model cache TTL
is `1h`, setting the heartbeat interval just under that (e.g., `55m`) can avoid
re-caching the full prompt, reducing cache write costs.
For Anthropic API pricing, cache reads are significantly cheaper than input
tokens, while cache writes are billed at a higher multiplier. See Anthropic's
prompt caching pricing for the latest rates and TTL multipliers:
https://docs.anthropic.com/docs/build-with-claude/prompt-caching
### Example: keep 1h cache warm with heartbeat
```yaml
agents:
defaults:
model:
primary: "anthropic/claude-opus-4-5"
models:
"anthropic/claude-opus-4-5":
params:
cacheControlTtl: "1h"
heartbeat:
every: "55m"
```
## Tips for reducing token pressure
- Use `/compact` to summarize long sessions.

View File

@@ -5,6 +5,10 @@ import { withTempHome } from "./test-helpers.js";
describe("config pruning defaults", () => {
it("does not enable contextPruning by default", async () => {
const prevApiKey = process.env.ANTHROPIC_API_KEY;
const prevOauthToken = process.env.ANTHROPIC_OAUTH_TOKEN;
process.env.ANTHROPIC_API_KEY = "";
process.env.ANTHROPIC_OAUTH_TOKEN = "";
await withTempHome(async (home) => {
const configDir = path.join(home, ".clawdbot");
await fs.mkdir(configDir, { recursive: true });
@@ -20,6 +24,85 @@ describe("config pruning defaults", () => {
expect(cfg.agents?.defaults?.contextPruning?.mode).toBeUndefined();
});
if (prevApiKey === undefined) {
delete process.env.ANTHROPIC_API_KEY;
} else {
process.env.ANTHROPIC_API_KEY = prevApiKey;
}
if (prevOauthToken === undefined) {
delete process.env.ANTHROPIC_OAUTH_TOKEN;
} else {
process.env.ANTHROPIC_OAUTH_TOKEN = prevOauthToken;
}
});
it("enables cache-ttl pruning + 1h heartbeat for Anthropic OAuth", async () => {
  await withTempHome(async (home) => {
    // Seed a config whose only auth profile is an Anthropic OAuth login,
    // with empty agent defaults so the smart defaults can kick in.
    const dir = path.join(home, ".clawdbot");
    await fs.mkdir(dir, { recursive: true });
    const seeded = {
      auth: {
        profiles: {
          "anthropic:me": { provider: "anthropic", mode: "oauth", email: "me@example.com" },
        },
      },
      agents: { defaults: {} },
    };
    await fs.writeFile(path.join(dir, "clawdbot.json"), JSON.stringify(seeded, null, 2), "utf-8");
    // Re-import so loadConfig reads the freshly written file instead of a cached module.
    vi.resetModules();
    const { loadConfig } = await import("./config.js");
    const cfg = loadConfig();
    // OAuth detection should turn on cache-ttl pruning and the 1h heartbeat.
    expect(cfg.agents?.defaults?.contextPruning?.mode).toBe("cache-ttl");
    expect(cfg.agents?.defaults?.contextPruning?.ttl).toBe("1h");
    expect(cfg.agents?.defaults?.heartbeat?.every).toBe("1h");
  });
});
it("enables cache-ttl pruning + 1h cache TTL for Anthropic API keys", async () => {
  await withTempHome(async (home) => {
    // Seed an API-key profile plus an Anthropic primary model with no explicit TTLs.
    const dir = path.join(home, ".clawdbot");
    await fs.mkdir(dir, { recursive: true });
    const seeded = {
      auth: {
        profiles: {
          "anthropic:api": { provider: "anthropic", mode: "api_key" },
        },
      },
      agents: {
        defaults: {
          model: { primary: "anthropic/claude-opus-4-5" },
        },
      },
    };
    await fs.writeFile(path.join(dir, "clawdbot.json"), JSON.stringify(seeded, null, 2), "utf-8");
    // Re-import so loadConfig reads the freshly written file instead of a cached module.
    vi.resetModules();
    const { loadConfig } = await import("./config.js");
    const cfg = loadConfig();
    // API-key defaults: pruning on, 30m heartbeat, and a 1h cacheControlTtl
    // injected for the primary Anthropic model's params.
    expect(cfg.agents?.defaults?.contextPruning?.mode).toBe("cache-ttl");
    expect(cfg.agents?.defaults?.contextPruning?.ttl).toBe("1h");
    expect(cfg.agents?.defaults?.heartbeat?.every).toBe("30m");
    expect(
      cfg.agents?.defaults?.models?.["anthropic/claude-opus-4-5"]?.params?.cacheControlTtl,
    ).toBe("1h");
  });
});
it("does not override explicit contextPruning mode", async () => {

View File

@@ -1,3 +1,4 @@
import { parseModelRef } from "../agents/model-selection.js";
import { resolveTalkApiKey } from "./talk.js";
import type { ClawdbotConfig } from "./types.js";
import { DEFAULT_AGENT_MAX_CONCURRENT, DEFAULT_SUBAGENT_MAX_CONCURRENT } from "./agent-limits.js";
@@ -6,6 +7,8 @@ type WarnState = { warned: boolean };
let defaultWarnState: WarnState = { warned: false };
type AnthropicAuthDefaultsMode = "api_key" | "oauth";
const DEFAULT_MODEL_ALIASES: Readonly<Record<string, string>> = {
// Anthropic (pi-ai catalog uses "latest" ids without date suffix)
opus: "anthropic/claude-opus-4-5",
@@ -20,6 +23,40 @@ const DEFAULT_MODEL_ALIASES: Readonly<Record<string, string>> = {
"gemini-flash": "google/gemini-3-flash-preview",
};
/**
 * Infer which Anthropic auth flavor should drive smart defaults.
 *
 * Resolution order:
 * 1. The first Anthropic profile listed in `auth.order.anthropic` decides.
 * 2. Otherwise, if the configured profiles are unambiguous (only API-key, or
 *    only OAuth/token), that mode wins.
 * 3. Otherwise, fall back to non-empty `ANTHROPIC_OAUTH_TOKEN` / `ANTHROPIC_API_KEY`
 *    environment variables, in that order.
 * Returns null when no Anthropic auth can be determined.
 */
function resolveAnthropicDefaultAuthMode(cfg: ClawdbotConfig): AnthropicAuthDefaultsMode | null {
  const profiles = cfg.auth?.profiles ?? {};
  // Map a profile's mode string onto the two default-driving buckets.
  const classify = (mode: unknown) => {
    if (mode === "api_key") return "api_key";
    if (mode === "oauth" || mode === "token") return "oauth";
    return null;
  };
  // Explicit ordering wins outright.
  for (const profileId of cfg.auth?.order?.anthropic ?? []) {
    const profile = profiles[profileId];
    if (profile?.provider !== "anthropic") continue;
    const ordered = classify(profile.mode);
    if (ordered) return ordered;
  }
  // No ordering: accept the profile set only when it is unambiguous.
  const modes = Object.values(profiles)
    .filter((profile) => profile?.provider === "anthropic")
    .map((profile) => classify(profile?.mode));
  const hasApiKey = modes.includes("api_key");
  const hasOauth = modes.includes("oauth");
  if (hasApiKey !== hasOauth) return hasApiKey ? "api_key" : "oauth";
  // Ambiguous or empty profiles: let environment variables decide.
  if (process.env.ANTHROPIC_OAUTH_TOKEN?.trim()) return "oauth";
  if (process.env.ANTHROPIC_API_KEY?.trim()) return "api_key";
  return null;
}
/**
 * Normalize a configured primary-model ref, expanding known aliases.
 * Returns null for missing, non-string, or blank input; unknown refs pass
 * through unchanged (trimmed).
 */
function resolvePrimaryModelRef(raw?: string): string | null {
  if (typeof raw !== "string") return null;
  const trimmed = raw.trim();
  if (trimmed.length === 0) return null;
  // Aliases are matched case-insensitively against DEFAULT_MODEL_ALIASES.
  return DEFAULT_MODEL_ALIASES[trimmed.toLowerCase()] ?? trimmed;
}
export type SessionDefaultsOptions = {
warn?: (message: string) => void;
warnState?: WarnState;
@@ -157,7 +194,82 @@ export function applyLoggingDefaults(cfg: ClawdbotConfig): ClawdbotConfig {
}
/**
 * Apply auth-aware context-pruning defaults for Anthropic setups.
 *
 * When an Anthropic auth mode is detected (via profiles, auth order, or env
 * vars — see resolveAnthropicDefaultAuthMode) and the user has not set the
 * corresponding values explicitly:
 * - enables `contextPruning.mode = "cache-ttl"` with a `ttl` default of "1h";
 * - defaults the heartbeat interval to "1h" for OAuth/token auth, "30m" otherwise;
 * - for API-key auth, injects `params.cacheControlTtl = "1h"` on Anthropic
 *   model entries (and on the resolved primary model, creating its entry).
 * Explicitly configured values are never overridden. Returns a new config
 * object when anything changed; otherwise returns `cfg` unchanged.
 *
 * Fix: removed a stray early `return cfg;` that made the entire defaulting
 * logic below it unreachable dead code.
 */
export function applyContextPruningDefaults(cfg: ClawdbotConfig): ClawdbotConfig {
  const defaults = cfg.agents?.defaults;
  if (!defaults) return cfg;
  const authMode = resolveAnthropicDefaultAuthMode(cfg);
  // No detectable Anthropic auth: leave the config untouched.
  if (!authMode) return cfg;
  let mutated = false;
  const nextDefaults = { ...defaults };
  // Only fill contextPruning when the user has not chosen a mode themselves.
  if (defaults.contextPruning?.mode === undefined) {
    nextDefaults.contextPruning = {
      ...(defaults.contextPruning ?? {}),
      mode: "cache-ttl",
      ttl: defaults.contextPruning?.ttl ?? "1h",
    };
    mutated = true;
  }
  // Heartbeat cadence default: 1h keeps the OAuth cache warm; 30m otherwise.
  if (defaults.heartbeat?.every === undefined) {
    nextDefaults.heartbeat = {
      ...(defaults.heartbeat ?? {}),
      every: authMode === "oauth" ? "1h" : "30m",
    };
    mutated = true;
  }
  if (authMode === "api_key") {
    // API-key billing benefits from a 1h cacheControlTtl on Anthropic models.
    const nextModels = defaults.models ? { ...defaults.models } : {};
    let modelsMutated = false;
    for (const [key, entry] of Object.entries(nextModels)) {
      const parsed = parseModelRef(key, "anthropic");
      if (!parsed || parsed.provider !== "anthropic") continue;
      const current = entry ?? {};
      const params = (current as { params?: Record<string, unknown> }).params ?? {};
      // An explicitly configured cacheControlTtl always wins over the default.
      if (typeof params.cacheControlTtl === "string") continue;
      nextModels[key] = {
        ...(current as Record<string, unknown>),
        params: { ...params, cacheControlTtl: "1h" },
      };
      modelsMutated = true;
    }
    // Also cover the primary model even when it has no models entry yet.
    const primary = resolvePrimaryModelRef(defaults.model?.primary ?? undefined);
    if (primary) {
      const parsedPrimary = parseModelRef(primary, "anthropic");
      if (parsedPrimary?.provider === "anthropic") {
        const key = `${parsedPrimary.provider}/${parsedPrimary.model}`;
        const entry = nextModels[key];
        const current = entry ?? {};
        const params = (current as { params?: Record<string, unknown> }).params ?? {};
        if (typeof params.cacheControlTtl !== "string") {
          nextModels[key] = {
            ...(current as Record<string, unknown>),
            params: { ...params, cacheControlTtl: "1h" },
          };
          modelsMutated = true;
        }
      }
    }
    if (modelsMutated) {
      nextDefaults.models = nextModels;
      mutated = true;
    }
  }
  // Preserve reference equality when nothing changed so callers can cheaply
  // detect "no defaults applied".
  if (!mutated) return cfg;
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: nextDefaults,
    },
  };
}
export function applyCompactionDefaults(cfg: ClawdbotConfig): ClawdbotConfig {