mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 20:21:09 +02:00
feat(agent-loop): expose compactionsDone + compactedReminder producer
Closes the loop on M2: when the compactor fires, the LLM needs to know
it's now seeing a <compact-summary> instead of raw turns so it
doesn't waste a turn asking about lost details or re-executing tools
whose responses are gone.
shared-ai:
- LoopState grows `compactionsDone: number` (capped at 1 by the current
loop policy, but kept as a count for future multi-compact cycles).
- runPlannerLoop populates it on each reminder-channel call. New
loop test asserts [0, 1] sequence: round 1 before compaction,
round 2 after.
mana-ai:
- New producer `compactedReminder` — fires severity=info when
compactionsDone >= 1, wrapped in a German one-liner ("frag nicht
nach verlorenen Details").
- Injected FIRST in buildReminderChannel so the LLM frames the rest
of the round with "I'm looking at a summary" context. Metric
surface stays `{producer='compacted', severity='info'}`.
4 new reminder tests (3 pure producer + 1 composition-ordering) +
1 loop-wiring test. 77 shared-ai tests and 20 in reminders.test.ts — all green.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
be8f5618c6
commit
72f7978ed4
4 changed files with 122 additions and 0 deletions
|
|
@ -480,6 +480,50 @@ describe('runPlannerLoop — compactor', () => {
|
|||
expect(compactSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('surfaces compactionsDone in LoopState for reminder producers', async () => {
|
||||
const llm = new MockLlmClient();
|
||||
// Round 1: over threshold
|
||||
(llm as unknown as { queue: unknown[] }).queue.push({
|
||||
content: null,
|
||||
toolCalls: [{ id: 'c1', name: 'list_things', arguments: {} }],
|
||||
finishReason: 'tool_calls',
|
||||
usage: { promptTokens: 950, completionTokens: 0, totalTokens: 950 },
|
||||
});
|
||||
// Round 2: stop so we end cleanly
|
||||
llm.enqueueStop('done');
|
||||
|
||||
const compactionsDoneSeen: number[] = [];
|
||||
await runPlannerLoop({
|
||||
llm,
|
||||
input: {
|
||||
systemPrompt: 's',
|
||||
userPrompt: 'u',
|
||||
tools,
|
||||
model: 'm',
|
||||
compactor: {
|
||||
maxContextTokens: 1000,
|
||||
compact: async () => ({
|
||||
messages: [
|
||||
{ role: 'system', content: 's' },
|
||||
{ role: 'user', content: 'u' },
|
||||
{ role: 'assistant', content: '<compact>' },
|
||||
],
|
||||
compactedTurns: 2,
|
||||
}),
|
||||
},
|
||||
reminderChannel: (state) => {
|
||||
compactionsDoneSeen.push(state.compactionsDone);
|
||||
return [];
|
||||
},
|
||||
},
|
||||
onToolCall: async () => ({ success: true, message: 'ok' }),
|
||||
});
|
||||
|
||||
// Round 1 channel call: before compaction fires, so 0
|
||||
// Round 2 channel call: after compaction, so 1
|
||||
expect(compactionsDoneSeen).toEqual([0, 1]);
|
||||
});
|
||||
|
||||
it('skips when the compactor returns 0 compacted turns', async () => {
|
||||
const llm = new MockLlmClient();
|
||||
(llm as unknown as { queue: unknown[] }).queue.push({
|
||||
|
|
|
|||
|
|
@ -100,6 +100,15 @@ export interface LoopState {
|
|||
* tool), and similar. Empty in round 1; grows up to the cap.
|
||||
*/
|
||||
readonly recentCalls: readonly ExecutedCall[];
|
||||
/**
|
||||
* Number of times the compactor has folded the message history in
|
||||
* this loop run. Capped at 1 by the loop itself (fire-once policy),
|
||||
* but still exposed as a count rather than a boolean so future
|
||||
* policies (e.g. multi-compact cycles) don't need a breaking API
|
||||
* change. A producer can use this to inject a "just compacted"
|
||||
* reminder on the round immediately after compaction.
|
||||
*/
|
||||
readonly compactionsDone: number;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -276,6 +285,7 @@ export async function runPlannerLoop(opts: {
|
|||
},
|
||||
lastCall: executedCalls[executedCalls.length - 1],
|
||||
recentCalls,
|
||||
compactionsDone,
|
||||
};
|
||||
const reminders = input.reminderChannel(state);
|
||||
if (reminders.length > 0) {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue