Skip to content

Commit ec83625

Browse files
committed
LLMLogFormatter: rich logging to an output stream
Add a streaming implementation of rich LLM logging to use if we don't have the webview connected; it converts the LLMInteractionItems into a human-readable stream with timestamps, handling of overlapping interactions, and so forth. Use this for the binary.
1 parent 0a11233 commit ec83625

File tree

3 files changed

+744
-1
lines changed

3 files changed

+744
-1
lines changed

Diff for: binary/src/index.ts

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
process.env.IS_BINARY = "true";
22
import { Command } from "commander";
33
import { Core } from "core/core";
4+
import { LLMLogFormatter } from "core/llm/logFormatter";
45
import { FromCoreProtocol, ToCoreProtocol } from "core/protocol";
56
import { IMessenger } from "core/protocol/messenger";
67
import { getCoreLogsPath, getPromptLogsPath } from "core/util/paths";
@@ -33,7 +34,8 @@ program.action(async () => {
3334
const ide = new IpcIde(messenger);
3435
const promptLogsPath = getPromptLogsPath();
3536

36-
new Core(messenger, ide);
37+
const core = new Core(messenger, ide);
38+
new LLMLogFormatter(core.llmLogger, fs.createWriteStream(promptLogsPath));
3739

3840
console.log("[binary] Core started");
3941
} catch (e) {

Diff for: core/llm/logFormatter.test.ts

+315
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,315 @@
1+
import { Buffer } from "buffer";
2+
import { Writable } from "stream";
3+
import { LLMLogFormatter } from "./logFormatter";
4+
import { LLMLogger } from "./logger";
5+
6+
class MemoryWritable extends Writable {
7+
data: Buffer[] = [];
8+
constructor() {
9+
super();
10+
this.data = [];
11+
}
12+
13+
_write(
14+
chunk: Buffer,
15+
encoding: BufferEncoding,
16+
callback: (error?: Error | null) => void,
17+
): void {
18+
this.data.push(chunk);
19+
callback();
20+
}
21+
22+
getText() {
23+
const decoder = new TextDecoder();
24+
return decoder.decode(Buffer.concat(this.data as any));
25+
}
26+
}
27+
28+
describe("LLMLogFormatter", () => {
  // NOTE(review): these expected strings were recovered from a
  // whitespace-collapsed diff view; the original likely used runs of
  // spaces to align the log columns — verify against the repository.
  it("should format log items correctly", () => {
    const logger = new LLMLogger();
    const output = new MemoryWritable();
    // Constructing the formatter subscribes it to the logger;
    // the variable exists only for that side effect.
    const formatter = new LLMLogFormatter(logger, output);

    // Timestamps are epoch milliseconds; 1698765432100 renders as
    // "15:17:12.1" below (presumably formatted in UTC — confirm).
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765432100,
      kind: "startChat",
      options: {
        model: "granite3.2-dense:8b",
      },
      messages: [
        {
          role: "system",
          content: "You are a helpful assistant",
        },
      ],
    });
    // Consecutive assistant chunks without a newline should be joined
    // onto one output line ("Hello, " + "world!").
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433100,
      kind: "message",
      message: {
        role: "assistant",
        content: "Hello, ",
      },
    });
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433200,
      kind: "message",
      message: {
        role: "assistant",
        content: "world!",
      },
    });
    // A leading "\n" starts a new output line with a fresh
    // relative-time prefix ("+1.2").
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433300,
      kind: "message",
      message: {
        role: "assistant",
        content: "\nHow can I help you?",
      },
    });
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765434100,
      kind: "success",
      promptTokens: 10,
      generatedTokens: 20,
      thinkingTokens: 0,
    });

    // Note: thinkingTokens of 0 is apparently omitted from the output.
    expect(output.getText()).toBe(
      [
        " 15:17:12.1 [Chat]",
        " Options: {",
        ' "model": "granite3.2-dense:8b"',
        " }",
        " Role: system",
        " | You are a helpful assistant",
        " +1.0 Role: assistant",
        " | Hello, world!",
        " +1.2 | How can I help you?",
        " +2.0 Success",
        " Prompt Tokens: 10",
        " Generated Tokens: 20",
        "",
      ].join("\n"),
    );
  });

  it("should format completion interactions correctly", () => {
    const logger = new LLMLogger();
    const output = new MemoryWritable();
    const formatter = new LLMLogFormatter(logger, output);

    // Completion ("complete") interactions log a prompt string and
    // stream raw text chunks rather than role-tagged messages.
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765432100,
      kind: "startComplete",
      options: {
        model: "granite3.2-dense:8b",
      },
      prompt: "A horse is a horse",
    });
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433100,
      kind: "chunk",
      chunk: ", of course, of course",
    });

    expect(output.getText()).toBe(
      [
        " 15:17:12.1 [Complete]",
        " Options: {",
        ' "model": "granite3.2-dense:8b"',
        " }",
        " Prompt:",
        " | A horse is a horse",
        " +1.0 Result:",
        " | , of course, of course",
      ].join("\n"),
    );
  });

  it("should format FIM interactions correctly", () => {
    const logger = new LLMLogger();
    const output = new MemoryWritable();
    const formatter = new LLMLogFormatter(logger, output);

    // Fill-in-middle interactions carry a prefix and a suffix;
    // embedded newlines become separate "|"-prefixed output lines.
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765432100,
      kind: "startFim",
      options: {
        model: "granite3.2-dense:8b",
      },
      prefix: "A\nB",
      suffix: "D\nE",
    });
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433100,
      kind: "chunk",
      chunk: "C",
    });

    expect(output.getText()).toBe(
      [
        " 15:17:12.1 [Fim]",
        " Options: {",
        ' "model": "granite3.2-dense:8b"',
        " }",
        " Prefix:",
        " | A",
        " | B",
        " Suffix:",
        " | D",
        " | E",
        " +1.0 Result:",
        " | C",
      ].join("\n"),
    );
  });

  it("should interleave log items correctly", () => {
    const logger = new LLMLogger();
    const output = new MemoryWritable();
    const formatter = new LLMLogFormatter(logger, output);

    // Two interactions ("1" and "2") overlap in time; the formatter
    // distinguishes the second with a "|" gutter marker.
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433100,
      kind: "message",
      message: {
        role: "assistant",
        content: "Hello, ",
      },
    });
    logger._logItem({
      interactionId: "2",
      timestamp: 1698765433200,
      kind: "message",
      message: {
        role: "assistant",
        content: "Hello, ",
      },
    });
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433300,
      kind: "message",
      message: {
        role: "assistant",
        content: "World!",
      },
    });
    logger._logItem({
      interactionId: "2",
      timestamp: 1698765433400,
      kind: "message",
      message: {
        role: "assistant",
        content: "World!",
      },
    });
    // Check if we end interaction 2 and start another interaction,
    // it gets a different prefix marker.
    logger._logItem({
      interactionId: "2",
      timestamp: 1698765434000,
      kind: "success",
      promptTokens: 10,
      generatedTokens: 20,
      thinkingTokens: 0,
    });
    logger._logItem({
      interactionId: "3",
      timestamp: 1698765434100,
      kind: "message",
      message: {
        role: "assistant",
        content: "Hello, World!",
      },
    });

    // Interaction 3 reuses the slot freed by 2 but gets the "&"
    // marker so its lines are not confused with interaction 2's.
    expect(output.getText()).toBe(
      [
        " 15:17:13.1 Role: assistant",
        " | Hello, ",
        "|15:17:13.2 Role: assistant",
        "| | Hello, ",
        " +0.2 | World!",
        "| +0.2 | World!",
        "| +0.8 Success",
        "| Prompt Tokens: 10",
        "| Generated Tokens: 20",
        "&15:17:14.1 Role: assistant",
        "& | Hello, World!",
      ].join("\n"),
    );
  });

  it("should wrap long lines", () => {
    const logger = new LLMLogger();
    const output = new MemoryWritable();
    // Third argument is the wrap width (40 columns) — continuation
    // lines are marked with "." instead of "|".
    const formatter = new LLMLogFormatter(logger, output, 40);

    // Test a single message long enough to wrap; the input is
    // "<xx...xx>", not "xxxx" to make sure we are writing the correct
    // substrings.
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433100,
      kind: "message",
      message: {
        role: "assistant",
        content: "<" + "x".repeat(58) + ">",
      },
    });
    // Test when a first message doesn't wrap, but another message
    // that continues the same line causes a wrap
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433200,
      kind: "message",
      message: {
        role: "assistant",
        content: "\n<" + "y".repeat(29),
      },
    });
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433300,
      kind: "message",
      message: {
        role: "assistant",
        content: "y".repeat(29) + ">",
      },
    });
    // Word-wrappable text should break at spaces, not mid-word.
    logger._logItem({
      interactionId: "1",
      timestamp: 1698765433400,
      kind: "message",
      message: {
        role: "assistant",
        content: "\n" + "Artichokes and Beans ".repeat(3),
      },
    });

    expect(output.getText()).toBe(
      [
        " 15:17:13.1 Role: assistant",
        " | <" + "x".repeat(39),
        " . " + "x".repeat(19) + ">",
        " +0.1 | <" + "y".repeat(39),
        " +0.2 . " + "y".repeat(19) + ">",
        " +0.3 | Artichokes and Beans Artichokes and",
        " . Beans Artichokes and Beans ",
      ].join("\n"),
    );
  });
});

0 commit comments

Comments
 (0)