# personal memory agent — tests for the health-log CLI
1# SPDX-License-Identifier: AGPL-3.0-only
2# Copyright (c) 2026 sol pbc
3
4import argparse
5import re
6from datetime import datetime, timedelta
7
8import pytest
9
10
def make_journal(tmp_path, day, services, supervisor_lines=None):
    """Build a synthetic journal tree with per-service health logs.

    Layout produced under *tmp_path*:
      <day>/health/ref_<name>.log   real log content for each service
      <day>/health/<name>.log       symlink -> ref_<name>.log
      health/<name>.log             symlink -> ../<day>/health/ref_<name>.log
      health/supervisor.log         plain file, only when supervisor_lines given
    """
    day_health = tmp_path / day / "health"
    day_health.mkdir(parents=True)

    for svc_name, svc_lines in services.items():
        target = day_health / f"ref_{svc_name}.log"
        # Empty services produce an empty file, not a lone newline.
        target.write_text(
            "\n".join(svc_lines) + "\n" if svc_lines else "", encoding="utf-8"
        )
        (day_health / f"{svc_name}.log").symlink_to(f"ref_{svc_name}.log")

    root_health = tmp_path / "health"
    root_health.mkdir(exist_ok=True)

    for svc_name in services:
        (root_health / f"{svc_name}.log").symlink_to(
            f"../{day}/health/ref_{svc_name}.log"
        )

    if supervisor_lines is not None:
        # supervisor.log lives only at the journal root, as a regular file.
        (root_health / "supervisor.log").write_text(
            "\n".join(supervisor_lines) + "\n" if supervisor_lines else "",
            encoding="utf-8",
        )

    return tmp_path
38
39
40def _args(
41 *,
42 c: int = 5,
43 since: datetime | None = None,
44 service: str | None = None,
45 grep: re.Pattern[str] | None = None,
46) -> argparse.Namespace:
47 return argparse.Namespace(c=c, f=False, since=since, service=service, grep=grep)
48
49
def test_parse_log_line_runner():
    """A runner-style line parses into service, stream, message, timestamp."""
    from think.logs_cli import parse_log_line

    parsed = parse_log_line("2026-02-09T10:00:00 [echo:stdout] hello world")
    assert parsed is not None
    assert (parsed.service, parsed.stream) == ("echo", "stdout")
    assert parsed.message == "hello world"
    assert parsed.timestamp == datetime(2026, 2, 9, 10, 0, 0)
59
60
def test_parse_log_line_supervisor():
    """Supervisor lines use the [supervisor:log] tag and parse normally."""
    from think.logs_cli import parse_log_line

    parsed = parse_log_line("2026-02-09T10:00:00 [supervisor:log] INFO Starting service")
    assert parsed is not None
    assert (parsed.service, parsed.stream) == ("supervisor", "log")
    assert parsed.message == "INFO Starting service"
71
72
def test_parse_log_line_malformed():
    """Lines that do not match the expected log format yield None."""
    from think.logs_cli import parse_log_line

    for bad in ("not a log line", "", "2026-02-09T10:00:00 no brackets"):
        assert parse_log_line(bad) is None
79
80
def test_parse_since_relative():
    """A relative spec like '30m' resolves to ~thirty minutes before now."""
    from think.logs_cli import parse_since

    age = datetime.now() - parse_since("30m")
    # Wide tolerance so slow test runs don't flake.
    assert timedelta(minutes=29) <= age <= timedelta(minutes=31)
87
88
def test_parse_since_absolute():
    """A 24-hour spec like '16:00' resolves to today at that time."""
    from think.logs_cli import parse_since

    parsed = parse_since("16:00")
    assert (parsed.hour, parsed.minute) == (16, 0)
    assert parsed.date() == datetime.now().date()
96
97
def test_parse_since_absolute_ampm():
    """12-hour clock forms like '4pm' and '4:30pm' are accepted."""
    from think.logs_cli import parse_since

    assert parse_since("4pm").hour == 16
    half_past = parse_since("4:30pm")
    assert (half_past.hour, half_past.minute) == (16, 30)
106
107
def test_parse_since_invalid():
    """An unparseable --since value raises argparse.ArgumentTypeError."""
    from think.logs_cli import parse_since

    with pytest.raises(argparse.ArgumentTypeError):
        parse_since("xyz")
113
114
def test_filter_grep_invalid_regex():
    """A malformed --grep pattern raises argparse.ArgumentTypeError."""
    from think.logs_cli import compile_grep

    with pytest.raises(argparse.ArgumentTypeError):
        compile_grep("[invalid")
120
121
def test_tail_lines_large(tmp_path):
    """tail_lines_large returns exactly the last N lines of a big file."""
    from think.logs_cli import tail_lines_large

    log_path = tmp_path / "big.log"
    log_path.write_text(
        "".join(f"line {i}\n" for i in range(1000)), encoding="utf-8"
    )
    assert tail_lines_large(log_path, 5) == [f"line {i}" for i in range(995, 1000)]
130
131
def test_get_day_log_files_filters_non_symlinks(tmp_path):
    """Only *.log symlinks count; plain files and other suffixes are skipped."""
    from think.logs_cli import get_day_log_files

    health = tmp_path / "health"
    health.mkdir()
    (health / "ref_echo.log").write_text("data", encoding="utf-8")
    (health / "echo.log").symlink_to("ref_echo.log")
    (health / "something.port").write_text("8080", encoding="utf-8")
    found = get_day_log_files(health)
    assert [entry.name for entry in found] == ["echo.log"]
143
144
def test_collect_default(tmp_path, monkeypatch, capsys):
    """With no filters, the default count of lines is printed, oldest first."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    entries = [f"2026-02-09T10:{i:02d}:00 [echo:stdout] line {i}" for i in range(10)]
    make_journal(tmp_path, today, {"echo": entries})
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(c=5))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 5
    assert "line 5" in printed[0]
    assert "line 9" in printed[-1]
159
160
def test_collect_count(tmp_path, monkeypatch, capsys):
    """-c limits output to the most recent N lines."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    entries = [f"2026-02-09T11:{i:02d}:00 [echo:stdout] line {i}" for i in range(10)]
    make_journal(tmp_path, today, {"echo": entries})
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(c=2))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 2
    assert "line 8" in printed[0]
    assert "line 9" in printed[1]
175
176
def test_collect_headers_on_tty(tmp_path, monkeypatch, capsys):
    """Service headers appear when stdout is a TTY."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    journal_services = {
        "echo": [
            "2026-02-09T10:00:00 [echo:stdout] line a",
            "2026-02-09T10:02:00 [echo:stdout] line c",
        ],
        "observer": [
            "2026-02-09T10:01:00 [observer:stdout] line b",
        ],
    }
    make_journal(tmp_path, today, journal_services)
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    monkeypatch.setattr("sys.stdout.isatty", lambda: True)

    logs_cli.collect_and_print(_args(c=5))

    printed = capsys.readouterr().out.strip().splitlines()
    # A header is emitted each time the service changes in the merged stream,
    # so both services get at least one header.
    assert any("── echo ──" in line for line in printed)
    assert any("── observer ──" in line for line in printed)
    # Content lines survive alongside the headers.
    for needle in ("line a", "line b", "line c"):
        assert any(needle in line for line in printed)
205
206
def test_collect_no_headers_when_piped(tmp_path, monkeypatch, capsys):
    """No headers when stdout is not a TTY (piped)."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    journal_services = {
        "echo": ["2026-02-09T10:00:00 [echo:stdout] line a"],
        "observer": ["2026-02-09T10:01:00 [observer:stdout] line b"],
    }
    make_journal(tmp_path, today, journal_services)
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))
    monkeypatch.setattr("sys.stdout.isatty", lambda: False)

    logs_cli.collect_and_print(_args(c=5))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 2
    assert not any("──" in line for line in printed)
227
228
def test_filter_service(tmp_path, monkeypatch, capsys):
    """--service restricts output to the named service only."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    journal_services = {
        "echo": [
            "2026-02-09T10:00:00 [echo:stdout] alpha",
            "2026-02-09T10:01:00 [echo:stdout] beta",
        ],
        "observer": [
            "2026-02-09T10:00:30 [observer:stdout] gamma",
            "2026-02-09T10:01:30 [observer:stdout] delta",
        ],
    }
    make_journal(tmp_path, today, journal_services)
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(service="echo"))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 2
    assert all("[echo:stdout]" in line for line in printed)
249
250
def test_filter_grep(tmp_path, monkeypatch, capsys):
    """--grep keeps only lines whose message matches the pattern."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    entries = [
        "2026-02-09T10:00:00 [echo:stdout] normal line",
        "2026-02-09T10:01:00 [echo:stdout] special event",
        "2026-02-09T10:02:00 [echo:stdout] unrelated text",
    ]
    make_journal(tmp_path, today, {"echo": entries})
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(grep=re.compile("normal|special")))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 2
    assert "normal line" in printed[0]
    assert "special event" in printed[1]
269
270
def test_filter_grep_regex_or(tmp_path, monkeypatch, capsys):
    """--grep supports regex alternation across distinct messages."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    entries = [
        "2026-02-09T10:00:00 [echo:stdout] alpha entry",
        "2026-02-09T10:01:00 [echo:stdout] special entry",
        "2026-02-09T10:02:00 [echo:stdout] beta entry",
    ]
    make_journal(tmp_path, today, {"echo": entries})
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(grep=re.compile("alpha|special")))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 2
    assert "alpha entry" in printed[0]
    assert "special entry" in printed[1]
289
290
def test_filter_since(tmp_path, monkeypatch, capsys):
    """--since drops entries timestamped before the cutoff."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    entries = [
        "2026-02-09T10:00:00 [echo:stdout] old",
        "2026-02-09T10:20:00 [echo:stdout] new",
    ]
    make_journal(tmp_path, today, {"echo": entries})
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    # Cutoff falls between the two entries.
    logs_cli.collect_and_print(_args(since=datetime(2026, 2, 9, 10, 10, 0)))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 1
    assert "new" in printed[0]
307
308
def test_count_limits_filtered_output(tmp_path, monkeypatch, capsys):
    """-c applies after --grep: the last N *matching* lines are printed."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    entries = [
        f"2026-02-09T10:{i:02d}:00 [echo:stdout] special line {i}" for i in range(10)
    ]
    make_journal(tmp_path, today, {"echo": entries})
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(c=3, grep=re.compile("special")))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 3
    for position, number in enumerate((7, 8, 9)):
        assert f"special line {number}" in printed[position]
326
327
def test_filters_compose(tmp_path, monkeypatch, capsys):
    """--service and --grep combine: a line must satisfy both filters."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    journal_services = {
        "echo": [
            "2026-02-09T10:00:00 [echo:stdout] keep this special",
            "2026-02-09T10:01:00 [echo:stdout] ignore this",
        ],
        "observer": [
            "2026-02-09T10:00:30 [observer:stdout] special but wrong service",
        ],
    }
    make_journal(tmp_path, today, journal_services)
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(service="echo", grep=re.compile("special")))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 1
    assert "[echo:stdout]" in printed[0]
    assert "keep this special" in printed[0]
348
349
def test_supervisor_included_default(tmp_path, monkeypatch, capsys):
    """Without filters, -c lines are taken per source, supervisor included."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    echo_entries = [
        f"2026-02-09T10:{i:02d}:00 [echo:stdout] line {i}" for i in range(3)
    ]
    supervisor_entries = [
        f"2026-02-09T10:{i + 3:02d}:00 [supervisor:log] INFO {tag}"
        for i, tag in enumerate("abc")
    ]
    make_journal(
        tmp_path, today, {"echo": echo_entries}, supervisor_lines=supervisor_entries
    )
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(c=2))

    printed = capsys.readouterr().out.strip().splitlines()
    # 2 lines from echo + 2 from supervisor.
    assert len(printed) == 4
    assert any("[echo:stdout]" in line for line in printed)
    assert any("[supervisor:log]" in line for line in printed)
373
374
def test_supervisor_excluded_with_filters(tmp_path, monkeypatch, capsys):
    """When a filter is active, supervisor.log is left out of the results."""
    from think import logs_cli

    today = datetime.now().strftime("%Y%m%d")
    make_journal(
        tmp_path,
        today,
        {"echo": ["2026-02-09T10:00:00 [echo:stdout] special"]},
        supervisor_lines=["2026-02-09T10:01:00 [supervisor:log] INFO special"],
    )
    monkeypatch.setenv("_SOLSTONE_JOURNAL_OVERRIDE", str(tmp_path))

    logs_cli.collect_and_print(_args(grep=re.compile("special")))

    printed = capsys.readouterr().out.strip().splitlines()
    assert len(printed) == 1
    assert "[echo:stdout]" in printed[0]
    assert "[supervisor:log]" not in printed[0]
393 assert "[supervisor:log]" not in output[0]