# personal memory agent
1# SPDX-License-Identifier: AGPL-3.0-only
2# Copyright (c) 2026 sol pbc
3
4"""Facet-specific utilities and tooling for the think module."""
5
6import json
7import logging
8import os
9import re
10import shutil
11from datetime import datetime, timezone
12from pathlib import Path
13from typing import Any, Optional
14
15from think.entities import get_identity_names
16from think.utils import DATE_RE, day_path, get_journal, iter_segments
17
18
def _get_principal_display_name() -> str | None:
    """Return the principal's display name from the identity config.

    The first entry of get_identity_names() is used (the preferred name
    when one is set, otherwise the full name).

    Returns:
        The display name string, or None when identity is not configured.
    """
    identity_names = get_identity_names()
    if not identity_names:
        return None
    return identity_names[0]
27
28
def _format_principal_role(
    entities: list[dict[str, Any]],
) -> tuple[str | None, list[dict[str, Any]]]:
    """Extract the principal entity and build a markdown role line.

    Args:
        entities: List of entity dicts from load_entities()

    Returns:
        Tuple of (role_line, filtered_entities):
        - role_line: markdown like "**Jer's Role**: Description", or None
          when the principal, display name, or description is missing.
        - filtered_entities: the entities without the principal (the full
          original list is returned whenever no role line was produced).
    """
    # Partition into principal(s) and everyone else. If several entries
    # are flagged as principal, the last one wins (matching dict overwrite
    # semantics of a single-pass scan).
    principals = [e for e in entities if e.get("is_principal")]
    others = [e for e in entities if not e.get("is_principal")]

    if not principals:
        return None, entities

    principal = principals[-1]
    display_name = _get_principal_display_name()
    description = principal.get("description", "").strip()

    # Without both a name and a description there is nothing to render;
    # keep the principal in the returned list in that case.
    if not (display_name and description):
        return None, entities

    return f"**{display_name}'s Role**: {description}", others
64
65
66def _format_entity_name_with_aka(entity: dict[str, Any]) -> str:
67 """Format entity name, appending aka values in parentheses if present.
68
69 Args:
70 entity: Entity dict with 'name' and optional 'aka' list
71
72 Returns:
73 Formatted name string, e.g. "John Smith" or "John Smith (JS, Johnny)"
74 """
75 name = entity.get("name", "")
76 aka_list = entity.get("aka", [])
77 if isinstance(aka_list, list) and aka_list:
78 aka_str = ", ".join(aka_list)
79 return f"{name} ({aka_str})"
80 return name
81
82
83def _format_activity_line(activity: dict[str, Any], *, bold_name: bool = False) -> str:
84 """Format a single activity as a markdown list item.
85
86 Args:
87 activity: Activity dict with 'name'/'id', 'description', 'priority'
88 bold_name: If True, wraps name in **bold**
89
90 Returns:
91 Formatted string like "**Meetings** (high): Description" or "Meetings (high): Description"
92 """
93 name = activity.get("name", activity.get("id", ""))
94 desc = activity.get("description", "")
95 priority = activity.get("priority", "normal")
96
97 # Format with priority tag if non-normal
98 if priority == "high":
99 priority_suffix = " (high)"
100 elif priority == "low":
101 priority_suffix = " (low)"
102 else:
103 priority_suffix = ""
104
105 if bold_name:
106 name_part = f"**{name}**{priority_suffix}"
107 else:
108 name_part = f"{name}{priority_suffix}"
109
110 if desc:
111 return f"{name_part}: {desc}"
112 return name_part
113
114
def _write_action_log(
    facet: str | None,
    action: str,
    params: dict[str, Any],
    source: str,
    actor: str,
    day: str | None = None,
    agent_id: str | None = None,
) -> None:
    """Append one action entry to the daily JSONL audit log.

    Facet-scoped actions are written to facets/{facet}/logs/{day}.jsonl;
    journal-level actions (facet is None) go to config/actions/{day}.jsonl.
    Prefer log_call_action() for CLI call commands or log_app_action() for
    web apps over calling this directly.

    Args:
        facet: Facet name where the action occurred, or None for journal-level
        action: Action type (e.g., "todo_add", "entity_attach")
        params: Dictionary of action-specific parameters
        source: Origin type - "tool" for agents, "app" for web UI
        actor: For tools: agent name. For apps: app name
        day: Day in YYYYMMDD format (defaults to today, local time)
        agent_id: Optional agent ID (only for tool actions)
    """
    journal_root = Path(get_journal())
    log_day = day if day is not None else datetime.now().strftime("%Y%m%d")

    # Journal-level entries live under config/actions/, facet entries
    # under the facet's own logs/ directory.
    if facet is None:
        log_path = journal_root / "config" / "actions" / f"{log_day}.jsonl"
    else:
        log_path = journal_root / "facets" / facet / "logs" / f"{log_day}.jsonl"
    log_path.parent.mkdir(parents=True, exist_ok=True)

    entry: dict[str, Any] = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "source": source,
        "actor": actor,
        "action": action,
        "params": params,
    }
    # Optional fields are omitted entirely rather than written as null.
    if facet is not None:
        entry["facet"] = facet
    if agent_id is not None:
        entry["agent_id"] = agent_id

    with open(log_path, "a", encoding="utf-8") as log_file:
        log_file.write(json.dumps(entry, ensure_ascii=False) + "\n")
175
176
def log_call_action(
    facet: str | None,
    action: str,
    params: dict[str, Any],
    *,
    day: str | None = None,
) -> None:
    """Log an action from a ``sol call`` CLI command.

    Creates a JSONL log entry for tracking successful modifications made via
    ``sol call`` subcommands (entities, todos, etc.).

    When facet is provided, writes to facets/{facet}/logs/{day}.jsonl.
    When facet is None, writes to config/actions/{day}.jsonl for journal-level
    actions (settings changes, system operations, etc.).

    Args:
        facet: Facet name where the action occurred, or None for journal-level
        action: Action type (e.g., "todo_add", "entity_attach")
        params: Dictionary of action-specific parameters
        day: Day in YYYYMMDD format (defaults to today)
    """
    # Every ``sol call`` entry shares the same fixed source/actor pair.
    _write_action_log(
        facet,
        action,
        params,
        source="call",
        actor="agent",
        day=day,
    )
207
208
def get_facets() -> dict[str, dict[str, object]]:
    """Return available facets with metadata.

    Each key is the facet name (directory name under facets/). The value
    contains the metadata from facet.json (title, description, color,
    emoji, muted) plus the facet path. Directories without a readable
    facet.json dict are skipped.
    """
    facets_root = Path(get_journal()) / "facets"
    result: dict[str, dict[str, object]] = {}

    if not facets_root.exists():
        return result

    for candidate in sorted(facets_root.iterdir()):
        if not candidate.is_dir():
            continue

        metadata_path = candidate / "facet.json"
        if not metadata_path.exists():
            continue

        try:
            with open(metadata_path, "r", encoding="utf-8") as f:
                raw = json.load(f)
        except Exception as exc:  # pragma: no cover - metadata optional
            logging.debug("Error reading %s: %s", metadata_path, exc)
            continue

        # Only dict-shaped metadata is accepted; anything else is ignored.
        if isinstance(raw, dict):
            result[candidate.name] = {
                "path": str(candidate),
                "title": raw.get("title", candidate.name),
                "description": raw.get("description", ""),
                "color": raw.get("color", ""),
                "emoji": raw.get("emoji", ""),
                "muted": raw.get("muted", False),
            }

    return result
250
251
def get_enabled_facets() -> dict[str, dict[str, object]]:
    """Return non-muted facets only.

    Convenience wrapper around get_facets() that filters out muted facets.
    Used by scheduled agents to skip processing for muted facets.
    """
    enabled: dict[str, dict[str, object]] = {}
    for name, info in get_facets().items():
        if not info.get("muted", False):
            enabled[name] = info
    return enabled
259
260
def facet_summary(facet: str, *, detailed: bool = True) -> str:
    """Generate a nicely formatted markdown summary of a facet.

    Args:
        facet: The facet name to summarize
        detailed: If True (default), include full descriptions for entities
            and activities. If False, show names only.

    Returns:
        Formatted markdown string with facet title, description, entities,
        and activities

    Raises:
        FileNotFoundError: If the facet doesn't exist or has no facet.json
    """
    # Imported lazily to avoid import cycles at module load time.
    from think.activities import get_facet_activities
    from think.entities import load_entities

    facet_path = Path(get_journal()) / "facets" / facet
    if not facet_path.exists():
        raise FileNotFoundError(f"Facet '{facet}' not found at {facet_path}")

    # Load facet metadata
    facet_json_path = facet_path / "facet.json"
    if not facet_json_path.exists():
        raise FileNotFoundError(f"facet.json not found for facet '{facet}'")

    with open(facet_json_path, "r", encoding="utf-8") as f:
        facet_data = json.load(f)

    # Extract metadata
    title = facet_data.get("title", facet)
    description = facet_data.get("description", "")
    color = facet_data.get("color", "")

    # Build markdown summary
    lines = []

    # Title without emoji
    lines.append(f"# {title}")

    # NOTE(review): this branch only emits blank lines even though the
    # original comment mentioned a color badge — the badge content appears
    # to have been dropped; confirm intended output before changing it.
    # (The placeholder-free f-string, ruff F541, is now a plain literal;
    # output is unchanged.)
    if color:
        lines.append("")
        lines.append("")

    # Description
    if description:
        lines.append(f"**Description:** {description}")
        lines.append("")

    # Load entities if available
    entities = load_entities(facet)
    if entities:
        # Extract principal role line and filter principal from list
        role_line, display_entities = _format_principal_role(entities)

        if role_line:
            lines.append(role_line)
            lines.append("")

        if display_entities:
            if detailed:
                lines.append("## Entities")
                lines.append("")
                for entity in display_entities:
                    entity_type = entity.get("type", "")
                    formatted_name = _format_entity_name_with_aka(entity)
                    desc = entity.get("description", "")

                    if desc:
                        lines.append(f"- **{entity_type}**: {formatted_name} - {desc}")
                    else:
                        lines.append(f"- **{entity_type}**: {formatted_name}")
                lines.append("")
            else:
                # Short mode: names only as semicolon-separated list
                entity_names = "; ".join(
                    _format_entity_name_with_aka(e) for e in display_entities
                )
                lines.append(f"**Entities**: {entity_names}")
                lines.append("")

    # Load activities if available
    activities = get_facet_activities(facet)
    if activities:
        if detailed:
            lines.append("## Activities")
            lines.append("")
            for activity in activities:
                lines.append(f"- {_format_activity_line(activity, bold_name=True)}")
            lines.append("")
        else:
            # Short mode: names only as semicolon-separated list
            activity_names = "; ".join(
                a.get("name", a.get("id", "")) for a in activities
            )
            lines.append(f"**Activities**: {activity_names}")
            lines.append("")

    return "\n".join(lines)
362
363
def get_facet_news(
    facet: str,
    *,
    cursor: Optional[str] = None,
    limit: int = 1,
    day: Optional[str] = None,
) -> dict[str, Any]:
    """Return facet news entries grouped by day, newest first.

    Parameters
    ----------
    facet:
        Facet name containing the news directory.
    cursor:
        Optional date string (``YYYYMMDD``). When provided, only news files with
        a date strictly earlier than the cursor are returned. This supports
        pagination in the UI where older entries are fetched on demand.
    limit:
        Maximum number of news days to return. Defaults to one day per request.
        A non-positive value returns all matching days.
    day:
        Optional specific day (``YYYYMMDD``) to return. When provided, returns
        only news for that specific day if it exists. Overrides cursor and limit.

    Returns
    -------
    dict[str, Any]
        Dictionary with ``days`` (list of news day payloads), ``next_cursor``
        (date string for subsequent requests) and ``has_more`` boolean flag.
    """
    news_dir = Path(get_journal()) / "facets" / facet / "news"
    if not news_dir.exists():
        return {"days": [], "next_cursor": None, "has_more": False}

    # If specific day requested, check for that file directly
    if day:
        news_path = news_dir / f"{day}.md"
        if news_path.exists() and news_path.is_file():
            selected = [news_path]
        else:
            # Requested day has no news file: empty, non-paginated result.
            return {"days": [], "next_cursor": None, "has_more": False}
    else:
        # Only files named exactly YYYYMMDD.md count as news days.
        news_files = [
            path
            for path in news_dir.iterdir()
            if path.is_file() and re.fullmatch(r"\d{8}\.md", path.name)
        ]

        # Sort newest first by file name (YYYYMMDD.md)
        # Lexicographic comparison of stems is safe because the format is
        # fixed-width YYYYMMDD.
        news_files.sort(key=lambda p: p.stem, reverse=True)

        if cursor:
            # Strictly-earlier filter: the cursor day itself is excluded.
            news_files = [path for path in news_files if path.stem < cursor]

        if limit is not None and limit > 0:
            selected = news_files[:limit]
        else:
            selected = news_files

    days: list[dict[str, Any]] = []

    for news_path in selected:
        date_key = news_path.stem

        # Read the raw markdown content
        # Unreadable files degrade to an empty string rather than failing
        # the whole request.
        raw_content = ""
        try:
            raw_content = news_path.read_text(encoding="utf-8")
        except Exception:
            pass

        days.append(
            {
                "date": date_key,
                "raw_content": raw_content,
            }
        )

    # When specific day requested, no pagination
    # (note: ``news_files`` only exists in the non-``day`` branch above, so
    # this ordering must be preserved).
    if day:
        has_more = False
        next_cursor = None
    else:
        has_more = len(news_files) > len(selected)
        next_cursor = selected[-1].stem if has_more and selected else None

    return {"days": days, "next_cursor": next_cursor, "has_more": has_more}
450
451
def is_facet_muted(facet: str) -> bool:
    """Check if a facet is currently muted.

    Args:
        facet: Facet name to check

    Returns:
        True if facet is muted, False if unmuted or facet doesn't exist
    """
    info = get_facets().get(facet)
    if info is None:
        # Unknown facets are reported as not muted.
        return False
    return bool(info.get("muted", False))
465
466
def load_segment_facets(day: str, segment: str, stream: str | None = None) -> list[str]:
    """Load facet IDs from a segment's facets.json output.

    Args:
        day: Day in YYYYMMDD format
        segment: Segment key (HHMMSS_LEN format)
        stream: Optional stream name. If None, searches all streams for the segment.

    Returns:
        Facet ID strings from the first candidate file that yields any;
        an empty list when none is found.
    """
    if stream:
        candidates = [day_path(day) / stream / segment / "agents" / "facets.json"]
    else:
        # No stream given: every stream that contains this segment key is
        # a candidate location.
        candidates = [
            seg_path / "agents" / "facets.json"
            for _s, seg_key, seg_path in iter_segments(day)
            if seg_key == segment
        ]

    for facets_file in candidates:
        if not facets_file.exists():
            continue

        try:
            content = facets_file.read_text().strip()
            if not content:
                continue

            data = json.loads(content)
            if not isinstance(data, list):
                logging.warning(f"facets.json is not an array: {facets_file}")
                continue

            # Keep only entries that actually carry a facet value.
            found = [item.get("facet") for item in data if item.get("facet")]
            if found:
                return found

        except json.JSONDecodeError as e:
            logging.error(f"Failed to parse facets.json for {segment}: {e}")
        except Exception as e:
            logging.error(f"Error reading facets.json for {segment}: {e}")

    logging.debug(f"No facets.json found for segment {segment}")
    return []
512
513
def get_active_facets(day: str) -> set[str]:
    """Return facets that had activity on a given day.

    Scans segment-level ``facets.json`` files produced by the facets
    classifier agent during recording.

    Args:
        day: Day in YYYYMMDD format

    Returns:
        Set of facet names that appeared in at least one segment's facets.json
    """
    found: set[str] = set()

    for stream_name, seg_key, _seg_path in iter_segments(day):
        for facet_name in load_segment_facets(day, seg_key, stream=stream_name):
            found.add(facet_name)

    return found
532
533
def aggregate_speculative_facets(days: list[str] | None = None) -> list[dict]:
    """Aggregate speculative facet outputs from segment classifiers across days.

    Scans per-segment agents/facets.json files produced by the facets classifier
    and counts facet name frequency. Useful during onboarding to suggest journal
    organization to the user.

    Args:
        days: Optional list of days in YYYYMMDD format. If None, scans all days.

    Returns:
        List of dicts sorted by count descending, capped at 8 entries, with keys:
        - "facet": facet name (str)
        - "count": number of segments where this facet appeared (int)
        - "sample_activities": up to 3 activity descriptions for this facet (list[str])
    """
    journal_path = Path(get_journal())

    if days is None:
        # No explicit day list: scan every day directory in the journal.
        scan_days: list[str] = []
        if journal_path.exists():
            scan_days = [
                entry.name
                for entry in sorted(journal_path.iterdir())
                if entry.is_dir() and DATE_RE.fullmatch(entry.name)
            ]
    else:
        scan_days = days

    counts: dict[str, int] = {}
    samples_by_facet: dict[str, list[str]] = {}

    for day in scan_days:
        for _stream, _seg_key, seg_path in iter_segments(day):
            facets_file = seg_path / "agents" / "facets.json"
            if not facets_file.exists():
                continue

            # Unreadable or malformed files are silently skipped; this is
            # best-effort aggregation over speculative classifier output.
            try:
                content = facets_file.read_text().strip()
                if not content:
                    continue
                data = json.loads(content)
            except (json.JSONDecodeError, OSError):
                continue

            if not isinstance(data, list):
                continue

            for item in data:
                if not isinstance(item, dict):
                    continue
                facet_name = item.get("facet")
                if not facet_name:
                    continue

                counts[facet_name] = counts.get(facet_name, 0) + 1

                activity = item.get("activity", "")
                if activity:
                    bucket = samples_by_facet.setdefault(facet_name, [])
                    # Cap samples at three per facet.
                    if len(bucket) < 3:
                        bucket.append(activity)

    # Stable sort by count descending, then cap at 8 suggestions.
    ranked = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
    return [
        {
            "facet": facet_name,
            "count": count,
            "sample_activities": samples_by_facet.get(facet_name, []),
        }
        for facet_name, count in ranked[:8]
    ]
608
609
def set_facet_muted(facet: str, muted: bool) -> None:
    """Mute or unmute a facet by updating facet.json.

    The file is rewritten atomically (temp file + os.replace) and an audit
    log entry is recorded — but only when the state actually changes.

    Args:
        facet: Facet name to modify
        muted: True to mute, False to unmute

    Raises:
        FileNotFoundError: If facet doesn't exist
    """
    facet_path = Path(get_journal()) / "facets" / facet
    if not facet_path.exists():
        raise FileNotFoundError(f"Facet '{facet}' not found at {facet_path}")

    facet_json_path = facet_path / "facet.json"
    if not facet_json_path.exists():
        raise FileNotFoundError(f"facet.json not found for facet '{facet}'")

    with open(facet_json_path, "r", encoding="utf-8") as f:
        facet_data = json.load(f)

    # Bail out early on a no-op so we don't rewrite the file or log.
    if bool(facet_data.get("muted", False)) == muted:
        return

    if muted:
        facet_data["muted"] = True
    else:
        # Unmuted is the default state, so drop the key rather than
        # writing an explicit False.
        facet_data.pop("muted", None)

    # Atomic rewrite: write a sibling temp file, then replace the original
    # so readers never observe a partially-written file.
    import tempfile

    temp_fd, temp_path = tempfile.mkstemp(
        dir=facet_json_path.parent, suffix=".json", text=True
    )
    try:
        with os.fdopen(temp_fd, "w", encoding="utf-8") as f:
            json.dump(facet_data, f, indent=2, ensure_ascii=False)
            f.write("\n")
        os.replace(temp_path, facet_json_path)
    except Exception:
        # Clean up the temp file before propagating the failure.
        try:
            os.unlink(temp_path)
        except Exception:
            pass
        raise

    log_call_action(
        facet=facet,
        action="facet_mute" if muted else "facet_unmute",
        params={"muted": muted},
    )
673
674
def create_facet(
    title: str,
    emoji: str = "📦",
    color: str = "#667eea",
    description: str = "",
    *,
    consent: bool = False,
) -> str:
    """Create a new facet directory with facet.json.

    Args:
        title: Display title for the facet
        emoji: Icon emoji (default: "📦")
        color: Hex color (default: "#667eea")
        description: Facet description
        consent: When True, records a "consent" flag in the audit log entry

    Returns:
        The generated slug name for the facet

    Raises:
        ValueError: If title is empty, slug is invalid, or facet already exists
    """
    title = title.strip()
    if not title:
        raise ValueError("Facet title is required.")

    # Slugify: collapse runs of non-alphanumerics into hyphens, trim edges.
    slug = re.sub(r"[^a-z0-9]+", "-", title.lower()).strip("-")
    if not re.fullmatch(r"[a-z][a-z0-9_-]*", slug):
        raise ValueError(
            f"Invalid facet name '{slug}': must be lowercase, start with a letter, "
            "and contain only letters, digits, hyphens, or underscores"
        )

    if slug in get_facets():
        raise ValueError(f"Facet '{slug}' already exists")

    facet_path = Path(get_journal()) / "facets" / slug
    facet_path.mkdir(parents=True, exist_ok=True)
    facet_json_path = facet_path / "facet.json"

    facet_data = {
        "title": title,
        "description": description,
        "color": color,
        "emoji": emoji,
    }

    # Atomic write: temp file in the same directory, then os.replace so a
    # crash mid-write never leaves a truncated facet.json.
    import tempfile

    temp_fd, temp_path = tempfile.mkstemp(
        dir=facet_json_path.parent, suffix=".json", text=True
    )
    try:
        with os.fdopen(temp_fd, "w", encoding="utf-8") as f:
            json.dump(facet_data, f, indent=2, ensure_ascii=False)
            f.write("\n")
        os.replace(temp_path, facet_json_path)
    except Exception:
        # Clean up the temp file before propagating the failure.
        try:
            os.unlink(temp_path)
        except Exception:
            pass
        raise

    log_params: dict = {
        "title": title,
        "emoji": emoji,
        "color": color,
        "description": description,
    }
    if consent:
        log_params["consent"] = True
    log_call_action(
        facet=slug,
        action="facet_create",
        params=log_params,
    )
    return slug
753
754
def update_facet(name: str, **kwargs: Any) -> dict[str, Any]:
    """Update facet.json fields for an existing facet.

    Args:
        name: Facet name
        **kwargs: Fields to update (title, description, emoji, color);
            unknown fields are ignored

    Returns:
        Dict of changed fields {field: {"old": ..., "new": ...}}

    Raises:
        FileNotFoundError: If facet doesn't exist
        ValueError: If no valid fields provided
    """
    facet_path = Path(get_journal()) / "facets" / name
    if not facet_path.exists():
        raise FileNotFoundError(f"Facet '{name}' not found at {facet_path}")

    # A missing facet.json is tolerated: start from an empty config.
    facet_json_path = facet_path / "facet.json"
    facet_data: dict[str, Any] = {}
    if facet_json_path.exists():
        with open(facet_json_path, "r", encoding="utf-8") as f:
            facet_data = json.load(f)

    allowed_fields = {"title", "description", "color", "emoji"}
    filtered = {k: v for k, v in kwargs.items() if k in allowed_fields}
    if not filtered:
        raise ValueError("No valid fields to update")

    changed_fields: dict[str, Any] = {}
    for field, new_value in filtered.items():
        old_value = facet_data.get(field)
        if old_value == new_value:
            continue
        changed_fields[field] = {"old": old_value, "new": new_value}
        facet_data[field] = new_value

    # Atomic rewrite via temp file + os.replace (file is rewritten even
    # when nothing changed, matching long-standing behavior).
    import tempfile

    temp_fd, temp_path = tempfile.mkstemp(
        dir=facet_json_path.parent, suffix=".json", text=True
    )
    try:
        with os.fdopen(temp_fd, "w", encoding="utf-8") as f:
            json.dump(facet_data, f, indent=2, ensure_ascii=False)
            f.write("\n")
        os.replace(temp_path, facet_json_path)
    except Exception:
        try:
            os.unlink(temp_path)
        except Exception:
            pass
        raise

    # Only real changes produce an audit log entry.
    if changed_fields:
        log_call_action(
            facet=name,
            action="facet_update",
            params={"changed_fields": changed_fields},
        )

    return changed_fields
817
818
def delete_facet(name: str, *, consent: bool = False) -> None:
    """Delete a facet directory and clean up references.

    Clears references to the facet in config/convey.json (facets.selected
    and facets.order), writes a journal-level audit log entry, then removes
    the facet directory tree.

    Args:
        name: Facet name to delete
        consent: When True, records a "consent" flag in the audit log entry

    Raises:
        FileNotFoundError: If facet doesn't exist
    """
    facet_path = Path(get_journal()) / "facets" / name
    if not facet_path.exists():
        raise FileNotFoundError(f"Facet '{name}' not found at {facet_path}")

    # Best-effort convey.json cleanup: a corrupt or unreadable config is
    # ignored so the deletion itself still proceeds.
    convey_config_path = Path(get_journal()) / "config" / "convey.json"
    if convey_config_path.exists():
        try:
            with open(convey_config_path, "r", encoding="utf-8") as f:
                config = json.load(f)

            changed = False
            facets_config = config.get("facets", {})

            if facets_config.get("selected") == name:
                facets_config["selected"] = ""
                changed = True

            order = facets_config.get("order", [])
            if name in order:
                facets_config["order"] = [item for item in order if item != name]
                changed = True

            if changed:
                config["facets"] = facets_config
                with open(convey_config_path, "w", encoding="utf-8") as f:
                    json.dump(config, f, indent=2, ensure_ascii=False)
                    f.write("\n")
        except (json.JSONDecodeError, OSError):
            pass

    # The action is logged before the tree is removed.
    log_params: dict = {"name": name}
    if consent:
        log_params["consent"] = True
    log_call_action(
        facet=None,
        action="facet_delete",
        params=log_params,
    )
    shutil.rmtree(facet_path)
869
870
def facet_summaries(*, detailed: bool = False) -> str:
    """Generate a formatted list summary of enabled (non-muted) facets for use in agent prompts.

    Returns a markdown-formatted string with each facet as a list item including:
    - Facet name and hashtag ID
    - Description
    - Entity names (if available)
    - Activity names (if available)

    Parameters
    ----------
    detailed:
        If True, includes full entity and activity details (name: description).
        If False (default), includes only names as semicolon-separated lists.

    Returns
    -------
    str
        Formatted markdown string with enabled facets, entities, and activities
    """
    # Imported lazily to avoid import cycles at module load time.
    from think.activities import get_facet_activities
    from think.entities import load_entities

    facets = get_enabled_facets()
    if not facets:
        return "No facets found."

    lines = []
    lines.append("## Available Facets\n")

    for facet_name, facet_info in sorted(facets.items()):
        title = facet_info.get("title", facet_name)
        description = facet_info.get("description", "")

        # Main list item for facet
        lines.append(f"- **{title}** (`{facet_name}`)")
        if description:
            lines.append(f"  {description}")

        # Entities: both modes share the load + principal-role extraction
        # (previously duplicated across the detailed/simple branches);
        # only the per-entity rendering differs.
        try:
            entities = load_entities(facet_name)
            if entities:
                role_line, display_entities = _format_principal_role(entities)
                if role_line:
                    lines.append(f"  - {role_line}")
                if display_entities:
                    if detailed:
                        lines.append(f"  - **{title} Entities**:")
                        for entity in display_entities:
                            formatted_name = _format_entity_name_with_aka(entity)
                            desc = entity.get("description", "")
                            if desc:
                                lines.append(f"    - {formatted_name}: {desc}")
                            else:
                                lines.append(f"    - {formatted_name}")
                    else:
                        # Simple mode: plain names only (no aka aliases).
                        entity_names = "; ".join(
                            e.get("name", "") for e in display_entities
                        )
                        lines.append(f"  - **{title} Entities**: {entity_names}")
        except Exception:
            # No entities file or error loading - that's fine, skip it
            pass

        # Load activities for this facet
        try:
            activities = get_facet_activities(facet_name)
            if activities:
                if detailed:
                    lines.append(f"  - **{title} Activities**:")
                    for activity in activities:
                        lines.append(
                            f"    - {_format_activity_line(activity, bold_name=False)}"
                        )
                else:
                    # Simple mode: activity names only
                    activity_names = "; ".join(
                        a.get("name", a.get("id", "")) for a in activities
                    )
                    lines.append(f"  - **{title} Activities**: {activity_names}")
        except Exception:
            # No activities file or error loading - that's fine, skip it
            pass

        lines.append("")  # Empty line between facets

    return "\n".join(lines).strip()
975
976
977def format_logs(
978 entries: list[dict],
979 context: dict | None = None,
980) -> tuple[list[dict], dict]:
981 """Format action log JSONL entries to markdown chunks.
982
983 This is the formatter function used by the formatters registry.
984 Handles both facet-scoped logs (facets/{facet}/logs/) and journal-level
985 logs (config/actions/).
986
987 Args:
988 entries: Raw JSONL entries (one action log per line)
989 context: Optional context with:
990 - file_path: Path to JSONL file (for extracting facet name and day)
991
992 Returns:
993 Tuple of (chunks, meta) where:
994 - chunks: List of dicts with keys:
995 - timestamp: int (unix ms)
996 - markdown: str
997 - source: dict (original log entry)
998 - meta: Dict with optional "header" and "error" keys
999 """
1000 ctx = context or {}
1001 file_path = ctx.get("file_path")
1002 meta: dict[str, Any] = {}
1003 chunks: list[dict[str, Any]] = []
1004 skipped_count = 0
1005
1006 # Extract facet name and day from path
1007 facet_name: str | None = None
1008 day_str: str | None = None
1009 is_journal_level = False
1010
1011 if file_path:
1012 file_path = Path(file_path)
1013 path_str = str(file_path)
1014
1015 # Check for journal-level logs: config/actions/YYYYMMDD.jsonl
1016 if "config/actions" in path_str or "config\\actions" in path_str:
1017 is_journal_level = True
1018 else:
1019 # Extract facet name from path: facets/{facet}/logs/YYYYMMDD.jsonl
1020 facet_match = re.search(r"facets/([^/]+)/logs", path_str)
1021 if facet_match:
1022 facet_name = facet_match.group(1)
1023
1024 # Extract day from filename
1025 if file_path.stem.isdigit() and len(file_path.stem) == 8:
1026 day_str = file_path.stem
1027
1028 # Build header
1029 if day_str:
1030 formatted_day = f"{day_str[:4]}-{day_str[4:6]}-{day_str[6:8]}"
1031 if is_journal_level:
1032 meta["header"] = f"# Journal Action Log ({formatted_day})"
1033 elif facet_name:
1034 meta["header"] = f"# Action Log: {facet_name} ({formatted_day})"
1035 else:
1036 meta["header"] = f"# Action Log ({formatted_day})"
1037 else:
1038 if is_journal_level:
1039 meta["header"] = "# Journal Action Log"
1040 elif facet_name:
1041 meta["header"] = f"# Action Log: {facet_name}"
1042 else:
1043 meta["header"] = "# Action Log"
1044
1045 # Format each log entry as a chunk
1046 for entry in entries:
1047 # Skip entries without action field
1048 action = entry.get("action")
1049 if not action:
1050 skipped_count += 1
1051 continue
1052
1053 # Parse timestamp
1054 ts = 0
1055 timestamp_str = entry.get("timestamp", "")
1056 time_display = ""
1057 if timestamp_str:
1058 try:
1059 dt = datetime.fromisoformat(timestamp_str)
1060 ts = int(dt.timestamp() * 1000)
1061 time_display = dt.strftime("%H:%M:%S")
1062 except (ValueError, TypeError):
1063 pass
1064
1065 # Extract fields
1066 source = entry.get("source", "unknown")
1067 actor = entry.get("actor", "unknown")
1068 params = entry.get("params", {})
1069 agent_id = entry.get("agent_id")
1070
1071 # Format action name for display (e.g., "todo_add" -> "Todo Add")
1072 action_display = action.replace("_", " ").title()
1073
1074 # Build markdown
1075 lines = [f"### {action_display} by {actor}", ""]
1076
1077 # Metadata line
1078 meta_parts = [f"**Source:** {source}"]
1079 if time_display:
1080 meta_parts.append(f"**Time:** {time_display}")
1081 lines.append(" | ".join(meta_parts))
1082
1083 # Agent link if present
1084 if agent_id:
1085 lines.append(f"**Agent:** [{agent_id}](/app/agents/{agent_id})")
1086
1087 lines.append("")
1088
1089 # Parameters
1090 if params and isinstance(params, dict):
1091 lines.append("**Parameters:**")
1092 for key, value in params.items():
1093 # Format value - truncate long strings
1094 if isinstance(value, str) and len(value) > 100:
1095 value = value[:100] + "..."
1096 lines.append(f"- {key}: {value}")
1097 lines.append("")
1098
1099 chunks.append(
1100 {
1101 "timestamp": ts,
1102 "markdown": "\n".join(lines),
1103 "source": entry,
1104 }
1105 )
1106
1107 # Report skipped entries
1108 if skipped_count > 0:
1109 error_msg = f"Skipped {skipped_count} entries missing 'action' field"
1110 meta["error"] = error_msg
1111 logging.info(error_msg)
1112
1113 # Indexer metadata - agent is "action" for action logs
1114 meta["indexer"] = {"agent": "action"}
1115
1116 return chunks, meta
1117
1118
def rename_facet(old_name: str, new_name: str) -> None:
    """Rename a facet by updating its directory and config references.

    Performs the following steps:
    1. Rename facets/{old}/ directory to facets/{new}/
    2. Update config/convey.json (facets.selected, facets.order)
    3. Print instruction to rebuild the search index

    Args:
        old_name: Current facet name (must exist)
        new_name: New facet name (must not already exist)

    Raises:
        ValueError: If names are invalid or preconditions fail
    """
    journal = get_journal()
    facets_dir = Path(journal) / "facets"

    # The new name must be a valid slug: lowercase, letter first, then
    # only letters/digits/hyphens/underscores.
    if not re.fullmatch(r"[a-z][a-z0-9_-]*", new_name):
        raise ValueError(
            f"Invalid facet name '{new_name}': must be lowercase, start with a letter, "
            "and contain only letters, digits, hyphens, or underscores"
        )

    old_path = facets_dir / old_name
    new_path = facets_dir / new_name
    if not old_path.is_dir():
        raise ValueError(f"Facet '{old_name}' does not exist")
    if new_path.exists():
        raise ValueError(f"Facet '{new_name}' already exists")

    # Step 1: move the directory.
    print(f"Renaming facets/{old_name}/ → facets/{new_name}/")
    os.rename(old_path, new_path)

    # Step 2: rewrite references in config/convey.json (best effort; a
    # broken config is logged and skipped, not fatal).
    convey_config_path = Path(journal) / "config" / "convey.json"
    if convey_config_path.exists():
        try:
            with open(convey_config_path, "r", encoding="utf-8") as f:
                config = json.load(f)

            facets_config = config.get("facets", {})
            changed = False

            if facets_config.get("selected") == old_name:
                facets_config["selected"] = new_name
                changed = True

            order = facets_config.get("order", [])
            if old_name in order:
                facets_config["order"] = [
                    new_name if name == old_name else name for name in order
                ]
                changed = True

            if changed:
                config["facets"] = facets_config
                with open(convey_config_path, "w", encoding="utf-8") as f:
                    json.dump(config, f, indent=2, ensure_ascii=False)
                    f.write("\n")
                print("Updated config/convey.json")
            else:
                print("No changes needed in config/convey.json")
        except (json.JSONDecodeError, OSError) as exc:
            logging.warning("Failed to update convey config: %s", exc)

    # Step 3: the search index still points at the old name, so tell the
    # operator to rebuild it.
    print(
        "Facet renamed. Rebuild the search index with: sol indexer --reset --rescan-full"
    )