#!/usr/bin/env -S uv run --script --quiet
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "agentic-learning>=0.4.0",
#     "anthropic>=0.40.0",
#     "pydantic-settings>=2.0.0",
# ]
# [tool.uv]
# prerelease = "allow"
# ///
"""proof of concept: letta-powered status agent with persistent memory.

this script demonstrates using letta's learning SDK to give an LLM
persistent memory across runs. the agent will remember context about
plyr.fm's codebase and recent work.

usage:
    # first run - agent learns about the project
    uv run scripts/letta_status_agent.py "what is plyr.fm?"

    # second run - agent remembers previous context
    uv run scripts/letta_status_agent.py "what did we discuss last time?"

    # ask about recent work
    uv run scripts/letta_status_agent.py "summarize recent commits"

    # manage agent
    uv run scripts/letta_status_agent.py --create   # create the agent
    uv run scripts/letta_status_agent.py --delete   # delete the agent
    uv run scripts/letta_status_agent.py --status   # check agent status

environment variables (in .env):
    LETTA_API_KEY     - letta cloud API key
    ANTHROPIC_API_KEY - anthropic API key
"""

import asyncio
import subprocess
import sys
from pathlib import Path

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict

AGENT_NAME = "plyr-status-agent"
MEMORY_BLOCKS = ["project_context", "recent_work"]


class AgentSettings(BaseSettings):
    """settings for the letta status agent."""

    model_config = SettingsConfigDict(
        env_file=Path(__file__).parent.parent / ".env",
        case_sensitive=False,
        extra="ignore",
    )

    letta_api_key: str = Field(validation_alias="LETTA_API_KEY")
    anthropic_api_key: str = Field(validation_alias="ANTHROPIC_API_KEY")


def get_recent_commits(limit: int = 10) -> str:
    """get recent commit messages for context."""
    result = subprocess.run(
        ["git", "log", "--oneline", f"-{limit}"],
        capture_output=True,
        text=True,
        cwd=Path(__file__).parent.parent,
    )
    return result.stdout.strip()


def get_open_issues(limit: int = 5) -> str:
    """get open issues for context."""
    result = subprocess.run(
        ["gh", "issue", "list", "--limit", str(limit)],
        capture_output=True,
        text=True,
        cwd=Path(__file__).parent.parent,
    )
    return result.stdout.strip()


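# hypothetical extension (not in the original script, and not wired into the
# system prompt below): the same subprocess pattern could surface uncommitted
# changes as extra context. a minimal sketch, assuming `git status --short`
# output would be useful to the agent; the helper name is illustrative only.
def get_working_tree_status() -> str:
    """get a short summary of uncommitted changes (optional context helper)."""
    result = subprocess.run(
        ["git", "status", "--short"],
        capture_output=True,
        text=True,
        cwd=Path(__file__).parent.parent,
    )
    # returns an empty string when the working tree is clean or git fails
    return result.stdout.strip()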


async def create_agent(settings: AgentSettings) -> None:
    """create the letta agent."""
    from agentic_learning import AsyncAgenticLearning

    letta_client = AsyncAgenticLearning(api_key=settings.letta_api_key)

    # check if already exists
    existing = await letta_client.agents.retrieve(agent=AGENT_NAME)
    if existing:
        print(f"agent '{AGENT_NAME}' already exists (id: {existing.id})")
        return

    # create
    agent = await letta_client.agents.create(
        agent=AGENT_NAME,
        memory=MEMORY_BLOCKS,
        model="anthropic/claude-sonnet-4-20250514",
    )
    print(f"✓ created agent '{AGENT_NAME}' (id: {agent.id})")
    print(f"  memory blocks: {MEMORY_BLOCKS}")


async def delete_agent(settings: AgentSettings) -> None:
    """delete the letta agent."""
    from agentic_learning import AsyncAgenticLearning

    letta_client = AsyncAgenticLearning(api_key=settings.letta_api_key)

    deleted = await letta_client.agents.delete(agent=AGENT_NAME)
    if deleted:
        print(f"✓ deleted agent '{AGENT_NAME}'")
    else:
        print(f"agent '{AGENT_NAME}' not found")


async def show_status(settings: AgentSettings) -> None:
    """show agent status and memory."""
    from agentic_learning import AsyncAgenticLearning

    letta_client = AsyncAgenticLearning(api_key=settings.letta_api_key)

    agent = await letta_client.agents.retrieve(agent=AGENT_NAME)
    if not agent:
        print(f"agent '{AGENT_NAME}' not found")
        print("run: uv run scripts/letta_status_agent.py --create")
        return

    print(f"agent: {AGENT_NAME}")
    print(f"  id: {agent.id}")
    print(f"  model: {agent.model}")

    # show memory blocks
    if hasattr(agent, "memory") and agent.memory:
        print("  memory blocks:")
        for block in agent.memory.blocks:
            preview = (
                block.value[:100] + "..." if len(block.value) > 100 else block.value
            )
            print(f"    - {block.label}: {preview}")


def run_agent_sync(user_message: str) -> None:
    """run the status agent with letta memory (sync version)."""
    import os

    settings = AgentSettings()

    # SDK's capture() reads from os.environ, so we need to set it
    os.environ["LETTA_API_KEY"] = settings.letta_api_key

    # import after settings validation
    import anthropic
    from agentic_learning import AgenticLearning, learning

    # initialize clients - use SYNC clients for sync context
    letta_client = AgenticLearning(api_key=settings.letta_api_key)
    anthropic_client = anthropic.Anthropic(api_key=settings.anthropic_api_key)

    # ensure agent exists (sync)
    existing = letta_client.agents.retrieve(agent=AGENT_NAME)
    if not existing:
        print(f"creating agent '{AGENT_NAME}'...")
        try:
            letta_client.agents.create(
                agent=AGENT_NAME,
                memory=MEMORY_BLOCKS,
                model="anthropic/claude-sonnet-4-20250514",
            )
            print(f"✓ agent '{AGENT_NAME}' created")
        except Exception as e:
            print(f"✗ failed to create agent: {e}")
            sys.exit(1)

    # gather context
    recent_commits = get_recent_commits()
    open_issues = get_open_issues()

    system_prompt = f"""you are a status agent for plyr.fm, a decentralized music streaming
platform built on AT Protocol.

your role is to:
1. understand what's happening in the codebase
2. remember context across conversations
3. help maintain STATUS.md and track project progress

current context:
- recent commits:
{recent_commits}

- open issues:
{open_issues}

be concise and technical. use lowercase aesthetic.
198""" 199 200 print(f"user: {user_message}\n") 201 print("agent: ", end="", flush=True) 202 203 # wrap the anthropic call with letta learning context (SYNC) 204 # this automatically captures the conversation and injects relevant memory 205 with learning( 206 agent=AGENT_NAME, 207 client=letta_client, 208 memory=MEMORY_BLOCKS, 209 ): 210 response = anthropic_client.messages.create( 211 model="claude-sonnet-4-20250514", 212 max_tokens=1024, 213 system=system_prompt, 214 messages=[{"role": "user", "content": user_message}], 215 ) 216 217 # print response 218 for block in response.content: 219 if hasattr(block, "text"): 220 print(block.text) 221 222 print("\n✓ conversation saved to letta memory") 223 224 225def main() -> None: 226 """main entry point.""" 227 if len(sys.argv) < 2: 228 print("usage: uv run scripts/letta_status_agent.py <message>") 229 print("\nexamples:") 230 print(' uv run scripts/letta_status_agent.py "what is plyr.fm?"') 231 print(' uv run scripts/letta_status_agent.py "what did we discuss last time?"') 232 print(' uv run scripts/letta_status_agent.py "summarize recent work"') 233 print("\nagent management:") 234 print(" uv run scripts/letta_status_agent.py --create") 235 print(" uv run scripts/letta_status_agent.py --delete") 236 print(" uv run scripts/letta_status_agent.py --status") 237 sys.exit(1) 238 239 settings = AgentSettings() 240 241 # handle management commands 242 if sys.argv[1] == "--create": 243 asyncio.run(create_agent(settings)) 244 return 245 elif sys.argv[1] == "--delete": 246 asyncio.run(delete_agent(settings)) 247 return 248 elif sys.argv[1] == "--status": 249 asyncio.run(show_status(settings)) 250 return 251 252 user_message = " ".join(sys.argv[1:]) 253 run_agent_sync(user_message) 254 255 256if __name__ == "__main__": 257 main()