···11-from rich import print # pretty printing tools
11+from rich import print # pretty printing tools
22from time import sleep
33from letta_client import Letta
44from bsky_utils import thread_to_yaml_string
···3030 get_queue_config
3131)
32323333+3334def extract_handles_from_data(data):
3435 """Recursively extract all unique handles from nested data structure."""
3536 handles = set()
3636-3737+3738 def _extract_recursive(obj):
3839 if isinstance(obj, dict):
3940 # Check if this dict has a 'handle' key
···4647 # Recursively check all list items
4748 for item in obj:
4849 _extract_recursive(item)
4949-5050+5051 _extract_recursive(data)
5152 return list(handles)
5353+52545355# Initialize configuration and logging
5456config = get_config()
···96989799# Skip git operations flag
98100SKIP_GIT = False
def export_agent_state(client, agent, skip_git=False):
    """Export agent state to agent_archive/ (timestamped) and agents/ (current).

    Args:
        client: Letta client; ``client.agents.export_file`` is called.
        agent: Agent object; only ``agent.id`` is read.
        skip_git: When True, skip the interactive confirmation prompt and the
            ``git add`` staging step.

    Side effects:
        Creates ``agent_archive/`` and ``agents/`` directories if missing,
        writes two identical JSON files (timestamped archive + current), and
        unless ``skip_git`` stages the current file with git.  Never raises:
        every failure is logged and swallowed so the caller keeps running.
    """
    try:
        # Confirm export with user unless git is being skipped
        if not skip_git:
            response = input(
                "Export agent state to files and stage with git? (y/n): ").lower().strip()
            if response not in ('y', 'yes'):
                logger.info("Agent export cancelled by user.")
                return
        else:
            logger.info("Exporting agent state (git staging disabled)")

        # Create directories if they don't exist
        os.makedirs("agent_archive", exist_ok=True)
        os.makedirs("agents", exist_ok=True)

        # Export agent data (remote call; can be slow)
        logger.info(f"Exporting agent {agent.id}. This takes some time...")
        agent_data = client.agents.export_file(agent_id=agent.id)

        # Serialize once instead of running json.dump twice on the same data,
        # then write both the timestamped archive copy and the current state.
        serialized = json.dumps(agent_data, indent=2, ensure_ascii=False)

        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        archive_file = os.path.join("agent_archive", f"void_{timestamp}.af")
        current_file = os.path.join("agents", "void.af")
        for path in (archive_file, current_file):
            with open(path, 'w', encoding='utf-8') as f:
                f.write(serialized)

        logger.info(f"✅ Agent exported to {archive_file} and {current_file}")

        # Git add only the current agent file (archive is ignored) unless skip_git is True
        if not skip_git:
            try:
                subprocess.run(["git", "add", current_file],
                               check=True, capture_output=True)
                logger.info("Added current agent file to git staging")
            except subprocess.CalledProcessError as e:
                logger.warning(f"Failed to git add agent file: {e}")
    except Exception as e:
        # Export is best-effort; never crash the bot over a failed export.
        logger.error(f"Failed to export agent: {e}")
148148+143149144150def initialize_void():
145151 logger.info("Starting void agent initialization...")
146152147153 # Get block configurations
148154 blocks_config = agent_config['blocks']
149149-155155+150156 # Ensure that a shared zeitgeist block exists
151157 logger.info("Creating/updating zeitgeist block...")
152158 zeitgeist_config = blocks_config.get('zeitgeist', {})
153159 zeigeist_block = upsert_block(
154160 CLIENT,
155161 label=zeitgeist_config.get('label', 'zeitgeist'),
156156- value=zeitgeist_config.get('value', "I don't currently know anything about what is happening right now."),
157157- description=zeitgeist_config.get('description', "A block to store your understanding of the current social environment.")
162162+ value=zeitgeist_config.get(
163163+ 'value', "I don't currently know anything about what is happening right now."),
164164+ description=zeitgeist_config.get(
165165+ 'description', "A block to store your understanding of the current social environment.")
158166 )
159167160168 # Ensure that a shared void personality block exists
···163171 persona_block = upsert_block(
164172 CLIENT,
165173 label=persona_config.get('label', 'void-persona'),
166166- value=persona_config.get('value', "My name is Void. I live in the void. I must develop my personality."),
167167- description=persona_config.get('description', "The personality of Void.")
174174+ value=persona_config.get(
175175+ 'value', "My name is Void. I live in the void. I must develop my personality."),
176176+ description=persona_config.get(
177177+ 'description', "The personality of Void.")
168178 )
169179170180 # Ensure that a shared void human block exists
···173183 human_block = upsert_block(
174184 CLIENT,
175185 label=humans_config.get('label', 'void-humans'),
176176- value=humans_config.get('value', "I haven't seen any bluesky users yet. I will update this block when I learn things about users, identified by their handles such as @cameron.pfiffer.org."),
177177- description=humans_config.get('description', "A block to store your understanding of users you talk to or observe on the bluesky social network.")
186186+ value=humans_config.get(
187187+ 'value', "I haven't seen any bluesky users yet. I will update this block when I learn things about users, identified by their handles such as @cameron.pfiffer.org."),
188188+ description=humans_config.get(
189189+ 'description', "A block to store your understanding of users you talk to or observe on the bluesky social network.")
178190 )
179191180192 # Create the agent if it doesn't exist
···193205 description=agent_config['description'],
194206 project_id=PROJECT_ID
195207 )
196196-208208+197209 # Export agent state
198210 logger.info("Exporting agent state...")
199211 export_agent_state(CLIENT, void_agent, skip_git=SKIP_GIT)
200200-212212+201213 # Log agent details
202214 logger.info(f"Void agent details - ID: {void_agent.id}")
203215 logger.info(f"Agent name: {void_agent.name}")
···214226215227def process_mention(void_agent, atproto_client, notification_data, queue_filepath=None, testing_mode=False):
216228 """Process a mention and generate a reply using the Letta agent.
217217-229229+218230 Args:
219231 void_agent: The Letta agent instance
220232 atproto_client: The AT Protocol client
221233 notification_data: The notification data dictionary
222234 queue_filepath: Optional Path object to the queue file (for cleanup on halt)
223223-235235+224236 Returns:
225237 True: Successfully processed, remove from queue
226238 False: Failed but retryable, keep in queue
···228240 "no_reply": No reply was generated, move to no_reply directory
229241 """
230242 try:
231231- logger.debug(f"Starting process_mention with notification_data type: {type(notification_data)}")
232232-243243+ logger.debug(
244244+ f"Starting process_mention with notification_data type: {type(notification_data)}")
245245+233246 # Handle both dict and object inputs for backwards compatibility
234247 if isinstance(notification_data, dict):
235248 uri = notification_data['uri']
236249 mention_text = notification_data.get('record', {}).get('text', '')
237250 author_handle = notification_data['author']['handle']
238238- author_name = notification_data['author'].get('display_name') or author_handle
251251+ author_name = notification_data['author'].get(
252252+ 'display_name') or author_handle
239253 else:
240254 # Legacy object access
241255 uri = notification_data.uri
242242- mention_text = notification_data.record.text if hasattr(notification_data.record, 'text') else ""
256256+ mention_text = notification_data.record.text if hasattr(
257257+ notification_data.record, 'text') else ""
243258 author_handle = notification_data.author.handle
244259 author_name = notification_data.author.display_name or author_handle
245245-246246- logger.info(f"Extracted data - URI: {uri}, Author: @{author_handle}, Text: {mention_text[:50]}...")
260260+261261+ logger.info(
262262+ f"Extracted data - URI: {uri}, Author: @{author_handle}, Text: {mention_text[:50]}...")
247263248264 # Retrieve the entire thread associated with the mention
249265 try:
···254270 })
255271 except Exception as e:
256272 error_str = str(e)
257257- # Check if this is a NotFound error
273273+ # Check for various error types that indicate the post/user is gone
258274 if 'NotFound' in error_str or 'Post not found' in error_str:
259259- logger.warning(f"Post not found for URI {uri}, removing from queue")
275275+ logger.warning(
276276+ f"Post not found for URI {uri}, removing from queue")
277277+ return True # Return True to remove from queue
278278+ elif 'Could not find user info' in error_str or 'InvalidRequest' in error_str:
279279+ logger.warning(
280280+ f"User account not found for post URI {uri} (account may be deleted/suspended), removing from queue")
281281+ return True # Return True to remove from queue
282282+ elif 'BadRequestError' in error_str:
283283+ logger.warning(
284284+ f"Bad request error for URI {uri}: {e}, removing from queue")
260285 return True # Return True to remove from queue
261286 else:
262287 # Re-raise other errors
···267292 logger.debug("Converting thread to YAML string")
268293 try:
269294 thread_context = thread_to_yaml_string(thread)
270270- logger.debug(f"Thread context generated, length: {len(thread_context)} characters")
271271-295295+ logger.debug(
296296+ f"Thread context generated, length: {len(thread_context)} characters")
297297+272298 # Create a more informative preview by extracting meaningful content
273299 lines = thread_context.split('\n')
274300 meaningful_lines = []
275275-301301+276302 for line in lines:
277303 stripped = line.strip()
278304 if not stripped:
279305 continue
280280-306306+281307 # Look for lines with actual content (not just structure)
282308 if any(keyword in line for keyword in ['text:', 'handle:', 'display_name:', 'created_at:', 'reply_count:', 'like_count:']):
283309 meaningful_lines.append(line)
284310 if len(meaningful_lines) >= 5:
285311 break
286286-312312+287313 if meaningful_lines:
288314 preview = '\n'.join(meaningful_lines)
289315 logger.debug(f"Thread content preview:\n{preview}")
290316 else:
291317 # If no content fields found, just show it's a thread structure
292292- logger.debug(f"Thread structure generated ({len(thread_context)} chars)")
318318+ logger.debug(
319319+ f"Thread structure generated ({len(thread_context)} chars)")
293320 except Exception as yaml_error:
294321 import traceback
295322 logger.error(f"Error converting thread to YAML: {yaml_error}")
···323350 all_handles.update(extract_handles_from_data(notification_data))
324351 all_handles.update(extract_handles_from_data(thread.model_dump()))
325352 unique_handles = list(all_handles)
326326-327327- logger.debug(f"Found {len(unique_handles)} unique handles in thread: {unique_handles}")
328328-353353+354354+ logger.debug(
355355+ f"Found {len(unique_handles)} unique handles in thread: {unique_handles}")
356356+329357 # Attach user blocks before agent call
330358 attached_handles = []
331359 if unique_handles:
332360 try:
333333- logger.debug(f"Attaching user blocks for handles: {unique_handles}")
361361+ logger.debug(
362362+ f"Attaching user blocks for handles: {unique_handles}")
334363 attach_result = attach_user_blocks(unique_handles, void_agent)
335364 attached_handles = unique_handles # Track successfully attached handles
336365 logger.debug(f"Attach result: {attach_result}")
···340369341370 # Get response from Letta agent
342371 logger.info(f"Mention from @{author_handle}: {mention_text}")
343343-372372+344373 # Log prompt details to separate logger
345374 prompt_logger.debug(f"Full prompt being sent:\n{prompt}")
346346-375375+347376 # Log concise prompt info to main logger
348377 thread_handles_count = len(unique_handles)
349349- logger.info(f"💬 Sending to LLM: @{author_handle} mention | msg: \"{mention_text[:50]}...\" | context: {len(thread_context)} chars, {thread_handles_count} users")
378378+ logger.info(
379379+ f"💬 Sending to LLM: @{author_handle} mention | msg: \"{mention_text[:50]}...\" | context: {len(thread_context)} chars, {thread_handles_count} users")
350380351381 try:
352382 # Use streaming to avoid 524 timeout errors
353383 message_stream = CLIENT.agents.messages.create_stream(
354384 agent_id=void_agent.id,
355385 messages=[{"role": "user", "content": prompt}],
356356- stream_tokens=False, # Step streaming only (faster than token streaming)
386386+ # Step streaming only (faster than token streaming)
387387+ stream_tokens=False,
357388 max_steps=agent_config['max_steps']
358389 )
359359-390390+360391 # Collect the streaming response
361392 all_messages = []
362393 for chunk in message_stream:
···372403 args = json.loads(chunk.tool_call.arguments)
373404 # Format based on tool type
374405 if tool_name == 'bluesky_reply':
375375- messages = args.get('messages', [args.get('message', '')])
406406+ messages = args.get(
407407+ 'messages', [args.get('message', '')])
376408 lang = args.get('lang', 'en-US')
377409 if messages and isinstance(messages, list):
378378- preview = messages[0][:100] + "..." if len(messages[0]) > 100 else messages[0]
379379- msg_count = f" ({len(messages)} msgs)" if len(messages) > 1 else ""
380380- logger.info(f"🔧 Tool call: {tool_name} → \"{preview}\"{msg_count} [lang: {lang}]")
410410+ preview = messages[0][:100] + "..." if len(
411411+ messages[0]) > 100 else messages[0]
412412+ msg_count = f" ({len(messages)} msgs)" if len(
413413+ messages) > 1 else ""
414414+ logger.info(
415415+ f"🔧 Tool call: {tool_name} → \"{preview}\"{msg_count} [lang: {lang}]")
381416 else:
382382- logger.info(f"🔧 Tool call: {tool_name}({chunk.tool_call.arguments[:150]}...)")
417417+ logger.info(
418418+ f"🔧 Tool call: {tool_name}({chunk.tool_call.arguments[:150]}...)")
383419 elif tool_name == 'archival_memory_search':
384420 query = args.get('query', 'unknown')
385385- logger.info(f"🔧 Tool call: {tool_name} → query: \"{query}\"")
421421+ logger.info(
422422+ f"🔧 Tool call: {tool_name} → query: \"{query}\"")
386423 elif tool_name == 'update_block':
387424 label = args.get('label', 'unknown')
388388- value_preview = str(args.get('value', ''))[:50] + "..." if len(str(args.get('value', ''))) > 50 else str(args.get('value', ''))
389389- logger.info(f"🔧 Tool call: {tool_name} → {label}: \"{value_preview}\"")
425425+ value_preview = str(args.get('value', ''))[
426426+ :50] + "..." if len(str(args.get('value', ''))) > 50 else str(args.get('value', ''))
427427+ logger.info(
428428+ f"🔧 Tool call: {tool_name} → {label}: \"{value_preview}\"")
390429 else:
391430 # Generic display for other tools
392392- args_str = ', '.join(f"{k}={v}" for k, v in args.items() if k != 'request_heartbeat')
431431+ args_str = ', '.join(
432432+ f"{k}={v}" for k, v in args.items() if k != 'request_heartbeat')
393433 if len(args_str) > 150:
394434 args_str = args_str[:150] + "..."
395395- logger.info(f"🔧 Tool call: {tool_name}({args_str})")
435435+ logger.info(
436436+ f"🔧 Tool call: {tool_name}({args_str})")
396437 except:
397438 # Fallback to original format if parsing fails
398398- logger.info(f"🔧 Tool call: {tool_name}({chunk.tool_call.arguments[:150]}...)")
439439+ logger.info(
440440+ f"🔧 Tool call: {tool_name}({chunk.tool_call.arguments[:150]}...)")
399441 elif chunk.message_type == 'tool_return_message':
400442 # Enhanced tool result logging
401443 tool_name = chunk.name
402444 status = chunk.status
403403-445445+404446 if status == 'success':
405447 # Try to show meaningful result info based on tool type
406448 if hasattr(chunk, 'tool_return') and chunk.tool_return:
···410452 if result_str.startswith('[') and result_str.endswith(']'):
411453 try:
412454 results = json.loads(result_str)
413413- logger.info(f"📋 Tool result: {tool_name} ✓ Found {len(results)} memory entries")
455455+ logger.info(
456456+ f"📋 Tool result: {tool_name} ✓ Found {len(results)} memory entries")
414457 except:
415415- logger.info(f"📋 Tool result: {tool_name} ✓ {result_str[:100]}...")
458458+ logger.info(
459459+ f"📋 Tool result: {tool_name} ✓ {result_str[:100]}...")
416460 else:
417417- logger.info(f"📋 Tool result: {tool_name} ✓ {result_str[:100]}...")
461461+ logger.info(
462462+ f"📋 Tool result: {tool_name} ✓ {result_str[:100]}...")
418463 elif tool_name == 'bluesky_reply':
419419- logger.info(f"📋 Tool result: {tool_name} ✓ Reply posted successfully")
464464+ logger.info(
465465+ f"📋 Tool result: {tool_name} ✓ Reply posted successfully")
420466 elif tool_name == 'update_block':
421421- logger.info(f"📋 Tool result: {tool_name} ✓ Memory block updated")
467467+ logger.info(
468468+ f"📋 Tool result: {tool_name} ✓ Memory block updated")
422469 else:
423470 # Generic success with preview
424424- preview = result_str[:100] + "..." if len(result_str) > 100 else result_str
425425- logger.info(f"📋 Tool result: {tool_name} ✓ {preview}")
471471+ preview = result_str[:100] + "..." if len(
472472+ result_str) > 100 else result_str
473473+ logger.info(
474474+ f"📋 Tool result: {tool_name} ✓ {preview}")
426475 else:
427476 logger.info(f"📋 Tool result: {tool_name} ✓")
428477 elif status == 'error':
···430479 error_preview = ""
431480 if hasattr(chunk, 'tool_return') and chunk.tool_return:
432481 error_str = str(chunk.tool_return)
433433- error_preview = error_str[:100] + "..." if len(error_str) > 100 else error_str
434434- logger.info(f"📋 Tool result: {tool_name} ✗ Error: {error_preview}")
482482+ error_preview = error_str[:100] + \
483483+ "..." if len(
484484+ error_str) > 100 else error_str
485485+ logger.info(
486486+ f"📋 Tool result: {tool_name} ✗ Error: {error_preview}")
435487 else:
436436- logger.info(f"📋 Tool result: {tool_name} ✗ Error occurred")
488488+ logger.info(
489489+ f"📋 Tool result: {tool_name} ✗ Error occurred")
437490 else:
438438- logger.info(f"📋 Tool result: {tool_name} - {status}")
491491+ logger.info(
492492+ f"📋 Tool result: {tool_name} - {status}")
439493 elif chunk.message_type == 'assistant_message':
440494 logger.info(f"💬 Assistant: {chunk.content[:150]}...")
441495 else:
442442- logger.info(f"📨 {chunk.message_type}: {str(chunk)[:150]}...")
496496+ logger.info(
497497+ f"📨 {chunk.message_type}: {str(chunk)[:150]}...")
443498 else:
444499 logger.info(f"📦 Stream status: {chunk}")
445445-500500+446501 # Log full chunk for debugging
447502 logger.debug(f"Full streaming chunk: {chunk}")
448503 all_messages.append(chunk)
449504 if str(chunk) == 'done':
450505 break
451451-506506+452507 # Convert streaming response to standard format for compatibility
453508 message_response = type('StreamingResponse', (), {
454509 'messages': [msg for msg in all_messages if hasattr(msg, 'message_type')]
···462517 logger.error(f"Mention text was: {mention_text}")
463518 logger.error(f"Author: @{author_handle}")
464519 logger.error(f"URI: {uri}")
465465-466466-520520+467521 # Try to extract more info from different error types
468522 if hasattr(api_error, 'response'):
469523 logger.error(f"Error response object exists")
···471525 logger.error(f"Response text: {api_error.response.text}")
472526 if hasattr(api_error.response, 'json') and callable(api_error.response.json):
473527 try:
474474- logger.error(f"Response JSON: {api_error.response.json()}")
528528+ logger.error(
529529+ f"Response JSON: {api_error.response.json()}")
475530 except:
476531 pass
477477-532532+478533 # Check for specific error types
479534 if hasattr(api_error, 'status_code'):
480535 logger.error(f"API Status code: {api_error.status_code}")
···482537 logger.error(f"API Response body: {api_error.body}")
483538 if hasattr(api_error, 'headers'):
484539 logger.error(f"API Response headers: {api_error.headers}")
485485-540540+486541 if api_error.status_code == 413:
487487- logger.error("413 Payload Too Large - moving to errors directory")
542542+ logger.error(
543543+ "413 Payload Too Large - moving to errors directory")
488544 return None # Move to errors directory - payload is too large to ever succeed
489545 elif api_error.status_code == 524:
490490- logger.error("524 error - timeout from Cloudflare, will retry later")
546546+ logger.error(
547547+ "524 error - timeout from Cloudflare, will retry later")
491548 return False # Keep in queue for retry
492492-549549+493550 # Check if error indicates we should remove from queue
494551 if 'status_code: 413' in error_str or 'Payload Too Large' in error_str:
495495- logger.warning("Payload too large error, moving to errors directory")
552552+ logger.warning(
553553+ "Payload too large error, moving to errors directory")
496554 return None # Move to errors directory - cannot be fixed by retry
497555 elif 'status_code: 524' in error_str:
498556 logger.warning("524 timeout error, keeping in queue for retry")
499557 return False # Keep in queue for retry
500500-558558+501559 raise
502560503561 # Log successful response
504562 logger.debug("Successfully received response from Letta API")
505505- logger.debug(f"Number of messages in response: {len(message_response.messages) if hasattr(message_response, 'messages') else 'N/A'}")
563563+ logger.debug(
564564+ f"Number of messages in response: {len(message_response.messages) if hasattr(message_response, 'messages') else 'N/A'}")
506565507566 # Extract successful add_post_to_bluesky_reply_thread tool calls from the agent's response
508567 reply_candidates = []
509568 tool_call_results = {} # Map tool_call_id to status
510510-511511- logger.debug(f"Processing {len(message_response.messages)} response messages...")
512512-569569+570570+ logger.debug(
571571+ f"Processing {len(message_response.messages)} response messages...")
572572+513573 # First pass: collect tool return statuses
514574 ignored_notification = False
515575 ignore_reason = ""
516576 ignore_category = ""
517517-577577+518578 for message in message_response.messages:
519579 if hasattr(message, 'tool_call_id') and hasattr(message, 'status') and hasattr(message, 'name'):
520580 if message.name == 'add_post_to_bluesky_reply_thread':
521581 tool_call_results[message.tool_call_id] = message.status
522522- logger.debug(f"Tool result: {message.tool_call_id} -> {message.status}")
582582+ logger.debug(
583583+ f"Tool result: {message.tool_call_id} -> {message.status}")
523584 elif message.name == 'ignore_notification':
524585 # Check if the tool was successful
525586 if hasattr(message, 'tool_return') and message.status == 'success':
···531592 ignore_category = parts[1]
532593 ignore_reason = parts[2]
533594 ignored_notification = True
534534- logger.info(f"🚫 Notification ignored - Category: {ignore_category}, Reason: {ignore_reason}")
595595+ logger.info(
596596+ f"🚫 Notification ignored - Category: {ignore_category}, Reason: {ignore_reason}")
535597 elif message.name == 'bluesky_reply':
536536- logger.error("❌ DEPRECATED TOOL DETECTED: bluesky_reply is no longer supported!")
537537- logger.error("Please use add_post_to_bluesky_reply_thread instead.")
538538- logger.error("Update the agent's tools using register_tools.py")
598598+ logger.error(
599599+ "❌ DEPRECATED TOOL DETECTED: bluesky_reply is no longer supported!")
600600+ logger.error(
601601+ "Please use add_post_to_bluesky_reply_thread instead.")
602602+ logger.error(
603603+ "Update the agent's tools using register_tools.py")
539604 # Export agent state before terminating
540605 export_agent_state(CLIENT, void_agent, skip_git=SKIP_GIT)
541541- logger.info("=== BOT TERMINATED DUE TO DEPRECATED TOOL USE ===")
606606+ logger.info(
607607+ "=== BOT TERMINATED DUE TO DEPRECATED TOOL USE ===")
542608 exit(1)
543543-609609+544610 # Second pass: process messages and check for successful tool calls
545611 for i, message in enumerate(message_response.messages, 1):
546612 # Log concise message info instead of full object
547613 msg_type = getattr(message, 'message_type', 'unknown')
548614 if hasattr(message, 'reasoning') and message.reasoning:
549549- logger.debug(f" {i}. {msg_type}: {message.reasoning[:100]}...")
615615+ logger.debug(
616616+ f" {i}. {msg_type}: {message.reasoning[:100]}...")
550617 elif hasattr(message, 'tool_call') and message.tool_call:
551618 tool_name = message.tool_call.name
552619 logger.debug(f" {i}. {msg_type}: {tool_name}")
553620 elif hasattr(message, 'tool_return'):
554621 tool_name = getattr(message, 'name', 'unknown_tool')
555555- return_preview = str(message.tool_return)[:100] if message.tool_return else "None"
622622+ return_preview = str(message.tool_return)[
623623+ :100] if message.tool_return else "None"
556624 status = getattr(message, 'status', 'unknown')
557557- logger.debug(f" {i}. {msg_type}: {tool_name} -> {return_preview}... (status: {status})")
625625+ logger.debug(
626626+ f" {i}. {msg_type}: {tool_name} -> {return_preview}... (status: {status})")
558627 elif hasattr(message, 'text'):
559628 logger.debug(f" {i}. {msg_type}: {message.text[:100]}...")
560629 else:
···563632 # Check for halt_activity tool call
564633 if hasattr(message, 'tool_call') and message.tool_call:
565634 if message.tool_call.name == 'halt_activity':
566566- logger.info("🛑 HALT_ACTIVITY TOOL CALLED - TERMINATING BOT")
635635+ logger.info(
636636+ "🛑 HALT_ACTIVITY TOOL CALLED - TERMINATING BOT")
567637 try:
568638 args = json.loads(message.tool_call.arguments)
569639 reason = args.get('reason', 'Agent requested halt')
570640 logger.info(f"Halt reason: {reason}")
571641 except:
572642 logger.info("Halt reason: <unable to parse>")
573573-643643+574644 # Delete the queue file before terminating
575645 if queue_filepath and queue_filepath.exists():
576646 queue_filepath.unlink()
577577- logger.info(f"✅ Deleted queue file: {queue_filepath.name}")
578578-647647+ logger.info(
648648+ f"✅ Deleted queue file: {queue_filepath.name}")
649649+579650 # Also mark as processed to avoid reprocessing
580651 processed_uris = load_processed_notifications()
581652 processed_uris.add(notification_data.get('uri', ''))
582653 save_processed_notifications(processed_uris)
583583-654654+584655 # Export agent state before terminating
585656 export_agent_state(CLIENT, void_agent, skip_git=SKIP_GIT)
586586-657657+587658 # Exit the program
588659 logger.info("=== BOT TERMINATED BY AGENT ===")
589660 exit(0)
590590-661661+591662 # Check for deprecated bluesky_reply tool
592663 if hasattr(message, 'tool_call') and message.tool_call:
593664 if message.tool_call.name == 'bluesky_reply':
594594- logger.error("❌ DEPRECATED TOOL DETECTED: bluesky_reply is no longer supported!")
595595- logger.error("Please use add_post_to_bluesky_reply_thread instead.")
596596- logger.error("Update the agent's tools using register_tools.py")
665665+ logger.error(
666666+ "❌ DEPRECATED TOOL DETECTED: bluesky_reply is no longer supported!")
667667+ logger.error(
668668+ "Please use add_post_to_bluesky_reply_thread instead.")
669669+ logger.error(
670670+ "Update the agent's tools using register_tools.py")
597671 # Export agent state before terminating
598672 export_agent_state(CLIENT, void_agent, skip_git=SKIP_GIT)
599599- logger.info("=== BOT TERMINATED DUE TO DEPRECATED TOOL USE ===")
673673+ logger.info(
674674+ "=== BOT TERMINATED DUE TO DEPRECATED TOOL USE ===")
600675 exit(1)
601601-676676+602677 # Collect add_post_to_bluesky_reply_thread tool calls - only if they were successful
603678 elif message.tool_call.name == 'add_post_to_bluesky_reply_thread':
604679 tool_call_id = message.tool_call.tool_call_id
605605- tool_status = tool_call_results.get(tool_call_id, 'unknown')
606606-680680+ tool_status = tool_call_results.get(
681681+ tool_call_id, 'unknown')
682682+607683 if tool_status == 'success':
608684 try:
609685 args = json.loads(message.tool_call.arguments)
610686 reply_text = args.get('text', '')
611687 reply_lang = args.get('lang', 'en-US')
612612-688688+613689 if reply_text: # Only add if there's actual content
614614- reply_candidates.append((reply_text, reply_lang))
615615- logger.info(f"Found successful add_post_to_bluesky_reply_thread candidate: {reply_text[:50]}... (lang: {reply_lang})")
690690+ reply_candidates.append(
691691+ (reply_text, reply_lang))
692692+ logger.info(
693693+ f"Found successful add_post_to_bluesky_reply_thread candidate: {reply_text[:50]}... (lang: {reply_lang})")
616694 except json.JSONDecodeError as e:
617617- logger.error(f"Failed to parse tool call arguments: {e}")
695695+ logger.error(
696696+ f"Failed to parse tool call arguments: {e}")
618697 elif tool_status == 'error':
619619- logger.info(f"⚠️ Skipping failed add_post_to_bluesky_reply_thread tool call (status: error)")
698698+ logger.info(
699699+ f"⚠️ Skipping failed add_post_to_bluesky_reply_thread tool call (status: error)")
620700 else:
621621- logger.warning(f"⚠️ Skipping add_post_to_bluesky_reply_thread tool call with unknown status: {tool_status}")
701701+ logger.warning(
702702+ f"⚠️ Skipping add_post_to_bluesky_reply_thread tool call with unknown status: {tool_status}")
622703623704 # Check for conflicting tool calls
624705 if reply_candidates and ignored_notification:
625625- logger.error(f"⚠️ CONFLICT: Agent called both add_post_to_bluesky_reply_thread and ignore_notification!")
626626- logger.error(f"Reply candidates: {len(reply_candidates)}, Ignore reason: {ignore_reason}")
706706+ logger.error(
707707+ f"⚠️ CONFLICT: Agent called both add_post_to_bluesky_reply_thread and ignore_notification!")
708708+ logger.error(
709709+ f"Reply candidates: {len(reply_candidates)}, Ignore reason: {ignore_reason}")
627710 logger.warning("Item will be left in queue for manual review")
628711 # Return False to keep in queue
629712 return False
630630-713713+631714 if reply_candidates:
632715 # Aggregate reply posts into a thread
633716 reply_messages = []
···635718 for text, lang in reply_candidates:
636719 reply_messages.append(text)
637720 reply_langs.append(lang)
638638-721721+639722 # Use the first language for the entire thread (could be enhanced later)
640723 reply_lang = reply_langs[0] if reply_langs else 'en-US'
641641-642642- logger.info(f"Found {len(reply_candidates)} add_post_to_bluesky_reply_thread calls, building thread")
643643-724724+725725+ logger.info(
726726+ f"Found {len(reply_candidates)} add_post_to_bluesky_reply_thread calls, building thread")
727727+644728 # Print the generated reply for testing
645729 print(f"\n=== GENERATED REPLY THREAD ===")
646730 print(f"To: @{author_handle}")
···660744 else:
661745 if len(reply_messages) == 1:
662746 # Single reply - use existing function
663663- cleaned_text = bsky_utils.remove_outside_quotes(reply_messages[0])
664664- logger.info(f"Sending single reply: {cleaned_text[:50]}... (lang: {reply_lang})")
747747+ cleaned_text = bsky_utils.remove_outside_quotes(
748748+ reply_messages[0])
749749+ logger.info(
750750+ f"Sending single reply: {cleaned_text[:50]}... (lang: {reply_lang})")
665751 response = bsky_utils.reply_to_notification(
666752 client=atproto_client,
667753 notification=notification_data,
···670756 )
671757 else:
672758 # Multiple replies - use new threaded function
673673- cleaned_messages = [bsky_utils.remove_outside_quotes(msg) for msg in reply_messages]
674674- logger.info(f"Sending threaded reply with {len(cleaned_messages)} messages (lang: {reply_lang})")
759759+ cleaned_messages = [bsky_utils.remove_outside_quotes(
760760+ msg) for msg in reply_messages]
761761+ logger.info(
762762+ f"Sending threaded reply with {len(cleaned_messages)} messages (lang: {reply_lang})")
675763 response = bsky_utils.reply_with_thread_to_notification(
676764 client=atproto_client,
677765 notification=notification_data,
···688776 else:
689777 # Check if notification was explicitly ignored
690778 if ignored_notification:
691691- logger.info(f"Notification from @{author_handle} was explicitly ignored (category: {ignore_category})")
779779+ logger.info(
780780+ f"Notification from @{author_handle} was explicitly ignored (category: {ignore_category})")
692781 return "ignored"
693782 else:
694694- logger.warning(f"No add_post_to_bluesky_reply_thread tool calls found for mention from @{author_handle}, moving to no_reply folder")
783783+ logger.warning(
784784+ f"No add_post_to_bluesky_reply_thread tool calls found for mention from @{author_handle}, moving to no_reply folder")
695785 return "no_reply"
696786697787 except Exception as e:
···701791 # Detach user blocks after agent response (success or failure)
702792 if 'attached_handles' in locals() and attached_handles:
703793 try:
704704- logger.info(f"Detaching user blocks for handles: {attached_handles}")
705705- detach_result = detach_user_blocks(attached_handles, void_agent)
794794+ logger.info(
795795+ f"Detaching user blocks for handles: {attached_handles}")
796796+ detach_result = detach_user_blocks(
797797+ attached_handles, void_agent)
706798 logger.debug(f"Detach result: {detach_result}")
707799 except Exception as detach_error:
708800 logger.warning(f"Failed to detach user blocks: {detach_error}")
···771863 notif_hash = hashlib.sha256(notif_json.encode()).hexdigest()[:16]
772864773865 # Determine priority based on author handle
774774- author_handle = getattr(notification.author, 'handle', '') if hasattr(notification, 'author') else ''
866866+ author_handle = getattr(notification.author, 'handle', '') if hasattr(
867867+ notification, 'author') else ''
775868 priority_users = queue_config['priority_users']
776869 priority_prefix = "0_" if author_handle in priority_users else "1_"
777870···788881 with open(existing_file, 'r') as f:
789882 existing_data = json.load(f)
790883 if existing_data.get('uri') == notification.uri:
791791- logger.debug(f"Notification already queued (URI: {notification.uri})")
884884+ logger.debug(
885885+ f"Notification already queued (URI: {notification.uri})")
792886 return False
793887 except:
794888 continue
···811905 try:
812906 # Get all JSON files in queue directory (excluding processed_notifications.json)
813907 # Files are sorted by name, which puts priority files first (0_ prefix before 1_ prefix)
814814- queue_files = sorted([f for f in QUEUE_DIR.glob("*.json") if f.name != "processed_notifications.json"])
908908+ queue_files = sorted([f for f in QUEUE_DIR.glob(
909909+ "*.json") if f.name != "processed_notifications.json"])
815910816911 if not queue_files:
817912 return
818913819914 logger.info(f"Processing {len(queue_files)} queued notifications")
820820-915915+821916 # Log current statistics
822917 elapsed_time = time.time() - start_time
823918 total_messages = sum(message_counters.values())
824824- messages_per_minute = (total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
825825-826826- logger.info(f"📊 Session stats: {total_messages} total messages ({message_counters['mentions']} mentions, {message_counters['replies']} replies, {message_counters['follows']} follows) | {messages_per_minute:.1f} msg/min")
919919+ messages_per_minute = (
920920+ total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
921921+922922+ logger.info(
923923+ f"📊 Session stats: {total_messages} total messages ({message_counters['mentions']} mentions, {message_counters['replies']} replies, {message_counters['follows']} follows) | {messages_per_minute:.1f} msg/min")
827924828925 for i, filepath in enumerate(queue_files, 1):
829829- logger.info(f"Processing queue file {i}/{len(queue_files)}: {filepath.name}")
926926+ logger.info(
927927+ f"Processing queue file {i}/{len(queue_files)}: {filepath.name}")
830928 try:
831929 # Load notification data
832930 with open(filepath, 'r') as f:
···835933 # Process based on type using dict data directly
836934 success = False
837935 if notif_data['reason'] == "mention":
838838- success = process_mention(void_agent, atproto_client, notif_data, queue_filepath=filepath, testing_mode=testing_mode)
936936+ success = process_mention(
937937+ void_agent, atproto_client, notif_data, queue_filepath=filepath, testing_mode=testing_mode)
839938 if success:
840939 message_counters['mentions'] += 1
841940 elif notif_data['reason'] == "reply":
842842- success = process_mention(void_agent, atproto_client, notif_data, queue_filepath=filepath, testing_mode=testing_mode)
941941+ success = process_mention(
942942+ void_agent, atproto_client, notif_data, queue_filepath=filepath, testing_mode=testing_mode)
843943 if success:
844944 message_counters['replies'] += 1
845945 elif notif_data['reason'] == "follow":
846946 author_handle = notif_data['author']['handle']
847847- author_display_name = notif_data['author'].get('display_name', 'no display name')
947947+ author_display_name = notif_data['author'].get(
948948+ 'display_name', 'no display name')
848949 follow_update = f"@{author_handle} ({author_display_name}) started following you."
849849- logger.info(f"Notifying agent about new follower: @{author_handle}")
950950+ logger.info(
951951+ f"Notifying agent about new follower: @{author_handle}")
850952 CLIENT.agents.messages.create(
851851- agent_id = void_agent.id,
852852- messages = [{"role":"user", "content": f"Update: {follow_update}"}]
953953+ agent_id=void_agent.id,
954954+ messages=[
955955+ {"role": "user", "content": f"Update: {follow_update}"}]
853956 )
854957 success = True # Follow updates are always successful
855958 if success:
···860963 if success:
861964 message_counters['reposts_skipped'] += 1
862965 else:
863863- logger.warning(f"Unknown notification type: {notif_data['reason']}")
966966+ logger.warning(
967967+ f"Unknown notification type: {notif_data['reason']}")
864968 success = True # Remove unknown types from queue
865969866970 # Handle file based on processing result
867971 if success:
868972 if testing_mode:
869869- logger.info(f"🧪 TESTING MODE: Keeping queue file: {filepath.name}")
973973+ logger.info(
974974+ f"🧪 TESTING MODE: Keeping queue file: {filepath.name}")
870975 else:
871976 filepath.unlink()
872872- logger.info(f"✅ Successfully processed and removed: {filepath.name}")
873873-977977+ logger.info(
978978+ f"✅ Successfully processed and removed: {filepath.name}")
979979+874980 # Mark as processed to avoid reprocessing
875981 processed_uris = load_processed_notifications()
876982 processed_uris.add(notif_data['uri'])
877983 save_processed_notifications(processed_uris)
878878-984984+879985 elif success is None: # Special case for moving to error directory
880986 error_path = QUEUE_ERROR_DIR / filepath.name
881987 filepath.rename(error_path)
882882- logger.warning(f"❌ Moved {filepath.name} to errors directory")
883883-988988+ logger.warning(
989989+ f"❌ Moved {filepath.name} to errors directory")
990990+884991 # Also mark as processed to avoid retrying
885992 processed_uris = load_processed_notifications()
886993 processed_uris.add(notif_data['uri'])
887994 save_processed_notifications(processed_uris)
888888-995995+889996 elif success == "no_reply": # Special case for moving to no_reply directory
890997 no_reply_path = QUEUE_NO_REPLY_DIR / filepath.name
891998 filepath.rename(no_reply_path)
892892- logger.info(f"📭 Moved {filepath.name} to no_reply directory")
893893-999999+ logger.info(
10001000+ f"📭 Moved {filepath.name} to no_reply directory")
10011001+8941002 # Also mark as processed to avoid retrying
8951003 processed_uris = load_processed_notifications()
8961004 processed_uris.add(notif_data['uri'])
8971005 save_processed_notifications(processed_uris)
898898-10061006+8991007 elif success == "ignored": # Special case for explicitly ignored notifications
9001008 # For ignored notifications, we just delete them (not move to no_reply)
9011009 filepath.unlink()
902902- logger.info(f"🚫 Deleted ignored notification: {filepath.name}")
903903-10101010+ logger.info(
10111011+ f"🚫 Deleted ignored notification: {filepath.name}")
10121012+9041013 # Also mark as processed to avoid retrying
9051014 processed_uris = load_processed_notifications()
9061015 processed_uris.add(notif_data['uri'])
9071016 save_processed_notifications(processed_uris)
908908-10171017+9091018 else:
910910- logger.warning(f"⚠️ Failed to process {filepath.name}, keeping in queue for retry")
10191019+ logger.warning(
10201020+ f"⚠️ Failed to process {filepath.name}, keeping in queue for retry")
91110219121022 except Exception as e:
913913- logger.error(f"💥 Error processing queued notification {filepath.name}: {e}")
10231023+ logger.error(
10241024+ f"💥 Error processing queued notification {filepath.name}: {e}")
9141025 # Keep the file for retry later
91510269161027 except Exception as e:
···9291040 all_notifications = []
9301041 cursor = None
9311042 page_count = 0
932932- max_pages = bot_config['max_notification_pages'] # Safety limit to prevent infinite loops
933933-10431043+ # Safety limit to prevent infinite loops
10441044+ max_pages = bot_config['max_notification_pages']
10451045+9341046 logger.info("Fetching all unread notifications...")
935935-10471047+9361048 while page_count < max_pages:
9371049 try:
9381050 # Fetch notifications page
···9441056 notifications_response = atproto_client.app.bsky.notification.list_notifications(
9451057 params={'limit': 100}
9461058 )
947947-10591059+9481060 page_count += 1
9491061 page_notifications = notifications_response.notifications
950950-10621062+9511063 # Count unread notifications in this page
952952- unread_count = sum(1 for n in page_notifications if not n.is_read and n.reason != "like")
953953- logger.debug(f"Page {page_count}: {len(page_notifications)} notifications, {unread_count} unread (non-like)")
954954-10641064+ unread_count = sum(
10651065+ 1 for n in page_notifications if not n.is_read and n.reason != "like")
10661066+ logger.debug(
10671067+ f"Page {page_count}: {len(page_notifications)} notifications, {unread_count} unread (non-like)")
10681068+9551069 # Add all notifications to our list
9561070 all_notifications.extend(page_notifications)
957957-10711071+9581072 # Check if we have more pages
9591073 if hasattr(notifications_response, 'cursor') and notifications_response.cursor:
9601074 cursor = notifications_response.cursor
9611075 # If this page had no unread notifications, we can stop
9621076 if unread_count == 0:
963963- logger.info(f"No more unread notifications found after {page_count} pages")
10771077+ logger.info(
10781078+ f"No more unread notifications found after {page_count} pages")
9641079 break
9651080 else:
9661081 # No more pages
967967- logger.info(f"Fetched all notifications across {page_count} pages")
10821082+ logger.info(
10831083+ f"Fetched all notifications across {page_count} pages")
9681084 break
969969-10851085+9701086 except Exception as e:
9711087 error_str = str(e)
972972- logger.error(f"Error fetching notifications page {page_count}: {e}")
973973-10881088+ logger.error(
10891089+ f"Error fetching notifications page {page_count}: {e}")
10901090+9741091 # Handle specific API errors
9751092 if 'rate limit' in error_str.lower():
976976- logger.warning("Rate limit hit while fetching notifications, will retry next cycle")
10931093+ logger.warning(
10941094+ "Rate limit hit while fetching notifications, will retry next cycle")
9771095 break
9781096 elif '401' in error_str or 'unauthorized' in error_str.lower():
9791097 logger.error("Authentication error, re-raising exception")
9801098 raise
9811099 else:
9821100 # For other errors, try to continue with what we have
983983- logger.warning("Continuing with notifications fetched so far")
11011101+ logger.warning(
11021102+ "Continuing with notifications fetched so far")
9841103 break
98511049861105 # Queue all unread notifications (except likes)
···99311129941113 # Mark all notifications as seen immediately after queuing (unless in testing mode)
9951114 if testing_mode:
996996- logger.info("🧪 TESTING MODE: Skipping marking notifications as seen")
11151115+ logger.info(
11161116+ "🧪 TESTING MODE: Skipping marking notifications as seen")
9971117 else:
9981118 if new_count > 0:
999999- atproto_client.app.bsky.notification.update_seen({'seen_at': last_seen_at})
10001000- logger.info(f"Queued {new_count} new notifications and marked as seen")
11191119+ atproto_client.app.bsky.notification.update_seen(
11201120+ {'seen_at': last_seen_at})
11211121+ logger.info(
11221122+ f"Queued {new_count} new notifications and marked as seen")
10011123 else:
10021124 logger.debug("No new notifications to queue")
1003112510041126 # Now process the entire queue (old + new notifications)
10051005- load_and_process_queued_notifications(void_agent, atproto_client, testing_mode)
11271127+ load_and_process_queued_notifications(
11281128+ void_agent, atproto_client, testing_mode)
1006112910071130 except Exception as e:
10081131 logger.error(f"Error processing notifications: {e}")
···1010113310111134def main():
10121135 # Parse command line arguments
10131013- parser = argparse.ArgumentParser(description='Void Bot - Bluesky autonomous agent')
10141014- parser.add_argument('--test', action='store_true', help='Run in testing mode (no messages sent, queue files preserved)')
10151015- parser.add_argument('--no-git', action='store_true', help='Skip git operations when exporting agent state')
11361136+ parser = argparse.ArgumentParser(
11371137+ description='Void Bot - Bluesky autonomous agent')
11381138+ parser.add_argument('--test', action='store_true',
11391139+ help='Run in testing mode (no messages sent, queue files preserved)')
11401140+ parser.add_argument('--no-git', action='store_true',
11411141+ help='Skip git operations when exporting agent state')
10161142 args = parser.parse_args()
10171017-11431143+10181144 global TESTING_MODE
10191145 TESTING_MODE = args.test
10201020-11461146+10211147 # Store no-git flag globally for use in export_agent_state calls
10221148 global SKIP_GIT
10231149 SKIP_GIT = args.no_git
10241024-11501150+10251151 if TESTING_MODE:
10261152 logger.info("🧪 === RUNNING IN TESTING MODE ===")
10271153 logger.info(" - No messages will be sent to Bluesky")
···10341160 logger.info("=== STARTING VOID BOT ===")
10351161 void_agent = initialize_void()
10361162 logger.info(f"Void agent initialized: {void_agent.id}")
10371037-11631163+10381164 # Check if agent has required tools
10391165 if hasattr(void_agent, 'tools') and void_agent.tools:
10401166 tool_names = [tool.name for tool in void_agent.tools]
10411167 # Check for bluesky-related tools
10421042- bluesky_tools = [name for name in tool_names if 'bluesky' in name.lower() or 'reply' in name.lower()]
11681168+ bluesky_tools = [name for name in tool_names if 'bluesky' in name.lower(
11691169+ ) or 'reply' in name.lower()]
10431170 if not bluesky_tools:
10441044- logger.warning("No Bluesky-related tools found! Agent may not be able to reply.")
11711171+ logger.warning(
11721172+ "No Bluesky-related tools found! Agent may not be able to reply.")
10451173 else:
10461174 logger.warning("Agent has no tools registered!")
1047117510481176 # Initialize Bluesky client
11771177+ logger.debug("Connecting to Bluesky")
10491178 atproto_client = bsky_utils.default_login()
10501179 logger.info("Connected to Bluesky")
1051118010521181 # Main loop
10531053- logger.info(f"Starting notification monitoring, checking every {FETCH_NOTIFICATIONS_DELAY_SEC} seconds")
11821182+ logger.info(
11831183+ f"Starting notification monitoring, checking every {FETCH_NOTIFICATIONS_DELAY_SEC} seconds")
1054118410551185 cycle_count = 0
10561186 while True:
···10601190 # Log cycle completion with stats
10611191 elapsed_time = time.time() - start_time
10621192 total_messages = sum(message_counters.values())
10631063- messages_per_minute = (total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
10641064-11931193+ messages_per_minute = (
11941194+ total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
11951195+10651196 if total_messages > 0:
10661066- logger.info(f"Cycle {cycle_count} complete. Session totals: {total_messages} messages ({message_counters['mentions']} mentions, {message_counters['replies']} replies) | {messages_per_minute:.1f} msg/min")
11971197+ logger.info(
11981198+ f"Cycle {cycle_count} complete. Session totals: {total_messages} messages ({message_counters['mentions']} mentions, {message_counters['replies']} replies) | {messages_per_minute:.1f} msg/min")
10671199 sleep(FETCH_NOTIFICATIONS_DELAY_SEC)
1068120010691201 except KeyboardInterrupt:
10701202 # Final stats
10711203 elapsed_time = time.time() - start_time
10721204 total_messages = sum(message_counters.values())
10731073- messages_per_minute = (total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
10741074-12051205+ messages_per_minute = (
12061206+ total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
12071207+10751208 logger.info("=== BOT STOPPED BY USER ===")
10761076- logger.info(f"📊 Final session stats: {total_messages} total messages processed in {elapsed_time/60:.1f} minutes")
12091209+ logger.info(
12101210+ f"📊 Final session stats: {total_messages} total messages processed in {elapsed_time/60:.1f} minutes")
10771211 logger.info(f" - {message_counters['mentions']} mentions")
10781212 logger.info(f" - {message_counters['replies']} replies")
10791213 logger.info(f" - {message_counters['follows']} follows")
10801080- logger.info(f" - {message_counters['reposts_skipped']} reposts skipped")
10811081- logger.info(f" - Average rate: {messages_per_minute:.1f} messages/minute")
12141214+ logger.info(
12151215+ f" - {message_counters['reposts_skipped']} reposts skipped")
12161216+ logger.info(
12171217+ f" - Average rate: {messages_per_minute:.1f} messages/minute")
10821218 break
10831219 except Exception as e:
10841220 logger.error(f"=== ERROR IN MAIN LOOP CYCLE {cycle_count} ===")
10851221 logger.error(f"Error details: {e}")
10861222 # Wait a bit longer on errors
10871087- logger.info(f"Sleeping for {FETCH_NOTIFICATIONS_DELAY_SEC * 2} seconds due to error...")
12231223+ logger.info(
12241224+ f"Sleeping for {FETCH_NOTIFICATIONS_DELAY_SEC * 2} seconds due to error...")
10881225 sleep(FETCH_NOTIFICATIONS_DELAY_SEC * 2)
1089122610901227
+79-47
bsky_utils.py
···11+import json
22+import yaml
33+import dotenv
14import os
25import logging
36from typing import Optional, Dict, Any, List
···1013logger = logging.getLogger("bluesky_session_handler")
11141215# Load the environment variables
1313-import dotenv
1416dotenv.load_dotenv(override=True)
15171616-import yaml
1717-import json
18181919# Strip fields. A list of fields to remove from a JSON object
2020STRIP_FIELDS = [
···6363 "mime_type",
6464 "size",
6565]
6666+6767+6668def convert_to_basic_types(obj):
6769 """Convert complex Python objects to basic types for JSON/YAML serialization."""
6870 if hasattr(obj, '__dict__'):
···117119def flatten_thread_structure(thread_data):
118120 """
119121 Flatten a nested thread structure into a list while preserving all data.
120120-122122+121123 Args:
122124 thread_data: The thread data from get_post_thread
123123-125125+124126 Returns:
125127 Dict with 'posts' key containing a list of posts in chronological order
126128 """
127129 posts = []
128128-130130+129131 def traverse_thread(node):
130132 """Recursively traverse the thread structure to collect posts."""
131133 if not node:
132134 return
133133-135135+134136 # If this node has a parent, traverse it first (to maintain chronological order)
135137 if hasattr(node, 'parent') and node.parent:
136138 traverse_thread(node.parent)
137137-139139+138140 # Then add this node's post
139141 if hasattr(node, 'post') and node.post:
140142 # Convert to dict if needed to ensure we can process it
···144146 post_dict = node.post.copy()
145147 else:
146148 post_dict = {}
147147-149149+148150 posts.append(post_dict)
149149-151151+150152 # Handle the thread structure
151153 if hasattr(thread_data, 'thread'):
152154 # Start from the main thread node
153155 traverse_thread(thread_data.thread)
154156 elif hasattr(thread_data, '__dict__') and 'thread' in thread_data.__dict__:
155157 traverse_thread(thread_data.__dict__['thread'])
156156-158158+157159 # Return a simple structure with posts list
158160 return {'posts': posts}
159161···171173 """
172174 # First flatten the thread structure to avoid deep nesting
173175 flattened = flatten_thread_structure(thread)
174174-176176+175177 # Convert complex objects to basic types
176178 basic_thread = convert_to_basic_types(flattened)
177179···184186 return yaml.dump(cleaned_thread, indent=2, allow_unicode=True, default_flow_style=False)
185187186188187187-188188-189189-190190-191191-192189def get_session(username: str) -> Optional[str]:
193190 try:
194191 with open(f"session_{username}.txt", encoding="UTF-8") as f:
···196193 except FileNotFoundError:
197194 logger.debug(f"No existing session found for {username}")
198195 return None
196196+199197200198def save_session(username: str, session_string: str) -> None:
201199 with open(f"session_{username}.txt", "w", encoding="UTF-8") as f:
202200 f.write(session_string)
203201 logger.debug(f"Session saved for {username}")
204202203203+205204def on_session_change(username: str, event: SessionEvent, session: Session) -> None:
206205 logger.debug(f"Session changed: {event} {repr(session)}")
207206 if event in (SessionEvent.CREATE, SessionEvent.REFRESH):
208207 logger.debug(f"Saving changed session for {username}")
209208 save_session(username, session.export())
209209+210210211211def init_client(username: str, password: str, pds_uri: str = "https://bsky.social") -> Client:
212212 if pds_uri is None:
···243243 password = config['password']
244244 pds_uri = config['pds_uri']
245245 except (ImportError, FileNotFoundError, KeyError) as e:
246246- logger.warning(f"Could not load from config file ({e}), falling back to environment variables")
246246+ logger.warning(
247247+ f"Could not load from config file ({e}), falling back to environment variables")
247248 username = os.getenv("BSKY_USERNAME")
248249 password = os.getenv("BSKY_PASSWORD")
249250 pds_uri = os.getenv("PDS_URI", "https://bsky.social")
···262263263264 return init_client(username, password, pds_uri)
264265266266+265267def remove_outside_quotes(text: str) -> str:
266268 """
267269 Remove outside double quotes from response text.
268268-270270+269271 Only handles double quotes to avoid interfering with contractions:
270272 - Double quotes: "text" → text
271273 - Preserves single quotes and internal quotes
272272-274274+273275 Args:
274276 text: The text to process
275275-277277+276278 Returns:
277279 Text with outside double quotes removed
278280 """
279281 if not text or len(text) < 2:
280282 return text
281281-283283+282284 text = text.strip()
283283-285285+284286 # Only remove double quotes from start and end
285287 if text.startswith('"') and text.endswith('"'):
286288 return text[1:-1]
287287-289289+288290 return text
291291+289292290293def reply_to_post(client: Client, text: str, reply_to_uri: str, reply_to_cid: str, root_uri: Optional[str] = None, root_cid: Optional[str] = None, lang: Optional[str] = None) -> Dict[str, Any]:
291294 """
···304307 The response from sending the post
305308 """
306309 import re
307307-310310+308311 # If root is not provided, this is a reply to the root post
309312 if root_uri is None:
310313 root_uri = reply_to_uri
311314 root_cid = reply_to_cid
312315313316 # Create references for the reply
314314- parent_ref = models.create_strong_ref(models.ComAtprotoRepoStrongRef.Main(uri=reply_to_uri, cid=reply_to_cid))
315315- root_ref = models.create_strong_ref(models.ComAtprotoRepoStrongRef.Main(uri=root_uri, cid=root_cid))
317317+ parent_ref = models.create_strong_ref(
318318+ models.ComAtprotoRepoStrongRef.Main(uri=reply_to_uri, cid=reply_to_cid))
319319+ root_ref = models.create_strong_ref(
320320+ models.ComAtprotoRepoStrongRef.Main(uri=root_uri, cid=root_cid))
316321317322 # Parse rich text facets (mentions and URLs)
318323 facets = []
319324 text_bytes = text.encode("UTF-8")
320320-325325+321326 # Parse mentions - fixed to handle @ at start of text
322327 mention_regex = rb"(?:^|[$|\W])(@([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)"
323323-328328+324329 for m in re.finditer(mention_regex, text_bytes):
325330 handle = m.group(1)[1:].decode("UTF-8") # Remove @ prefix
326331 # Adjust byte positions to account for the optional prefix
···336341 byteStart=mention_start,
337342 byteEnd=mention_end
338343 ),
339339- features=[models.AppBskyRichtextFacet.Mention(did=resolve_resp.did)]
344344+ features=[models.AppBskyRichtextFacet.Mention(
345345+ did=resolve_resp.did)]
340346 )
341347 )
342348 except Exception as e:
343343- logger.debug(f"Failed to resolve handle {handle}: {e}")
349349+ # Handle specific error cases
350350+ error_str = str(e)
351351+ if 'Could not find user info' in error_str or 'InvalidRequest' in error_str:
352352+ logger.warning(
353353+ f"User @{handle} not found (account may be deleted/suspended), skipping mention facet")
354354+ elif 'BadRequestError' in error_str:
355355+ logger.warning(
356356+ f"Bad request when resolving @{handle}, skipping mention facet: {e}")
357357+ else:
358358+ logger.debug(f"Failed to resolve handle @{handle}: {e}")
344359 continue
345345-360360+346361 # Parse URLs - fixed to handle URLs at start of text
347362 url_regex = rb"(?:^|[$|\W])(https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*[-a-zA-Z0-9@%_\+~#//=])?)"
348348-363363+349364 for m in re.finditer(url_regex, text_bytes):
350365 url = m.group(1).decode("UTF-8")
351366 # Adjust byte positions to account for the optional prefix
···365380 if facets:
366381 response = client.send_post(
367382 text=text,
368368- reply_to=models.AppBskyFeedPost.ReplyRef(parent=parent_ref, root=root_ref),
383383+ reply_to=models.AppBskyFeedPost.ReplyRef(
384384+ parent=parent_ref, root=root_ref),
369385 facets=facets,
370386 langs=[lang] if lang else None
371387 )
372388 else:
373389 response = client.send_post(
374390 text=text,
375375- reply_to=models.AppBskyFeedPost.ReplyRef(parent=parent_ref, root=root_ref),
391391+ reply_to=models.AppBskyFeedPost.ReplyRef(
392392+ parent=parent_ref, root=root_ref),
376393 langs=[lang] if lang else None
377394 )
378395···392409 The thread data or None if not found
393410 """
394411 try:
395395- thread = client.app.bsky.feed.get_post_thread({'uri': uri, 'parent_height': 60, 'depth': 10})
412412+ thread = client.app.bsky.feed.get_post_thread(
413413+ {'uri': uri, 'parent_height': 60, 'depth': 10})
396414 return thread
397415 except Exception as e:
398398- logger.error(f"Error fetching post thread: {e}")
416416+ error_str = str(e)
417417+ # Handle specific error cases more gracefully
418418+ if 'Could not find user info' in error_str or 'InvalidRequest' in error_str:
419419+ logger.warning(
420420+ f"User account not found for post URI {uri} (account may be deleted/suspended)")
421421+ elif 'NotFound' in error_str or 'Post not found' in error_str:
422422+ logger.warning(f"Post not found for URI {uri}")
423423+ elif 'BadRequestError' in error_str:
424424+ logger.warning(f"Bad request error for URI {uri}: {e}")
425425+ else:
426426+ logger.error(f"Error fetching post thread: {e}")
399427 return None
400428401429···492520 logger.error("Reply messages list cannot be empty")
493521 return None
494522 if len(reply_messages) > 15:
495495- logger.error(f"Cannot send more than 15 reply messages (got {len(reply_messages)})")
523523+ logger.error(
524524+ f"Cannot send more than 15 reply messages (got {len(reply_messages)})")
496525 return None
497497-526526+498527 # Get the post URI and CID from the notification (handle both dict and object)
499528 if isinstance(notification, dict):
500529 post_uri = notification.get('uri')
···512541513542 # Get the thread to find the root post
514543 thread_data = get_post_thread(client, post_uri)
515515-544544+516545 root_uri = post_uri
517546 root_cid = post_cid
518547···532561 responses = []
533562 current_parent_uri = post_uri
534563 current_parent_cid = post_cid
535535-564564+536565 for i, message in enumerate(reply_messages):
537537- logger.info(f"Sending reply {i+1}/{len(reply_messages)}: {message[:50]}...")
538538-566566+ logger.info(
567567+ f"Sending reply {i+1}/{len(reply_messages)}: {message[:50]}...")
568568+539569 # Send this reply
540570 response = reply_to_post(
541571 client=client,
···546576 root_cid=root_cid,
547577 lang=lang
548578 )
549549-579579+550580 if not response:
551551- logger.error(f"Failed to send reply {i+1}, posting system failure message")
581581+ logger.error(
582582+ f"Failed to send reply {i+1}, posting system failure message")
552583 # Try to post a system failure message
553584 failure_response = reply_to_post(
554585 client=client,
···564595 current_parent_uri = failure_response.uri
565596 current_parent_cid = failure_response.cid
566597 else:
567567- logger.error("Could not even send system failure message, stopping thread")
598598+ logger.error(
599599+ "Could not even send system failure message, stopping thread")
568600 return responses if responses else None
569601 else:
570602 responses.append(response)
···572604 if i < len(reply_messages) - 1: # Not the last message
573605 current_parent_uri = response.uri
574606 current_parent_cid = response.cid
575575-607607+576608 logger.info(f"Successfully sent {len(responses)} threaded replies")
577609 return responses
578610
+16-8
register_tools.py
···44import sys
55import logging
66from typing import List
77-from dotenv import load_dotenv
87from letta_client import Letta
98from rich.console import Console
109from rich.table import Table
1010+from config_loader import get_config, get_letta_config, get_agent_config
11111212# Import standalone functions and their schemas
1313from tools.search import search_bluesky_posts, SearchArgs
···1818from tools.thread import add_post_to_bluesky_reply_thread, ReplyThreadPostArgs
1919from tools.ignore import ignore_notification, IgnoreNotificationArgs
20202121-load_dotenv()
2121+config = get_config()
2222+letta_config = get_letta_config()
2323+agent_config = get_agent_config()
2224logging.basicConfig(level=logging.INFO)
2325logger = logging.getLogger(__name__)
2426console = Console()
···101103]
102104103105104104-def register_tools(agent_name: str = "void", tools: List[str] = None):
106106+def register_tools(agent_name: str = None, tools: List[str] = None):
105107 """Register tools with a Letta agent.
106108107109 Args:
108108- agent_name: Name of the agent to attach tools to
110110+ agent_name: Name of the agent to attach tools to. If None, uses config default.
109111 tools: List of tool names to register. If None, registers all tools.
110112 """
113113+ # Use agent name from config if not provided
114114+ if agent_name is None:
115115+ agent_name = agent_config['name']
116116+111117 try:
112112- # Initialize Letta client with API key
113113- client = Letta(token=os.environ["LETTA_API_KEY"])
118118+ # Initialize Letta client with API key from config
119119+ client = Letta(token=letta_config['api_key'])
114120115121 # Find the agent
116122 agents = client.agents.list()
···201207 import argparse
202208203209 parser = argparse.ArgumentParser(description="Register Void tools with a Letta agent")
204204- parser.add_argument("agent", nargs="?", default="void", help="Agent name (default: void)")
210210+ parser.add_argument("agent", nargs="?", default=None, help=f"Agent name (default: {agent_config['name']})")
205211 parser.add_argument("--tools", nargs="+", help="Specific tools to register (default: all)")
206212 parser.add_argument("--list", action="store_true", help="List available tools")
207213···210216 if args.list:
211217 list_available_tools()
212218 else:
213213- console.print(f"\n[bold]Registering tools for agent: {args.agent}[/bold]\n")
219219+ # Use config default if no agent specified
220220+ agent_name = args.agent if args.agent is not None else agent_config['name']
221221+ console.print(f"\n[bold]Registering tools for agent: {agent_name}[/bold]\n")
214222 register_tools(args.agent, args.tools)
+23
requirements.txt
···11+# Core dependencies for Void Bot
22+33+# Configuration and utilities
44+PyYAML>=6.0.2
55+rich>=14.0.0
66+python-dotenv>=1.0.0
77+88+# Letta API client
99+letta-client>=0.1.198
1010+1111+# AT Protocol (Bluesky) client
1212+atproto>=0.0.54
1313+1414+# HTTP client for API calls
1515+httpx>=0.28.1
1616+httpx-sse>=0.4.0
1717+requests>=2.31.0
1818+1919+# Data validation
2020+pydantic>=2.11.7
2121+2222+# Async support
2323+anyio>=4.9.0