a digital person for bluesky

Slim down verbose logging for cleaner default output

- Change default log level from DEBUG to INFO for void_bot logger
- Remove redundant startup messages and tool listings
- Reduce main loop cycle noise (only show stats when messages processed)
- Remove excessive notification processing status messages
- Move session-related logs to DEBUG level in bsky_utils.py
- Clean up user block attachment/detachment logs in tools/blocks.py

Keeps queue processing logs and important status messages while
eliminating noise from normal operation; see the sketch below for the
resulting logger defaults.
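
For reference, a minimal sketch of the logging defaults after this change
(logger name and format string taken from the bsky.py diff below); this is
illustrative only, not code added by the commit. Flipping the void_bot
logger back to DEBUG restores the previous verbosity when troubleshooting:

    import logging

    # New default: the bot logger runs at INFO instead of DEBUG.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )
    logger = logging.getLogger("void_bot")
    logger.setLevel(logging.INFO)

    # Troubleshooting: uncomment to bring back the verbose DEBUG output.
    # logger.setLevel(logging.DEBUG)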

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

Changed files (+14 -31)

bsky.py (+6 -23)
···
 
 # Configure logging
 logging.basicConfig(
-    level=logging.DEBUG, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
 )
 logger = logging.getLogger("void_bot")
-logger.setLevel(logging.DEBUG)
+logger.setLevel(logging.INFO)
 
 # Create a separate logger for prompts (set to WARNING to hide by default)
 prompt_logger = logging.getLogger("void_bot.prompts")
···
 
 def load_and_process_queued_notifications(void_agent, atproto_client):
     """Load and process all notifications from the queue in priority order."""
-    logger.info("Loading queued notifications from disk...")
     try:
         # Get all JSON files in queue directory (excluding processed_notifications.json)
         # Files are sorted by name, which puts priority files first (0_ prefix before 1_ prefix)
         queue_files = sorted([f for f in QUEUE_DIR.glob("*.json") if f.name != "processed_notifications.json"])
 
         if not queue_files:
-            logger.info("No queued notifications found")
             return
 
         logger.info(f"Processing {len(queue_files)} queued notifications")
···
                 notif_data = json.load(f)
 
             # Process based on type using dict data directly
-            logger.info(f"Processing {notif_data['reason']} from @{notif_data['author']['handle']}")
             success = False
             if notif_data['reason'] == "mention":
                 success = process_mention(void_agent, atproto_client, notif_data)
···
                 if success:
                     message_counters['follows'] += 1
             elif notif_data['reason'] == "repost":
-                logger.info(f"Skipping repost notification from @{notif_data['author']['handle']}")
+                # Skip reposts silently
                 success = True  # Skip reposts but mark as successful to remove from queue
                 if success:
                     message_counters['reposts_skipped'] += 1
···
 
 def process_notifications(void_agent, atproto_client):
     """Fetch new notifications, queue them, and process the queue."""
-    logger.info("Starting notification processing cycle...")
     try:
         # First, process any existing queued notifications
-        logger.info("Processing existing queued notifications...")
         load_and_process_queued_notifications(void_agent, atproto_client)
 
         # Get current time for marking notifications as seen
···
             logger.debug("No new notifications to queue")
 
         # Process the queue (including any newly added notifications)
-        logger.info("Processing notification queue after fetching...")
         load_and_process_queued_notifications(void_agent, atproto_client)
 
     except Exception as e:
···
     global start_time
     start_time = time.time()
     logger.info("=== STARTING VOID BOT ===")
-    logger.info("Initializing Void bot...")
-
-    # Initialize the Letta agent
-    logger.info("Calling initialize_void()...")
     void_agent = initialize_void()
     logger.info(f"Void agent initialized: {void_agent.id}")
 
     # Check if agent has required tools
     if hasattr(void_agent, 'tools') and void_agent.tools:
         tool_names = [tool.name for tool in void_agent.tools]
-        logger.info(f"Agent has tools: {tool_names}")
-
         # Check for bluesky-related tools
         bluesky_tools = [name for name in tool_names if 'bluesky' in name.lower() or 'reply' in name.lower()]
-        if bluesky_tools:
-            logger.info(f"Found Bluesky-related tools: {bluesky_tools}")
-        else:
+        if not bluesky_tools:
             logger.warning("No Bluesky-related tools found! Agent may not be able to reply.")
     else:
         logger.warning("Agent has no tools registered!")
 
     # Initialize Bluesky client
-    logger.info("Connecting to Bluesky...")
     atproto_client = bsky_utils.default_login()
     logger.info("Connected to Bluesky")
 
     # Main loop
-    logger.info(f"=== ENTERING MAIN LOOP ===")
     logger.info(f"Starting notification monitoring, checking every {FETCH_NOTIFICATIONS_DELAY_SEC} seconds")
 
     cycle_count = 0
     while True:
         try:
             cycle_count += 1
-            logger.info(f"=== MAIN LOOP CYCLE {cycle_count} ===")
             process_notifications(void_agent, atproto_client)
             # Log cycle completion with stats
             elapsed_time = time.time() - start_time
             total_messages = sum(message_counters.values())
             messages_per_minute = (total_messages / elapsed_time * 60) if elapsed_time > 0 else 0
 
-            logger.info(f"Cycle {cycle_count} complete. Session totals: {total_messages} messages ({message_counters['mentions']} mentions, {message_counters['replies']} replies) | {messages_per_minute:.1f} msg/min")
-            logger.info(f"Sleeping for {FETCH_NOTIFICATIONS_DELAY_SEC} seconds...")
+            if total_messages > 0:
+                logger.info(f"Cycle {cycle_count} complete. Session totals: {total_messages} messages ({message_counters['mentions']} mentions, {message_counters['replies']} replies) | {messages_per_minute:.1f} msg/min")
             sleep(FETCH_NOTIFICATIONS_DELAY_SEC)
 
         except KeyboardInterrupt:
bsky_utils.py (+5 -5)
···
     logger.debug(f"Session saved for {username}")
 
 def on_session_change(username: str, event: SessionEvent, session: Session) -> None:
-    logger.info(f"Session changed: {event} {repr(session)}")
+    logger.debug(f"Session changed: {event} {repr(session)}")
     if event in (SessionEvent.CREATE, SessionEvent.REFRESH):
-        logger.info(f"Saving changed session for {username}")
+        logger.debug(f"Saving changed session for {username}")
         save_session(username, session.export())
 
 def init_client(username: str, password: str) -> Client:
···
     pds_uri = "https://bsky.social"
 
     # Print the PDS URI
-    logger.info(f"Using PDS URI: {pds_uri}")
+    logger.debug(f"Using PDS URI: {pds_uri}")
 
     client = Client(pds_uri)
     client.on_session_change(
···
 
     session_string = get_session(username)
     if session_string:
-        logger.info(f"Reusing existing session for {username}")
+        logger.debug(f"Reusing existing session for {username}")
         client.login(session_string=session_string)
     else:
-        logger.info(f"Creating new session for {username}")
+        logger.debug(f"Creating new session for {username}")
         client.login(username, password)
 
     return client
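
If the session or PDS details are ever needed again, only this module's logger
has to be raised rather than the global default. A hedged sketch, assuming
bsky_utils.py names its logger with logging.getLogger(__name__) (i.e.
"bsky_utils"); the actual logger name is not visible in this diff:

    import logging

    # Assumption: the module logger is named "bsky_utils"; adjust if the
    # module defines its logger under a different name.
    logging.getLogger("bsky_utils").setLevel(logging.DEBUG)

    # Other loggers keep the INFO default, so only the session/PDS messages
    # moved to DEBUG in this file become visible again.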
tools/blocks.py (+3 -3)
···
             blocks = client.blocks.list(label=block_label)
             if blocks and len(blocks) > 0:
                 block = blocks[0]
-                logger.info(f"Found existing block: {block_label}")
+                logger.debug(f"Found existing block: {block_label}")
             else:
                 block = client.blocks.create(
                     label=block_label,
···
             )
 
             results.append(f"✓ {handle}: Block attached")
-            logger.info(f"Successfully attached block {block_label} to agent")
+            logger.debug(f"Successfully attached block {block_label} to agent")
 
         except Exception as e:
             results.append(f"✗ {handle}: Error - {str(e)}")
···
                 block_id=block_label_to_id[block_label]
             )
             results.append(f"✓ {handle}: Detached")
-            logger.info(f"Successfully detached block {block_label} from agent")
+            logger.debug(f"Successfully detached block {block_label} from agent")
         except Exception as e:
             results.append(f"✗ {handle}: Error during detachment - {str(e)}")
             logger.error(f"Error detaching block {block_label}: {e}")