# Rustagent Configuration
# Copy this file to rustagent.toml and fill in your API keys.

# Default LLM configuration (fallback for all modes).
[llm]
provider = "anthropic"  # Options: "anthropic", "openai", "ollama"
model = "claude-sonnet-4-20250514"
max_tokens = 8192

# Provider-specific configuration.
# NOTE: TOML has no variable interpolation — "${ANTHROPIC_API_KEY}" is a
# literal string; presumably the application expands these env-var
# placeholders at load time (TODO: confirm against the config loader).
[anthropic]
api_key = "${ANTHROPIC_API_KEY}"

[openai]
api_key = "${OPENAI_API_KEY}"

[ollama]
base_url = "http://localhost:11434"

# Optional: Override LLM for planning mode.
[planning.llm]
provider = "anthropic"
model = "claude-opus-4-20250514"
max_tokens = 16384

# Optional: Override LLM for ralph mode.
[ralph.llm]
provider = "anthropic"
model = "claude-sonnet-4-20250514"
max_tokens = 4096

# Agent configuration.
[rustagent]
spec_dir = "specs"
# TOML has no null; omit this key entirely for unlimited iterations.
max_iterations = 100

# Security configuration.
[security]
shell_policy = "allowlist"  # Options: "allowlist", "blocklist", "unrestricted"
allowed_commands = [
    "git",
    "cargo",
    "bun",
    "ls",
    "cat",
    "grep",
    "find",
    "echo",
    "pwd",
    "mkdir",
    "touch",
]
blocked_patterns = ["npm"]
max_file_size_mb = 10
allowed_paths = ["."]