//! An AI agent built to run Ralph loops — plan mode for planning, ralph mode for implementing.
1use anyhow::{Context, Result};
2use serde::{Deserialize, Serialize};
3use std::path::Path;
4
5#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
6#[serde(rename_all = "lowercase")]
7pub enum LlmProvider {
8 Anthropic,
9 OpenAi,
10 Ollama,
11}
12
/// LLM settings: which provider to talk to, which model, and a token budget.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    /// Backend that serves the requests.
    pub provider: LlmProvider,
    /// Provider-specific model identifier string.
    pub model: String,
    /// Maximum tokens per response; defaults to 8192 when omitted from the TOML.
    #[serde(default = "default_max_tokens")]
    pub max_tokens: u32,
}
20
/// serde default for `LlmConfig::max_tokens`: 8192 tokens.
fn default_max_tokens() -> u32 {
    8_192
}
24
/// Credentials for the Anthropic backend (`[anthropic]` TOML table).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// API key; may be written as `${VAR}` in the TOML — `Config::load`
    /// expands environment variables before parsing.
    pub api_key: String,
}
29
/// Credentials for the OpenAI backend (`[openai]` TOML table).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiConfig {
    /// API key; `${VAR}` placeholders are expanded from the environment
    /// by `Config::load` before parsing.
    pub api_key: String,
}
34
/// Connection settings for a local Ollama server (`[ollama]` TOML table).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaConfig {
    /// Base URL of the Ollama HTTP endpoint.
    pub base_url: String,
}
39
/// Agent-level settings (`[rustagent]` TOML table). Required in the config
/// file — it has no serde default, unlike `[security]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RustagentConfig {
    /// Directory containing spec files; `Default` uses "specs".
    pub spec_dir: String,
    /// Optional iteration cap; `None` means no limit is configured here.
    /// NOTE(review): presumably bounds the Ralph loop — confirm at the caller.
    pub max_iterations: Option<usize>,
}
45
/// Per-mode override table (`[planning]` / `[ralph]`): just an LLM config.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModeConfig {
    /// LLM settings used instead of the top-level `[llm]` for this mode.
    pub llm: LlmConfig,
}
50
51#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
52#[serde(rename_all = "lowercase")]
53pub enum ShellPolicy {
54 Allowlist,
55 Blocklist,
56 Unrestricted,
57}
58
/// Sandboxing limits for tool execution (`[security]` TOML table).
/// Every field has a serde default, so the table may be partial or absent.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityConfig {
    /// How shell commands are filtered; defaults to `Allowlist`.
    #[serde(default = "default_shell_policy")]
    pub shell_policy: ShellPolicy,

    /// Commands permitted to run; defaults to a small set of common dev
    /// tools (git, cargo, npm, ls, ...).
    #[serde(default = "default_allowed_commands")]
    pub allowed_commands: Vec<String>,

    /// Patterns to reject; defaults to empty. NOTE(review): pattern syntax
    /// (substring vs. regex) is enforced elsewhere — confirm at the call site.
    #[serde(default)]
    pub blocked_patterns: Vec<String>,

    /// Largest file size handled, in megabytes; defaults to 10.
    #[serde(default = "default_max_file_size_mb")]
    pub max_file_size_mb: u64,

    /// Paths the agent may access; defaults to `["."]`.
    #[serde(default = "default_allowed_paths")]
    pub allowed_paths: Vec<String>,
}
76
/// serde default for `SecurityConfig::shell_policy`: the most restrictive
/// policy, requiring commands to be allowlisted.
fn default_shell_policy() -> ShellPolicy {
    ShellPolicy::Allowlist
}
80
/// serde default for `SecurityConfig::allowed_commands`: a conservative set
/// of common, read-mostly development tools.
fn default_allowed_commands() -> Vec<String> {
    [
        "git", "cargo", "npm", "ls", "cat", "grep", "find", "echo", "pwd",
        "mkdir", "touch",
    ]
    .iter()
    .map(|cmd| cmd.to_string())
    .collect()
}
96
/// serde default for `SecurityConfig::max_file_size_mb`: 10 MB.
fn default_max_file_size_mb() -> u64 {
    10
}
100
/// serde default for `SecurityConfig::allowed_paths`: the current directory only.
fn default_allowed_paths() -> Vec<String> {
    vec![String::from(".")]
}
104
/// Top-level application configuration, parsed from TOML by `Config::load`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Default LLM settings, used unless a mode override is present.
    pub llm: LlmConfig,
    /// Required by `load` when the selected provider is `anthropic`.
    pub anthropic: Option<AnthropicConfig>,
    /// Required by `load` when the selected provider is `openai`.
    pub openai: Option<OpenAiConfig>,
    /// Required by `load` when the selected provider is `ollama`.
    pub ollama: Option<OllamaConfig>,
    /// Optional LLM override for planning mode (see `planning_llm`).
    pub planning: Option<ModeConfig>,
    /// Optional LLM override for ralph mode (see `ralph_llm`).
    pub ralph: Option<ModeConfig>,
    /// Agent settings; the `[rustagent]` table is required.
    pub rustagent: RustagentConfig,
    /// Sandboxing settings; falls back to `SecurityConfig::default()`.
    #[serde(default)]
    pub security: SecurityConfig,
}
117
118impl Config {
119 pub fn load(path: &Path) -> Result<Self> {
120 let content = std::fs::read_to_string(path).context("Failed to read config file")?;
121
122 // Expand environment variables
123 let expanded = Self::expand_env_vars(&content);
124
125 let config: Config = toml::from_str(&expanded).context("Failed to parse config file")?;
126
127 // Validate provider configuration exists
128 match config.llm.provider {
129 LlmProvider::Anthropic if config.anthropic.is_none() => {
130 anyhow::bail!("Anthropic provider selected but [anthropic] config missing")
131 }
132 LlmProvider::OpenAi if config.openai.is_none() => {
133 anyhow::bail!("OpenAI provider selected but [openai] config missing")
134 }
135 LlmProvider::Ollama if config.ollama.is_none() => {
136 anyhow::bail!("Ollama provider selected but [ollama] config missing")
137 }
138 _ => {}
139 }
140
141 Ok(config)
142 }
143
144 fn expand_env_vars(content: &str) -> String {
145 let mut result = content.to_string();
146
147 // Find all ${VAR} patterns
148 while let Some(start) = result.find("${") {
149 if let Some(end) = result[start..].find('}') {
150 let var_name = &result[start + 2..start + end];
151 let value = std::env::var(var_name).unwrap_or_default();
152 result.replace_range(start..start + end + 1, &value);
153 } else {
154 break;
155 }
156 }
157
158 result
159 }
160
161 pub fn planning_llm(&self) -> &LlmConfig {
162 self.planning.as_ref().map(|m| &m.llm).unwrap_or(&self.llm)
163 }
164
165 pub fn ralph_llm(&self) -> &LlmConfig {
166 self.ralph.as_ref().map(|m| &m.llm).unwrap_or(&self.llm)
167 }
168}
169
170impl Default for RustagentConfig {
171 fn default() -> Self {
172 Self {
173 spec_dir: "specs".to_string(),
174 max_iterations: None,
175 }
176 }
177}
178
179impl Default for SecurityConfig {
180 fn default() -> Self {
181 Self {
182 shell_policy: default_shell_policy(),
183 allowed_commands: default_allowed_commands(),
184 blocked_patterns: vec![],
185 max_file_size_mb: default_max_file_size_mb(),
186 allowed_paths: default_allowed_paths(),
187 }
188 }
189}
190
#[cfg(test)]
mod tests {
    use super::*;
    use std::env;

    // NOTE(review): these tests mutate process-wide environment variables;
    // cargo runs tests in parallel threads by default, so they could race
    // with each other — consider serializing env-dependent tests. The
    // `unsafe` blocks are required because env::set_var/remove_var are
    // unsafe fns in Rust edition 2024.

    /// `${VAR}` with a multi-character name expands to the variable's value.
    #[test]
    fn test_expand_env_vars_multichar() {
        // Set a multi-character environment variable
        unsafe {
            env::set_var("MY_TEST_VAR", "test_value");
        }

        let input = "api_key = \"${MY_TEST_VAR}\"";
        let result = Config::expand_env_vars(input);

        assert_eq!(result, "api_key = \"test_value\"");

        // Clean up
        unsafe {
            env::remove_var("MY_TEST_VAR");
        }
    }

    /// Multiple distinct `${VAR}` occurrences are all expanded.
    #[test]
    fn test_expand_env_vars_multiple() {
        unsafe {
            env::set_var("VAR1", "value1");
            env::set_var("VAR2", "value2");
        }

        let input = "key1 = \"${VAR1}\"\nkey2 = \"${VAR2}\"";
        let result = Config::expand_env_vars(input);

        assert_eq!(result, "key1 = \"value1\"\nkey2 = \"value2\"");

        unsafe {
            env::remove_var("VAR1");
            env::remove_var("VAR2");
        }
    }

    /// An unset variable expands to the empty string, not an error.
    #[test]
    fn test_expand_env_vars_missing() {
        let input = "api_key = \"${NONEXISTENT_VAR}\"";
        let result = Config::expand_env_vars(input);

        // Should expand to empty string when var doesn't exist
        assert_eq!(result, "api_key = \"\"");
    }

    /// Selecting `anthropic` without an `[anthropic]` table fails at load.
    #[test]
    fn test_provider_validation_anthropic_missing() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        let mut file = NamedTempFile::new().unwrap();
        writeln!(
            file,
            r#"
[llm]
provider = "anthropic"
model = "claude-3-5-sonnet-20241022"

[rustagent]
spec_dir = "specs"
 "#
        )
        .unwrap();

        let result = Config::load(file.path());
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Anthropic provider selected but [anthropic] config missing")
        );
    }

    /// Selecting `openai` without an `[openai]` table fails at load.
    #[test]
    fn test_provider_validation_openai_missing() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        let mut file = NamedTempFile::new().unwrap();
        writeln!(
            file,
            r#"
[llm]
provider = "openai"
model = "gpt-4"

[rustagent]
spec_dir = "specs"
 "#
        )
        .unwrap();

        let result = Config::load(file.path());
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("OpenAI provider selected but [openai] config missing")
        );
    }

    /// Selecting `ollama` without an `[ollama]` table fails at load.
    #[test]
    fn test_provider_validation_ollama_missing() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        let mut file = NamedTempFile::new().unwrap();
        writeln!(
            file,
            r#"
[llm]
provider = "ollama"
model = "llama2"

[rustagent]
spec_dir = "specs"
 "#
        )
        .unwrap();

        let result = Config::load(file.path());
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Ollama provider selected but [ollama] config missing")
        );
    }

    /// A complete config loads, and `${VAR}` inside a value is expanded
    /// (`${{...}}` in the raw string is writeln!'s escape for a literal `{`).
    #[test]
    fn test_provider_validation_success() {
        use std::io::Write;
        use tempfile::NamedTempFile;

        unsafe {
            env::set_var("TEST_API_KEY", "test_key");
        }

        let mut file = NamedTempFile::new().unwrap();
        writeln!(
            file,
            r#"
[llm]
provider = "anthropic"
model = "claude-3-5-sonnet-20241022"

[anthropic]
api_key = "${{TEST_API_KEY}}"

[rustagent]
spec_dir = "specs"
 "#
        )
        .unwrap();

        let result = Config::load(file.path());
        assert!(result.is_ok());

        let config = result.unwrap();
        assert_eq!(config.llm.provider, LlmProvider::Anthropic);
        assert_eq!(config.anthropic.unwrap().api_key, "test_key");

        unsafe {
            env::remove_var("TEST_API_KEY");
        }
    }
}
365}