An AI agent built to run Ralph loops — plan mode for planning and Ralph mode for implementing.
at main 70 lines 2.0 kB view raw
1use super::{LlmClient, Message, Response, ResponseContent, ToolCall, ToolDefinition}; 2use async_trait::async_trait; 3use std::collections::VecDeque; 4use std::sync::{Arc, Mutex}; 5 6type RecordedCalls = Vec<(Vec<Message>, Vec<ToolDefinition>)>; 7 8pub struct MockLlmClient { 9 responses: Arc<Mutex<VecDeque<Response>>>, 10 recorded_calls: Arc<Mutex<RecordedCalls>>, 11} 12 13impl MockLlmClient { 14 pub fn new() -> Self { 15 Self { 16 responses: Arc::new(Mutex::new(VecDeque::new())), 17 recorded_calls: Arc::new(Mutex::new(Vec::new())), 18 } 19 } 20 21 pub fn queue_text_response(&self, text: &str) { 22 let response = Response { 23 content: ResponseContent::Text(text.to_string()), 24 stop_reason: Some("end_turn".to_string()), 25 }; 26 self.responses.lock().unwrap().push_back(response); 27 } 28 29 pub fn queue_tool_call(&self, name: &str, params: serde_json::Value) { 30 let response = Response { 31 content: ResponseContent::ToolCalls(vec![ToolCall { 32 id: format!("call_{}", uuid::Uuid::new_v4()), 33 name: name.to_string(), 34 parameters: params, 35 }]), 36 stop_reason: Some("tool_use".to_string()), 37 }; 38 self.responses.lock().unwrap().push_back(response); 39 } 40 41 pub fn get_recorded_calls(&self) -> Vec<(Vec<Message>, Vec<ToolDefinition>)> { 42 self.recorded_calls.lock().unwrap().clone() 43 } 44} 45 46impl Default for MockLlmClient { 47 fn default() -> Self { 48 Self::new() 49 } 50} 51 52#[async_trait] 53impl LlmClient for MockLlmClient { 54 async fn chat( 55 &self, 56 messages: Vec<Message>, 57 tools: &[ToolDefinition], 58 ) -> anyhow::Result<Response> { 59 self.recorded_calls 60 .lock() 61 .unwrap() 62 .push((messages, tools.to_vec())); 63 64 self.responses 65 .lock() 66 .unwrap() 67 .pop_front() 68 .ok_or_else(|| anyhow::anyhow!("No more mock responses queued")) 69 } 70}