//! An AI agent built to run Ralph loops — plan mode for planning, ralph mode for implementing.
1use async_trait::async_trait;
2use serde::{Deserialize, Serialize};
3use serde_json::Value;
4
5/// Role in a conversation
6#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
7#[serde(rename_all = "lowercase")]
8pub enum Role {
9 User,
10 Assistant,
11 System,
12 Tool,
13}
14
/// A message in the conversation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    // Who authored this message (user, assistant, system, or tool).
    pub role: Role,
    // The message text; for tool messages this is the tool's output
    // (see `Message::tool_result`).
    pub content: String,
    // Links a tool-result message back to the call that produced it.
    // Only set by `Message::tool_result`; skipped during serialization
    // when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
23
24impl Message {
25 pub fn user(content: impl Into<String>) -> Self {
26 Self {
27 role: Role::User,
28 content: content.into(),
29 tool_call_id: None,
30 }
31 }
32
33 pub fn assistant(content: impl Into<String>) -> Self {
34 Self {
35 role: Role::Assistant,
36 content: content.into(),
37 tool_call_id: None,
38 }
39 }
40
41 pub fn system(content: impl Into<String>) -> Self {
42 Self {
43 role: Role::System,
44 content: content.into(),
45 tool_call_id: None,
46 }
47 }
48
49 pub fn tool_result(tool_call_id: impl Into<String>, content: impl Into<String>) -> Self {
50 Self {
51 role: Role::Tool,
52 content: content.into(),
53 tool_call_id: Some(tool_call_id.into()),
54 }
55 }
56}
57
/// Definition of a tool that can be called by the LLM
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolDefinition {
    // Tool name the model uses to request an invocation.
    pub name: String,
    // Description of what the tool does, shown to the model.
    pub description: String,
    // Parameter specification as free-form JSON — presumably a JSON Schema
    // object; confirm against the provider modules.
    pub parameters: Value,
}
65
/// A tool call requested by the LLM
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
    // Identifier for this call; echoed back via `Message::tool_result`.
    pub id: String,
    // Name of the tool to invoke — presumably matches a
    // `ToolDefinition::name`; confirm against the provider modules.
    pub name: String,
    // Arguments for the invocation as free-form JSON.
    pub parameters: Value,
}
73
/// Result of executing a tool
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResult {
    // Whether the tool ran successfully.
    pub success: bool,
    // Tool output text — NOTE(review): presumably carries the error text
    // when `success` is false; confirm against the tool executors.
    pub output: String,
}
80
/// Content of a response from the LLM
///
/// `#[serde(untagged)]`: serialized with no variant tag, so deserialization
/// tries each variant in declaration order and takes the first that matches
/// (a JSON string becomes `Text`; an array of call objects becomes
/// `ToolCalls`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ResponseContent {
    /// Plain text reply.
    Text(String),
    /// One or more tool invocations requested by the model.
    ToolCalls(Vec<ToolCall>),
}
88
/// Response from the LLM
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Response {
    // Either plain text or a batch of requested tool calls.
    pub content: ResponseContent,
    // Provider-reported stop reason, if any — semantics are
    // provider-specific; see the provider modules.
    pub stop_reason: Option<String>,
}
95
/// Trait for LLM client implementations
///
/// `Send + Sync` so a client can be shared across threads/tasks behind a
/// trait object. Implementations presumably live in the provider modules
/// declared below (`anthropic`, `openai`, `ollama`, `mock`) — confirm there.
#[async_trait]
pub trait LlmClient: Send + Sync {
    /// Send the conversation `messages`, along with the available `tools`,
    /// to the model and return its response.
    ///
    /// # Errors
    /// Propagates any failure from the underlying implementation as
    /// `anyhow::Error`.
    async fn chat(
        &self,
        messages: Vec<Message>,
        tools: &[ToolDefinition],
    ) -> anyhow::Result<Response>;
}
105
// Submodules: per-provider client implementations plus supporting
// infrastructure (names suggest error types, a client factory, a mock for
// tests, and retry logic — see each module for specifics).
pub mod anthropic;
pub mod error;
pub mod factory;
pub mod mock;
pub mod ollama;
pub mod openai;
pub mod retry;