# Monorepo for Aesthetic.Computer
# aesthetic.computer
#!/usr/bin/env fish
# 🧠 Ant Brain — middleware for LLM calls
# Abstracts the LLM provider so ants can use any backend.
#
# Usage: fish brain.fish --provider <provider> --model <model> --system <text> --prompt <text>
#
# Providers:
#   gh-models   — GitHub Models via `gh models run` (free with Copilot)
#   claude-code — Claude Code CLI via `claude --print` (needs auth)
#   openai      — OpenAI API via curl (needs OPENAI_API_KEY)
#   ollama      — Local Ollama via curl (needs ollama running)
#   custom      — Custom curl endpoint (needs ANT_API_URL, ANT_API_KEY)
#
# Output: prints the LLM response to stdout. Exit code 0 = success, 1 = error.
#
# The brain does NOT have tools/agency. It receives context and returns text.
# The colony script is responsible for acting on the response.

19set -g BRAIN_DIR (realpath (dirname (status filename)))
20
21# Defaults
22set -g PROVIDER "gh-models"
23set -g MODEL ""
24set -g SYSTEM_PROMPT ""
25set -g USER_PROMPT ""
26set -g MAX_TOKENS 4096
27
28# Parse args
29set -l i 1
30while test $i -le (count $argv)
31 switch $argv[$i]
32 case --provider
33 set i (math $i + 1)
34 set PROVIDER $argv[$i]
35 case --model
36 set i (math $i + 1)
37 set MODEL $argv[$i]
38 case --system
39 set i (math $i + 1)
40 set SYSTEM_PROMPT $argv[$i]
41 case --prompt
42 set i (math $i + 1)
43 set USER_PROMPT $argv[$i]
44 case --max-tokens
45 set i (math $i + 1)
46 set MAX_TOKENS $argv[$i]
47 end
48 set i (math $i + 1)
49end
50
51# Default models per provider
52if test -z "$MODEL"
53 switch $PROVIDER
54 case gh-models
55 set MODEL "openai/gpt-4o-mini"
56 case claude-code
57 set MODEL "sonnet"
58 case openai
59 set MODEL "gpt-4o-mini"
60 case ollama
61 set MODEL "llama3.2"
62 case custom
63 set MODEL "default"
64 end
65end
66
67function brain_gh_models
68 # GitHub Models via `gh models run`
69 # System prompt goes via --system-prompt, user prompt is the positional arg
70 if test -n "$SYSTEM_PROMPT"
71 gh models run $MODEL "$USER_PROMPT" \
72 --system-prompt "$SYSTEM_PROMPT" \
73 --max-tokens "$MAX_TOKENS" 2>&1
74 else
75 gh models run $MODEL "$USER_PROMPT" \
76 --max-tokens "$MAX_TOKENS" 2>&1
77 end
78 return $status
79end
80
81function brain_claude_code
82 # Claude Code in headless mode (agentic — has tools)
83 set -l _saved_key "$ANTHROPIC_API_KEY"
84 set -e ANTHROPIC_API_KEY
85
86 if test -n "$SYSTEM_PROMPT"
87 claude --print \
88 --model $MODEL \
89 --system-prompt "$SYSTEM_PROMPT" \
90 --dangerously-skip-permissions \
91 --allowedTools "Bash,Read,Edit,Write" \
92 --max-budget-usd 0.10 \
93 --no-session-persistence \
94 "$USER_PROMPT" 2>&1
95 else
96 claude --print \
97 --model $MODEL \
98 --dangerously-skip-permissions \
99 --allowedTools "Bash,Read,Edit,Write" \
100 --max-budget-usd 0.10 \
101 --no-session-persistence \
102 "$USER_PROMPT" 2>&1
103 end
104
105 set -l result $status
106
107 if test -n "$_saved_key"
108 set -gx ANTHROPIC_API_KEY $_saved_key
109 end
110
111 return $result
112end
113
114function brain_openai
115 # OpenAI API via curl
116 if test -z "$OPENAI_API_KEY"
117 echo "ERROR: OPENAI_API_KEY not set" >&2
118 return 1
119 end
120
121 set -l messages "[]"
122 if test -n "$SYSTEM_PROMPT"
123 set messages (printf '[{"role":"system","content":"%s"},{"role":"user","content":"%s"}]' \
124 (echo $SYSTEM_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g') \
125 (echo $USER_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g'))
126 else
127 set messages (printf '[{"role":"user","content":"%s"}]' \
128 (echo $USER_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g'))
129 end
130
131 set -l response (curl -s https://api.openai.com/v1/chat/completions \
132 -H "Content-Type: application/json" \
133 -H "Authorization: Bearer $OPENAI_API_KEY" \
134 -d "{\"model\":\"$MODEL\",\"messages\":$messages,\"max_tokens\":$MAX_TOKENS}" 2>&1)
135
136 # Extract content from response
137 echo $response | python3 -c "import sys,json; r=json.load(sys.stdin); print(r['choices'][0]['message']['content'])" 2>/dev/null
138 return $status
139end
140
141function brain_ollama
142 # Local Ollama via curl
143 set -l ollama_url (test -n "$OLLAMA_URL" && echo $OLLAMA_URL || echo "http://localhost:11434")
144
145 set -l messages "[]"
146 if test -n "$SYSTEM_PROMPT"
147 set messages (printf '[{"role":"system","content":"%s"},{"role":"user","content":"%s"}]' \
148 (echo $SYSTEM_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g') \
149 (echo $USER_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g'))
150 else
151 set messages (printf '[{"role":"user","content":"%s"}]' \
152 (echo $USER_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g'))
153 end
154
155 set -l response (curl -s "$ollama_url/api/chat" \
156 -d "{\"model\":\"$MODEL\",\"messages\":$messages,\"stream\":false}" 2>&1)
157
158 echo $response | python3 -c "import sys,json; r=json.load(sys.stdin); print(r['message']['content'])" 2>/dev/null
159 return $status
160end
161
162function brain_custom
163 # Custom API endpoint (OpenAI-compatible)
164 if test -z "$ANT_API_URL"
165 echo "ERROR: ANT_API_URL not set" >&2
166 return 1
167 end
168
169 set -l messages "[]"
170 if test -n "$SYSTEM_PROMPT"
171 set messages (printf '[{"role":"system","content":"%s"},{"role":"user","content":"%s"}]' \
172 (echo $SYSTEM_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g') \
173 (echo $USER_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g'))
174 else
175 set messages (printf '[{"role":"user","content":"%s"}]' \
176 (echo $USER_PROMPT | sed 's/"/\\"/g; s/\n/\\n/g'))
177 end
178
179 set -l headers "-H 'Content-Type: application/json'"
180 if test -n "$ANT_API_KEY"
181 set headers "$headers -H 'Authorization: Bearer $ANT_API_KEY'"
182 end
183
184 set -l response (curl -s "$ANT_API_URL" \
185 -H "Content-Type: application/json" \
186 -H "Authorization: Bearer $ANT_API_KEY" \
187 -d "{\"model\":\"$MODEL\",\"messages\":$messages,\"max_tokens\":$MAX_TOKENS}" 2>&1)
188
189 echo $response | python3 -c "import sys,json; r=json.load(sys.stdin); print(r['choices'][0]['message']['content'])" 2>/dev/null
190 return $status
191end
192
193# --- Dispatch ---
194
195switch $PROVIDER
196 case gh-models
197 brain_gh_models
198 case claude-code
199 brain_claude_code
200 case openai
201 brain_openai
202 case ollama
203 brain_ollama
204 case custom
205 brain_custom
206 case '*'
207 echo "ERROR: Unknown provider '$PROVIDER'" >&2
208 exit 1
209end
210
211exit $status