// Script for easily configuring, using, switching and comparing local offline coding models.
1import { spawn, execSync } from "node:child_process";
2import { existsSync } from "node:fs";
3import { OLLAMA_PORT } from "../config.js";
4import {
5 loadConfig,
6 saveConfig,
7 getActiveTui,
8 getActiveChatModel,
9} from "../runtime-config.js";
10import { getModelById } from "../registry/models.js";
11import { getTuiById } from "../registry/tuis.js";
12import { log, err } from "../log.js";
13import { ensureOllama, pullIfNeeded } from "./server.js";
14import { commandExists, runPassthrough } from "../util.js";
15import type { ModelDef } from "../registry/models.js";
16import type { TuiDef } from "../registry/tuis.js";
17
/** Per-invocation CLI overrides applied when launching a coding TUI. */
export interface LaunchOverrides {
  /** Chat-model id to use; when set it is also persisted as the new default. */
  model?: string;
  /** TUI id to use; when set it is also persisted as the new default. */
  tui?: string;
  /** Remaining CLI args forwarded verbatim to the launched TUI. */
  passthrough: string[];
}
23
24function ensureGit(): void {
25 if (!existsSync(".git")) {
26 log("Initializing git repo...");
27 execSync("git init", { stdio: "inherit" });
28 execSync("git add -A", { stdio: "inherit" });
29 execSync('git commit -m "Initial commit (before AI edits)" --allow-empty', {
30 stdio: "inherit",
31 });
32 }
33}
34
35function resolveOverrides(overrides: LaunchOverrides): {
36 chatModel: ModelDef;
37 tui: TuiDef;
38} {
39 const config = loadConfig();
40 let changed = false;
41
42 let chatModel: ModelDef;
43 if (overrides.model) {
44 const m = getModelById(overrides.model);
45 if (!m) err(`Unknown model: ${overrides.model}`);
46 if (m.role !== "chat") err(`${overrides.model} is not a chat model.`);
47 chatModel = m;
48 config.chatModel = m.id;
49 changed = true;
50 } else {
51 chatModel = getActiveChatModel();
52 }
53
54 let tui: TuiDef;
55 if (overrides.tui) {
56 const t = getTuiById(overrides.tui);
57 if (!t) err(`Unknown TUI: ${overrides.tui}`);
58 tui = t;
59 config.tui = t.id;
60 changed = true;
61 } else {
62 tui = getActiveTui();
63 }
64
65 if (changed) saveConfig(config);
66
67 return { chatModel, tui };
68}
69
70function ensureTuiInstalled(tui: TuiDef): void {
71 // ollama launch handles installation
72 if (tui.ollamaLaunch) return;
73
74 if (!commandExists(tui.checkCmd)) {
75 log(`Installing ${tui.name}...`);
76 runPassthrough(tui.installCmd);
77 }
78}
79
80export async function runDefault(overrides: LaunchOverrides): Promise<void> {
81 await ensureOllama();
82 const { chatModel, tui } = resolveOverrides(overrides);
83 await pullIfNeeded(chatModel.ollamaTag, chatModel.name);
84 ensureTuiInstalled(tui);
85 ensureGit();
86
87 const args = overrides.passthrough;
88 let tuiArgs: string[];
89 let tuiCmd: string;
90 const env = { ...process.env };
91
92 if (tui.ollamaLaunch) {
93 // ollama launch handles all config automatically
94 tuiCmd = "ollama";
95 tuiArgs = ["launch", tui.ollamaLaunch, "--model", chatModel.ollamaTag, ...args];
96 } else {
97 tuiCmd = tui.checkCmd;
98
99 switch (tui.id) {
100 case "aider":
101 env.OPENAI_API_KEY = "sk-not-needed";
102 env.OPENAI_API_BASE = `http://127.0.0.1:${OLLAMA_PORT}/v1`;
103 tuiArgs = [
104 "--model", `openai/${chatModel.id}`,
105 "--no-show-model-warnings",
106 "--no-check-update",
107 ...args,
108 ];
109 break;
110
111 case "goose":
112 env.GOOSE_PROVIDER = "ollama";
113 env.GOOSE_MODEL = chatModel.ollamaTag;
114 env.OLLAMA_HOST = `http://127.0.0.1:${OLLAMA_PORT}`;
115 tuiArgs = [...args];
116 break;
117
118 case "gptme":
119 env.OPENAI_BASE_URL = `http://127.0.0.1:${OLLAMA_PORT}/v1`;
120 tuiArgs = ["--model", `local/${chatModel.ollamaTag}`, ...args];
121 break;
122
123 default:
124 tuiArgs = [...args];
125 break;
126 }
127
128 if (tui.resumeArgs) {
129 tuiArgs.push(...tui.resumeArgs);
130 }
131 }
132
133 log(`Launching ${tui.name} with ${chatModel.name}...`);
134 const child = spawn(tuiCmd, tuiArgs, { stdio: "inherit", env });
135 child.on("exit", (code) => process.exit(code ?? 0));
136}