// Script for easily configuring, using, switching and comparing local offline coding models
1import { execSync } from "node:child_process";
2import { MODELS, getChatModels, getAutocompleteModels, getModelById } from "../registry/models.js";
3import { loadConfig, saveConfig, getActiveChatModel, getActiveAutocompleteModel } from "../runtime-config.js";
4import { createLauncherScripts } from "../steps/scripts.js";
5import { writeTuiConfig } from "../steps/aider-config.js";
6import { log, err } from "../log.js";
7import { runPassthrough } from "../util.js";
8
// ANSI escape sequences for terminal formatting (used by listModels output).
const BOLD = "\x1b[1m"; // bold text
const GREEN = "\x1b[0;32m"; // green foreground — marks the active model
const DIM = "\x1b[2m"; // dim/faint text — de-emphasizes tags and "(not pulled)"
const RESET = "\x1b[0m"; // reset all attributes
13
14function isPulled(ollamaTag: string): boolean {
15 try {
16 const output = execSync("ollama list", { encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] });
17 return output.includes(ollamaTag);
18 } catch {
19 return false;
20 }
21}
22
23export function listModels(): void {
24 const activeChatId = getActiveChatModel().id;
25 const activeAutoId = getActiveAutocompleteModel().id;
26
27 console.log(`\n${BOLD}Chat models:${RESET}`);
28 for (const m of getChatModels()) {
29 const active = m.id === activeChatId ? ` ${GREEN}<- active${RESET}` : "";
30 const pulled = isPulled(m.ollamaTag) ? "" : ` ${DIM}(not pulled)${RESET}`;
31 console.log(
32 ` ${BOLD}${m.id}${RESET} ${m.name} ${DIM}${m.ollamaTag}${RESET}${active}${pulled}`,
33 );
34 }
35
36 console.log(`\n${BOLD}Autocomplete models:${RESET}`);
37 for (const m of getAutocompleteModels()) {
38 const active = m.id === activeAutoId ? ` ${GREEN}<- active${RESET}` : "";
39 const pulled = isPulled(m.ollamaTag) ? "" : ` ${DIM}(not pulled)${RESET}`;
40 console.log(
41 ` ${BOLD}${m.id}${RESET} ${m.name} ${DIM}${m.ollamaTag}${RESET}${active}${pulled}`,
42 );
43 }
44 console.log("");
45}
46
47export async function setChatModel(id: string): Promise<void> {
48 const model = getModelById(id);
49 if (!model) {
50 err(`Unknown model: ${id}\nAvailable: ${MODELS.map((m) => m.id).join(", ")}`);
51 }
52 if (model.role !== "chat") {
53 err(`${id} is an ${model.role} model, not a chat model.`);
54 }
55
56 const config = loadConfig();
57 config.chatModel = id;
58 saveConfig(config);
59 log(`Chat model set to ${model.name}`);
60
61 // Pull if needed
62 if (!isPulled(model.ollamaTag)) {
63 log(`Pulling ${model.name} (${model.ollamaTag})...`);
64 runPassthrough(`ollama pull ${model.ollamaTag}`);
65 }
66
67 // Regenerate configs
68 await createLauncherScripts();
69 await writeTuiConfig();
70 log("Configs regenerated.");
71}
72
73export async function setAutocompleteModel(id: string): Promise<void> {
74 const model = getModelById(id);
75 if (!model) {
76 err(`Unknown model: ${id}\nAvailable: ${MODELS.map((m) => m.id).join(", ")}`);
77 }
78 if (model.role !== "autocomplete") {
79 err(`${id} is a ${model.role} model, not an autocomplete model.`);
80 }
81
82 const config = loadConfig();
83 config.autocompleteModel = id;
84 saveConfig(config);
85 log(`Autocomplete model set to ${model.name}`);
86
87 // Pull if needed
88 if (!isPulled(model.ollamaTag)) {
89 log(`Pulling ${model.name} (${model.ollamaTag})...`);
90 runPassthrough(`ollama pull ${model.ollamaTag}`);
91 }
92
93 await createLauncherScripts();
94 await writeTuiConfig();
95 log("Configs regenerated.");
96}