// Script for easily configuring, using, switching and comparing local offline coding models
1import { runSetup } from "./commands/setup.js";
2import { listModels, listTuis } from "./commands/list.js";
3import { runBench } from "./commands/bench.js";
4import { runDefault } from "./commands/run.js";
5import { showStatus } from "./commands/status.js";
6import { startServers, stopServers } from "./commands/server.js";
7import { runPipe } from "./commands/pipe.js";
8import { runAsk } from "./commands/ask.js";
9import { getTuiById } from "./registry/tuis.js";
10import { getModelById } from "./registry/models.js";
11
// ANSI SGR escape sequences used to style the terminal help output.
// NOTE(review): DIM is not referenced anywhere in this file — confirm it is
// used elsewhere (or intended for future output) before removing.
const BOLD = "\x1b[1m";
const DIM = "\x1b[2m";
const RESET = "\x1b[0m";
15
/** Print the top-level CLI help text (all commands and their one-line descriptions) to stdout. */
function printUsage(): void {
  console.log(`
${BOLD}localcode${RESET} — local AI coding environment (Ollama)

${BOLD}Launch:${RESET}
  localcode                 Launch with defaults
  localcode <tui>           Launch with specified TUI
  localcode <model>         Launch with specified model
  localcode <tui> <model>   Launch with both overrides

${BOLD}Server:${RESET}
  localcode start           Start Ollama + pull models
  localcode stop            Stop Ollama
  localcode status          Show config and server health

${BOLD}List:${RESET}
  localcode models          List available models
  localcode tuis            List available TUIs

${BOLD}Benchmark:${RESET}
  localcode bench           Benchmark the running chat model
  localcode bench history   Show past benchmark results

${BOLD}Quick:${RESET}
  localcode ask "question"  Quick coding Q&A (streamed)
  localcode pipe "prompt"   Pipe stdin through the model

${BOLD}Other:${RESET}
  localcode setup           Full install (Ollama, models, tools)
`);
}
47
/**
 * Result of classifying positional launch arguments: optional model/TUI
 * selections plus everything that matched neither registry.
 */
interface LaunchOverrides {
  /** Model id, set when an argument matched the model registry. */
  model?: string;
  /** TUI id, set when an argument matched the TUI registry. */
  tui?: string;
  /** Arguments that matched neither registry, kept in original order. */
  passthrough: string[];
}
53
54function parseLaunchArgs(argv: string[]): LaunchOverrides {
55 const overrides: LaunchOverrides = { passthrough: [] };
56 for (const arg of argv) {
57 if (getTuiById(arg)) {
58 overrides.tui = arg;
59 } else if (getModelById(arg)) {
60 overrides.model = arg;
61 } else {
62 overrides.passthrough.push(arg);
63 }
64 }
65 return overrides;
66}
67
68async function main(): Promise<void> {
69 const cmd = process.argv[2];
70
71 switch (cmd) {
72 case "status":
73 await showStatus();
74 break;
75
76 case "start":
77 await startServers();
78 break;
79
80 case "stop":
81 stopServers();
82 break;
83
84 case "models":
85 listModels();
86 break;
87
88 case "tuis":
89 listTuis();
90 break;
91
92 case "bench": {
93 const sub = process.argv[3];
94 if (sub === "history") {
95 await runBench(["--history"]);
96 } else {
97 await runBench(process.argv.slice(3));
98 }
99 break;
100 }
101
102 case "ask": {
103 const question = process.argv.slice(3).join(" ");
104 if (!question) {
105 console.error("Usage: localcode ask \"question\"");
106 process.exit(1);
107 }
108 await runAsk(question);
109 break;
110 }
111
112 case "pipe": {
113 const prompt = process.argv[3] ?? "Improve this code";
114 await runPipe(prompt);
115 break;
116 }
117
118 case "setup":
119 await runSetup();
120 break;
121
122 case "help":
123 case "--help":
124 case "-h":
125 printUsage();
126 break;
127
128 default: {
129 // Everything else is a launch command with optional overrides:
130 // localcode
131 // localcode tui aider
132 // localcode model qwen3-coder
133 // localcode model qwen3-coder tui goose
134 const args = process.argv.slice(2);
135 const overrides = parseLaunchArgs(args);
136 await runDefault(overrides);
137 break;
138 }
139 }
140}
141
// Entry point: run the dispatcher and surface any unhandled rejection,
// exiting non-zero so shell callers can detect failure.
main().catch((e: unknown) => {
  console.error(e);
  process.exit(1);
});