// Script for easily configuring, using, switching and comparing local offline coding models
1import { checkPreflight } from "../steps/preflight.js";
2import { installHomebrew } from "../steps/homebrew.js";
3import { installOllama } from "../steps/llama.js";
4import { downloadModels } from "../steps/models.js";
5import { installTools } from "../steps/tools.js";
6import { createLauncherScripts } from "../steps/scripts.js";
7import { writeTuiConfig } from "../steps/aider-config.js";
8import { addToPath } from "../steps/shell-path.js";
9import { OLLAMA_PORT } from "../config.js";
10import {
11 getActiveChatModel,
12 getActiveAutocompleteModel,
13 getActiveTui,
14} from "../runtime-config.js";
15
// ANSI terminal escape sequences used to style the summary output.
const BOLD = "\x1b[1m";
const GREEN = "\x1b[0;32m";
const RESET = "\x1b[0m";
19
20function printSummary(): void {
21 const chatModel = getActiveChatModel();
22 const autocompleteModel = getActiveAutocompleteModel();
23 const tui = getActiveTui();
24 const shellRC =
25 process.env.SHELL?.endsWith("/zsh") !== false
26 ? "~/.zshrc"
27 : process.env.SHELL?.endsWith("/bash")
28 ? "~/.bashrc"
29 : "~/.profile";
30
31 console.log("");
32 console.log(
33 `${GREEN}${BOLD}═══════════════════════════════════════════════════${RESET}`,
34 );
35 console.log(`${GREEN}${BOLD} Setup complete!${RESET}`);
36 console.log(
37 `${GREEN}${BOLD}═══════════════════════════════════════════════════${RESET}`,
38 );
39 console.log("");
40 console.log(` ${BOLD}Backend:${RESET} Ollama on port ${OLLAMA_PORT}`);
41 console.log(
42 ` Chat: ${chatModel.name} (${chatModel.ollamaTag})`,
43 );
44 console.log(
45 ` Autocomplete: ${autocompleteModel.name} (${autocompleteModel.ollamaTag})`,
46 );
47 console.log("");
48 console.log(` ${BOLD}Active TUI:${RESET} ${tui.name}`);
49 console.log("");
50 console.log(` ${BOLD}Usage:${RESET}`);
51 console.log(` ${BOLD}localcode${RESET} Launch ${tui.name} in current directory`);
52 console.log(` ${BOLD}localcode start${RESET} Start Ollama + pull models`);
53 console.log(` ${BOLD}localcode stop${RESET} Stop Ollama`);
54 console.log(` ${BOLD}localcode status${RESET} Show config and server health`);
55 console.log("");
56 console.log(
57 ` Run ${BOLD}source ${shellRC}${RESET} or open a new terminal to get started.`,
58 );
59 console.log("");
60}
61
/**
 * Runs the full installer pipeline in order: preflight checks, Homebrew,
 * Ollama, model downloads, tool installs, launcher scripts, TUI config,
 * PATH setup — then prints the completion summary.
 *
 * Step order matters: each step assumes the previous ones have completed.
 * NOTE(review): only createLauncherScripts/writeTuiConfig are awaited; the
 * other steps are invoked synchronously — confirm they are not async, or
 * their returned promises would float unhandled.
 *
 * @returns resolves once every step and the summary have finished.
 */
export async function runSetup(): Promise<void> {
  console.log(
    `\n${BOLD}Local AI Coding Environment Installer (Ollama)${RESET}\n`,
  );

  checkPreflight();
  installHomebrew();
  installOllama();
  downloadModels();
  installTools();
  await createLauncherScripts();
  await writeTuiConfig();
  addToPath();
  printSummary();
}