// Script for easily configuring, using, switching and comparing local offline coding models
1import { OLLAMA_URL, OLLAMA_PORT } from "../config.js";
2import {
3 getActiveChatModel,
4 getActiveAutocompleteModel,
5 getActiveTui,
6} from "../runtime-config.js";
7
// ANSI escape sequences for styling terminal output.
const BOLD = "\x1b[1m";
const GREEN = "\x1b[0;32m"; // used for the "running" status
const RED = "\x1b[0;31m"; // used for the "stopped" status
const DIM = "\x1b[2m"; // secondary detail (model tags, TUI id)
const RESET = "\x1b[0m"; // clear all active styling
13
14async function checkOllama(): Promise<boolean> {
15 try {
16 const res = await fetch(`${OLLAMA_URL}/api/tags`);
17 return res.ok;
18 } catch {
19 return false;
20 }
21}
22
/**
 * Print the current localcode configuration to stdout: the active chat and
 * autocomplete models, the active TUI, and whether the local Ollama server
 * is reachable on its configured port.
 *
 * @returns Resolves once the status banner has been written.
 */
export async function showStatus(): Promise<void> {
  const chatModel = getActiveChatModel();
  const autoModel = getActiveAutocompleteModel();
  const tui = getActiveTui();

  // Live probe of the Ollama HTTP API — network round trip, hence the await.
  const ollamaOk = await checkOllama();

  // Pre-rendered colored status labels.
  const on = `${GREEN}running${RESET}`;
  const off = `${RED}stopped${RESET}`;

  // The template literal is intentionally left-flush: inner lines of the
  // literal carry their own indentation straight into the output.
  console.log(`
${BOLD}localcode${RESET} — current configuration

  ${BOLD}Chat model:${RESET} ${chatModel.name} ${DIM}(${chatModel.ollamaTag})${RESET}
  ${BOLD}Autocomplete model:${RESET} ${autoModel.name} ${DIM}(${autoModel.ollamaTag})${RESET}
  ${BOLD}Active TUI:${RESET} ${tui.name} ${DIM}(${tui.id})${RESET}

  ${BOLD}Ollama:${RESET} :${OLLAMA_PORT} ${ollamaOk ? on : off}
`);
}