Script for easily configuring, using, switching, and comparing local offline coding models.
1import { OLLAMA_PORT } from "../config.js"; 2 3export function opencodeConfig(modelId: string, modelName: string): string { 4 return JSON.stringify( 5 { 6 model: `ollama/${modelId}`, 7 provider: { 8 ollama: { 9 npm: "@ai-sdk/openai-compatible", 10 name: "Ollama (local)", 11 options: { 12 baseURL: `http://127.0.0.1:${OLLAMA_PORT}/v1`, 13 apiKey: "not-needed", 14 }, 15 models: { 16 [modelId]: { 17 name: modelName, 18 }, 19 }, 20 }, 21 }, 22 }, 23 null, 24 2, 25 ) + "\n"; 26}