Script for easily configuring, using, switching and comparing local offline coding models
at main 20 lines 453 B view raw
{
  "$schema": "https://opencode.ai/config.json",
  "model": "llama-cpp/qwen2.5-coder-32b",
  "provider": {
    "llama-cpp": {
      "npm": "@ai-sdk/openai-compatible",
      "name": "llama.cpp (local)",
      "options": {
        "baseURL": "http://127.0.0.1:8080/v1",
        "apiKey": "not-needed"
      },
      "models": {
        "qwen2.5-coder-32b": {
          "name": "Qwen 2.5 Coder 32B",
          "tools": true
        }
      }
    }
  }
}