Reference implementation for the Phoenix Architecture. Work in progress.
https://aicoding.leaflet.pub/
Topics: AI, coding.
/**
 * LLM Provider Resolution — auto-detect, preference, config.
 *
 * Priority order:
 *   1. PHOENIX_LLM_PROVIDER env var (explicit override)
 *   2. Saved preference in .phoenix/config.json
 *   3. Auto-detect from available API keys:
 *      - ANTHROPIC_API_KEY → anthropic
 *      - OPENAI_API_KEY → openai
 *      If both are present, prefer anthropic.
 *   4. null (no provider available — fall back to stubs)
 */
13
14import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
15import { join } from 'node:path';
16import type { LLMProvider, LLMConfig } from './provider.js';
17import { DEFAULT_MODELS } from './provider.js';
18import { AnthropicProvider } from './anthropic.js';
19import { OpenAIProvider } from './openai.js';
20
/** Shape of the persisted .phoenix/config.json file. All fields optional. */
interface PhoenixConfig {
  llm?: LLMConfig; // saved provider/model preference, written by resolveProvider
}
24
25/**
26 * Resolve the LLM provider. Returns null if no provider is available.
27 */
28export function resolveProvider(phoenixDir?: string): LLMProvider | null {
29 const config = phoenixDir ? loadConfig(phoenixDir) : {};
30
31 // 1. Explicit env var override
32 const envProvider = process.env.PHOENIX_LLM_PROVIDER;
33 const envModel = process.env.PHOENIX_LLM_MODEL;
34
35 // 2. Determine provider name
36 let providerName = envProvider || config.llm?.provider || detectProvider();
37 if (!providerName) return null;
38
39 // 3. Determine model
40 const model = envModel || config.llm?.model || DEFAULT_MODELS[providerName] || DEFAULT_MODELS.anthropic;
41
42 // 4. Build provider
43 const provider = buildProvider(providerName, model);
44 if (!provider) return null;
45
46 // 5. Save preference if we detected it (and have a phoenix dir)
47 if (phoenixDir && !config.llm) {
48 saveConfig(phoenixDir, {
49 ...config,
50 llm: { provider: providerName, model },
51 });
52 }
53
54 return provider;
55}
56
57/**
58 * Auto-detect which provider is available from env vars.
59 */
60function detectProvider(): string | null {
61 if (process.env.ANTHROPIC_API_KEY) return 'anthropic';
62 if (process.env.OPENAI_API_KEY) return 'openai';
63 return null;
64}
65
66/**
67 * Build a provider instance.
68 */
69function buildProvider(name: string, model: string): LLMProvider | null {
70 switch (name) {
71 case 'anthropic': {
72 const key = process.env.ANTHROPIC_API_KEY;
73 if (!key) return null;
74 return new AnthropicProvider(key, model);
75 }
76 case 'openai': {
77 const key = process.env.OPENAI_API_KEY;
78 if (!key) return null;
79 return new OpenAIProvider(key, model);
80 }
81 default:
82 return null;
83 }
84}
85
86/**
87 * Load Phoenix config from .phoenix/config.json.
88 */
89function loadConfig(phoenixDir: string): PhoenixConfig {
90 const configPath = join(phoenixDir, 'config.json');
91 if (!existsSync(configPath)) return {};
92 try {
93 return JSON.parse(readFileSync(configPath, 'utf8'));
94 } catch {
95 return {};
96 }
97}
98
99/**
100 * Save Phoenix config to .phoenix/config.json.
101 */
102function saveConfig(phoenixDir: string, config: PhoenixConfig): void {
103 mkdirSync(phoenixDir, { recursive: true });
104 writeFileSync(
105 join(phoenixDir, 'config.json'),
106 JSON.stringify(config, null, 2) + '\n',
107 'utf8',
108 );
109}
110
111/**
112 * Describe which providers are available (for CLI help).
113 */
114export function describeAvailability(): { available: string[]; configured: string | null; hint: string } {
115 const available: string[] = [];
116 if (process.env.ANTHROPIC_API_KEY) available.push('anthropic');
117 if (process.env.OPENAI_API_KEY) available.push('openai');
118
119 const configured = process.env.PHOENIX_LLM_PROVIDER || null;
120
121 let hint: string;
122 if (available.length === 0) {
123 hint = 'No LLM API keys found. Set ANTHROPIC_API_KEY or OPENAI_API_KEY to enable code generation. Falling back to stubs.';
124 } else if (available.length === 1) {
125 hint = `Using ${available[0]} (detected from env).`;
126 } else {
127 hint = `Multiple providers available: ${available.join(', ')}. Using ${configured || available[0]}. Set PHOENIX_LLM_PROVIDER to override.`;
128 }
129
130 return { available, configured, hint };
131}