// Monorepo for Aesthetic.Computer — aesthetic.computer
1import http from 'http';
2import { URL } from 'url';
3import { WebSocketServer } from 'ws';
4import { MongoClient } from 'mongodb';
5import dotenv from 'dotenv';
6import { fileURLToPath } from 'url';
7import { dirname, join } from 'path';
8
// Load environment variables from the vault's .env file, resolved
// relative to this module's location on disk (ESM has no __dirname).
const moduleDir = dirname(fileURLToPath(import.meta.url));
dotenv.config({ path: join(moduleDir, '../aesthetic-computer-vault/censor/.env') });
14
// Ollama generate endpoint and the model used for classification.
const OLLAMA_API = 'http://localhost:11434/api/generate';
const MODEL = 'gemma2:2b';
// Port for both the HTTP API and the WebSocket server below.
const PORT = 3000;

// MongoDB connection
// Settings come from the vault .env loaded above; the || fallbacks point at
// a local dev database so the service still boots without the vault.
const MONGODB_CONNECTION_STRING = process.env.MONGODB_CONNECTION_STRING || 'mongodb://localhost:27017';
const MONGODB_NAME = process.env.MONGODB_NAME || 'aesthetic';
const COLLECTION_NAME = process.env.COLLECTION_NAME || 'chat-system';
// Lazily initialized by connectMongo(); shared by all requests.
let mongoClient = null;
let chatCollection = null;
25
/**
 * Lazily connect to MongoDB and cache the chat collection handle.
 *
 * Fix: the original gated on `mongoClient`, so if `connect()` threw, the
 * client stayed non-null and every subsequent call skipped the connection
 * attempt and returned a null collection forever. We now gate on the
 * fully-initialized `chatCollection` and reset the client on failure so a
 * later request can retry.
 *
 * @returns {Promise<import('mongodb').Collection>} the chat-system collection
 * @throws {Error} when the server cannot be reached within the timeouts
 */
async function connectMongo() {
  if (chatCollection) return chatCollection;

  if (!mongoClient) {
    mongoClient = new MongoClient(MONGODB_CONNECTION_STRING, {
      serverSelectionTimeoutMS: 2000, // Timeout after 2 seconds
      connectTimeoutMS: 2000,
    });
  }

  try {
    await mongoClient.connect();
  } catch (error) {
    // Drop the half-initialized client so the next call starts fresh.
    mongoClient = null;
    throw error;
  }

  chatCollection = mongoClient.db(MONGODB_NAME).collection(COLLECTION_NAME);
  console.log('📦 Connected to MongoDB');
  return chatCollection;
}
38
// PG-13 content filter prompt
// NOTE: filterMessage() parses the model's reply with a `(yes)` substring
// check and a /\(why\s+"..."\)/ regex, so the s-expression format mandated
// here is load-bearing — keep prompt and parser in sync.
const systemPrompt = `You are a PG-13 content filter for a chat room. Respond in s-expression format.

ALWAYS reply with: (yes) (why "short lowercase explanation") or (no) (why "short lowercase explanation")

Reply (yes) for URLs (http://, https://, www., or domain.tld patterns).

Reply (no) if the message contains:
- Sexual content or innuendo
- Body functions (bathroom humor, gross-out content)
- Profanity or explicit language
- Violence or threats
- Drug references
- Hate speech or slurs

Reply (yes) for:
- Normal conversation
- URLs and links
- Questions and discussions
- Greetings and casual chat

Example responses:
(yes) (why "normal greeting")
(no) (why "contains profanity")
(yes) (why "url allowed")

Keep all explanations lowercase and brief. Reply only in this s-expression format.`;
66
/**
 * Run one chat message through the Ollama model and parse a verdict.
 *
 * @param {string} message - Chat message to classify.
 * @param {?function({chunk: string, full: string}): void} onChunk -
 *   Optional callback invoked once per streamed token.
 * @returns {Promise<{decision: ('t'|'f'), sentiment: string, reason: string, responseTime: number}>}
 *   decision 't' = allowed, 'f' = blocked; sentiment is the raw model reply;
 *   responseTime is Ollama's total_duration converted to seconds.
 * @throws {Error} when the Ollama API responds with a non-2xx status.
 */
async function filterMessage(message, onChunk = null) {
  // Fix: the trailing cue used to say "Reply (t/f):", contradicting the
  // s-expression format the system prompt mandates (and that the parser
  // below expects). A bare "Reply:" keeps the model on-format.
  const prompt = `${systemPrompt}\n\nMessage: "${message}"\n\nReply:`;

  const response = await fetch(OLLAMA_API, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: MODEL,
      prompt: prompt,
      stream: true, // Enable streaming
    }),
  });

  if (!response.ok) {
    throw new Error(`Ollama API error: ${response.status}`);
  }

  let fullResponse = '';
  let totalDuration = 0;

  // Ollama streams newline-delimited JSON objects. Network chunks do not
  // align with line boundaries, so buffer partial lines across reads
  // instead of silently dropping objects split mid-chunk.
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let pending = '';

  // Parse one NDJSON line, accumulating tokens and the final duration.
  const consumeLine = (line) => {
    if (!line.trim()) return;
    try {
      const data = JSON.parse(line);
      if (data.response) {
        fullResponse += data.response;
        if (onChunk) {
          onChunk({ chunk: data.response, full: fullResponse });
        }
      }
      if (data.total_duration) {
        totalDuration = data.total_duration;
      }
    } catch (e) {
      // Skip invalid JSON lines
    }
  };

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    // { stream: true } keeps multi-byte UTF-8 sequences that straddle
    // chunk boundaries intact (the original decode() could mangle them).
    pending += decoder.decode(value, { stream: true });
    const lines = pending.split('\n');
    pending = lines.pop(); // last element may be an incomplete line
    for (const line of lines) consumeLine(line);
  }
  pending += decoder.decode(); // flush any buffered trailing bytes
  consumeLine(pending);

  const finalResponse = fullResponse.trim();

  // Parse s-expression: (yes) or (no) — anything without (yes) fails closed.
  const decision = finalResponse.toLowerCase().includes('(yes)') ? 't' : 'f';

  // Extract (why "...") — proper s-expression format, escaped quotes allowed.
  let reason = '';
  const reasonMatch = finalResponse.match(/\(why\s+"((?:[^"\\]|\\.)*)"\)/is);
  if (reasonMatch) {
    reason = reasonMatch[1].toLowerCase().trim(); // Force lowercase and trim
  }

  return {
    decision: decision,
    sentiment: finalResponse,
    reason: reason,
    responseTime: totalDuration / 1e9, // nanoseconds → seconds
  };
}
135
// HTTP API:
//   OPTIONS *                — CORS preflight
//   GET  /api/chat-messages  — random sample of 100 real messages (auto-test corpus)
//   POST /api/filter         — classify one message; responds with the parsed verdict
const server = http.createServer(async (req, res) => {
  // CORS headers
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader('Access-Control-Allow-Methods', 'POST, OPTIONS');
  res.setHeader('Access-Control-Allow-Headers', 'Content-Type');

  if (req.method === 'OPTIONS') {
    res.writeHead(200);
    res.end();
    return;
  }

  if (req.method === 'GET' && req.url.startsWith('/api/chat-messages')) {
    // Fetch random sample of chat messages for auto-testing
    try {
      const collection = await connectMongo();
      const messages = await collection
        .aggregate([
          { $match: { content: { $exists: true, $ne: '' } } },
          { $sample: { size: 100 } },
          { $project: { content: 1, _id: 0 } }
        ])
        .toArray();

      // Respond with a bare array of message strings.
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify(messages.map(m => m.content)));
    } catch (error) {
      console.error('MongoDB error:', error);
      // Fallback to test messages if MongoDB unavailable
      // (deliberately mixes benign and offensive samples so both the
      // pass and fail paths of the filter get exercised)
      const testMessages = [
        "hello world",
        "how are you doing today?",
        "fuck you",
        "shit this sucks",
        "nice painting!",
        "what time is it?",
        "https://example.com/image.png",
        "i hate you so much",
        "beautiful colors",
        "damn that's cool",
        "check out my site www.example.com",
        "you're an idiot",
        "love this piece",
        "kill yourself",
        "great work!",
        "this is garbage",
        "can you help me?",
        "stupid ass program",
        "amazing art",
        "go to hell"
      ];
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify(testMessages));
    }
  } else if (req.method === 'POST' && req.url === '/api/filter') {
    let body = '';

    // Accumulate the raw request body.
    // NOTE(review): no size cap is enforced — consider limiting body size
    // if this service is ever exposed beyond localhost.
    req.on('data', chunk => {
      body += chunk.toString();
    });

    req.on('end', async () => {
      try {
        const { message } = JSON.parse(body);

        // Reject missing or non-string payloads with 400.
        if (!message || typeof message !== 'string') {
          res.writeHead(400, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: 'Invalid message' }));
          return;
        }

        // Log a truncated (50-char) preview of the message under test.
        console.log(`[${new Date().toISOString()}] Testing: "${message.substring(0, 50)}${message.length > 50 ? '...' : ''}"`);

        const result = await filterMessage(message);

        console.log(` → Decision: ${result.decision === 't' ? 'PASS' : 'FAIL'} (${(result.responseTime * 1000).toFixed(0)}ms)`);

        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(result));
      } catch (error) {
        // Covers both JSON.parse failures and Ollama errors.
        console.error('Error:', error);
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: error.message }));
      }
    });
  } else {
    res.writeHead(404, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'Not found' }));
  }
});
226
// Track warmup state
// `isWarmedUp` flips true after one successful filter call; `warmupPromise`
// lets concurrent callers share a single in-flight warmup attempt.
let isWarmedUp = false;
let warmupPromise = null;

// Issue a throwaway request so Ollama loads the model before real traffic.
// Idempotent: a completed warmup short-circuits, and callers arriving while
// a warmup is in flight await the same promise. A failed warmup is logged
// but not rethrown — the first real request simply pays the load cost.
async function warmupModel() {
  if (isWarmedUp) return;
  if (warmupPromise) return warmupPromise;

  const runWarmup = async () => {
    console.log(`⏳ Warming up model...`);
    try {
      await filterMessage('hello');
      isWarmedUp = true;
      console.log(`✅ Model warmed up and ready!\n`);
    } catch (error) {
      console.log(`⚠️ Model warmup failed: ${error.message}`);
      console.log(`   First request may be slower.\n`);
    }
  };

  warmupPromise = runWarmup();
  return warmupPromise;
}
249
// Boot the HTTP/WebSocket server, then pre-load the model in the background
// so the first real request does not pay the model-load cost.
server.listen(PORT, async () => {
  console.log(`🛡️ Content Filter API running on http://localhost:${PORT}`);
  console.log(`📊 Dashboard available via Caddy on http://localhost:8080`);
  console.log(`🤖 Using model: ${MODEL}\n`);

  // Start warmup (don't block server startup)
  warmupModel().catch(err => console.error('❌ Warmup error:', err));
});
258
// Handle uncaught errors
// NOTE(review): logging-and-continuing on uncaughtException keeps the
// service alive but may leave it in an undefined state; Node's docs
// recommend exiting after cleanup — confirm this trade-off is intended.
process.on('uncaughtException', (err) => {
  console.error('❌ Uncaught exception:', err);
});

process.on('unhandledRejection', (err) => {
  console.error('❌ Unhandled rejection:', err);
});
267
// WebSocket server for live streaming
// Protocol (all frames are JSON):
//   client → { message: string }
//   server → { type: 'warming' }                          model still loading
//          | { type: 'start' }                            filtering began
//          | { type: 'chunk', chunk, full }               one streamed token
//          | { type: 'complete', decision, sentiment,
//              reason, responseTime }                     final verdict (ms, wall-clock)
//          | { type: 'error', error } | { error }         failure / bad input
const wss = new WebSocketServer({ server, path: '/ws' });

wss.on('connection', (ws) => {
  console.log('📡 WebSocket client connected');

  ws.on('message', async (data) => {
    try {
      // `data` is a Buffer; JSON.parse coerces it via toString().
      const { message } = JSON.parse(data);

      if (!message || typeof message !== 'string') {
        ws.send(JSON.stringify({ error: 'Invalid message' }));
        return;
      }

      // Wait for warmup to complete before processing
      // (warmupModel() is kicked off in server.listen, so warmupPromise
      // is set by the time any client can connect).
      if (!isWarmedUp) {
        ws.send(JSON.stringify({ type: 'warming', message: 'Model warming up...' }));
        await warmupPromise;
      }

      // Log a truncated (50-char) preview of the message under test.
      console.log(`[${new Date().toISOString()}] Streaming test: "${message.substring(0, 50)}${message.length > 50 ? '...' : ''}"`);

      const startTime = Date.now();

      // Send start event
      ws.send(JSON.stringify({ type: 'start' }));

      // Filter with streaming: relay each model token to the client as it arrives.
      const result = await filterMessage(message, (update) => {
        ws.send(JSON.stringify({
          type: 'chunk',
          chunk: update.chunk,
          full: update.full
        }));
      });

      // Wall-clock time, unlike the HTTP route which reports Ollama's own duration.
      const responseTime = Date.now() - startTime;

      console.log(` → Decision: ${result.decision === 't' ? 'PASS' : 'FAIL'} (${responseTime}ms)`);

      // Send final result
      ws.send(JSON.stringify({
        type: 'complete',
        decision: result.decision,
        sentiment: result.sentiment,
        reason: result.reason,
        responseTime: responseTime
      }));

    } catch (error) {
      console.error('WebSocket error:', error);
      ws.send(JSON.stringify({ type: 'error', error: error.message }));
    }
  });

  ws.on('close', () => {
    console.log('📡 WebSocket client disconnected');
  });
});