Aethel Bot OSS repository!
aethel.xyz
Topics: bot, fun, ai, discord, discord-bot, aethel
import { Message, ChannelType } from 'discord.js';
import BotClient from '@/services/Client';
import logger from '@/utils/logger';
import {
  makeAIRequest,
  getApiConfiguration,
  buildSystemPrompt,
  buildConversation,
  getUserCredentials,
  incrementAndCheckDailyLimit,
  incrementAndCheckServerDailyLimit,
  splitResponseIntoChunks,
  processUrls,
} from '@/commands/utilities/ai';
import type { ConversationMessage, AIResponse } from '@/commands/utilities/ai';
import { createMemoryManager } from '@/utils/memoryManager';

type ApiConfiguration = ReturnType<typeof getApiConfiguration>;

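// In-memory conversation stores: per-guild AI history, a rolling window of recent
// guild messages, and per-user DM history. Each is size-capped and expires entries
// after two hours, with cleanup every ten minutes.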
const serverConversations = createMemoryManager<string, ConversationMessage[]>({
  maxSize: 1000,
  maxAge: 2 * 60 * 60 * 1000,
  cleanupInterval: 10 * 60 * 1000,
});

const serverMessageContext = createMemoryManager<
  string,
  Array<{
    username: string;
    content: string;
    timestamp: number;
  }>
>({
  maxSize: 1000,
  maxAge: 2 * 60 * 60 * 1000,
  cleanupInterval: 10 * 60 * 1000,
});

const userConversations = createMemoryManager<string, ConversationMessage[]>({
  maxSize: 2000,
  maxAge: 2 * 60 * 60 * 1000,
  cleanupInterval: 10 * 60 * 1000,
});

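// Namespace the cache keys so guild and DM histories can never collide.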
function getServerConversationKey(guildId: string): string {
  return `server:${guildId}`;
}

function getUserConversationKey(userId: string): string {
  return `dm:${userId}`;
}

export default class MessageCreateEvent {
  constructor(private client: BotClient) {
    client.on('messageCreate', this.execute.bind(this));
  }

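  // Handles every incoming message: replies to DMs and direct mentions, and
  // records other guild messages only as channel context.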
  private async execute(message: Message): Promise<void> {
    logger.debug(`Message received in channel type: ${message.channel.type}`);

    if (message.author.bot) {
      logger.debug('Ignoring message from bot');
      return;
    }

    const isDM = message.channel.type === ChannelType.DM;
    const isMentioned =
      message.mentions.users.has(this.client.user!.id) && !message.mentions.everyone;

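    // Keep the last 10 guild messages per guild so a later mention can be answered
    // with recent channel context.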
    if (!isDM && message.guildId) {
      const contextKey = getServerConversationKey(message.guildId);
      const existingContext = serverMessageContext.get(contextKey) || [];

      const newMessage = {
        username: message.author.username,
        content: message.content,
        timestamp: Date.now(),
      };

      const updatedContext = [...existingContext, newMessage].slice(-10);
      serverMessageContext.set(contextKey, updatedContext);
    }

    if (!isDM && !isMentioned) {
      logger.debug(
        `Storing message for context but not responding - not a DM and bot not mentioned (channel type: ${message.channel.type})`,
      );
      return;
    }

    logger.info(isDM ? 'Processing DM message...' : 'Processing mention in server...');

    try {
      logger.debug(
        `${isDM ? 'DM' : 'Message'} received (${message.content.length} characters) - content hidden for privacy`,
      );

      const hasImageAttachments = message.attachments.some(
        (att) =>
          att.contentType?.startsWith('image/') ||
          att.name?.match(/\.(jpg|jpeg|png|gif|webp|bmp|svg)$/i),
      );

      logger.debug(`hasImageAttachments: ${hasImageAttachments}`);
      const hasImages = hasImageAttachments;

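      // Pick a model and API configuration. DMs may use the author's saved key, URL,
      // and model; guild mentions always run on the shared defaults. Messages with
      // images are routed to a vision-capable model.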
      let selectedModel: string;
      let config: ApiConfiguration;
      let usingDefaultKey = true;

      if (isDM) {
        const {
          model: userCustomModel,
          apiKey: userApiKey,
          apiUrl: userApiUrl,
        } = await getUserCredentials(`user:${message.author.id}`);

        selectedModel = hasImages
          ? 'google/gemma-3-4b-it'
          : userCustomModel || 'moonshotai/kimi-k2';

        config = getApiConfiguration(userApiKey ?? null, selectedModel, userApiUrl ?? null);
        usingDefaultKey = config.usingDefaultKey;
      } else {
        selectedModel = hasImages ? 'google/gemma-3-4b-it' : 'google/gemini-2.5-flash-lite';

        config = getApiConfiguration(null, selectedModel, null);
      }

      logger.info(
        `Using model: ${selectedModel} for message with images: ${hasImages}${
          isDM && !usingDefaultKey ? ' (user custom model)' : ' (default model)'
        }`,
      );

      const systemPrompt = buildSystemPrompt(
        usingDefaultKey,
        this.client,
        selectedModel,
        message.author.username,
        undefined,
        !isDM,
        !isDM ? message.guild?.name : undefined,
      );

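      // Build the outgoing content: plain text normally, or a multimodal array of
      // text and image_url parts when attachments are present. The bot mention is
      // stripped from guild messages.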
      let messageContent:
        | string
        | Array<{
            type: 'text' | 'image_url';
            text?: string;
            image_url?: {
              url: string;
              detail?: 'low' | 'high' | 'auto';
            };
          }> = isDM ? message.content : message.content.replace(/<@!?\d+>/g, '').trim();

      if (hasImages) {
        const imageAttachments = message.attachments.filter(
          (att) =>
            att.contentType?.startsWith('image/') ||
            att.name?.match(/\.(jpg|jpeg|png|gif|webp|bmp|svg)$/i),
        );

        const contentArray: Array<{
          type: 'text' | 'image_url';
          text?: string;
          image_url?: {
            url: string;
            detail?: 'low' | 'high' | 'auto';
          };
        }> = [];

        const cleanContent = isDM
          ? message.content
          : message.content.replace(/<@!?\d+>/g, '').trim();
        if (cleanContent.trim()) {
          contentArray.push({
            type: 'text',
            text: cleanContent,
          });
        }

        imageAttachments.forEach((att) => {
          contentArray.push({
            type: 'image_url',
            image_url: {
              url: att.url,
              detail: 'auto',
            },
          });
        });

        messageContent = contentArray;
      }

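      // Assemble prior history: the user's own thread for DMs; for guilds, recent
      // channel chatter plus the last few AI exchanges, each prefixed with the
      // author's username so the model can tell speakers apart.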
      let conversation: ConversationMessage[] = [];

      if (isDM) {
        const conversationKey = getUserConversationKey(message.author.id);
        conversation = userConversations.get(conversationKey) || [];
      } else {
        const serverKey = getServerConversationKey(message.guildId!);
        const serverConversation = serverConversations.get(serverKey) || [];
        const recentMessages = serverMessageContext.get(serverKey) || [];

        const contextMessages = recentMessages.slice(-6, -1).map((msg) => ({
          role: 'user' as const,
          content: `**${msg.username}**: ${msg.content}`,
          username: msg.username,
        }));

        const aiHistory = serverConversation.slice(-3);

        const formattedAiHistory = aiHistory.map((msg) => {
          if (msg.role === 'user' && msg.username) {
            const content = Array.isArray(msg.content)
              ? msg.content.map((c) => (c.type === 'text' ? c.text : '[Image]')).join(' ')
              : msg.content;
            return {
              ...msg,
              content: `**${msg.username}**: ${content}`,
            };
          }
          return msg;
        });

        conversation = [...contextMessages, ...formattedAiHistory];
      }

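      // kimi-k2 is used as a text-only route here, so flatten any multimodal
      // history entries down to their text parts before sending.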
      let filteredConversation = conversation;
      if (selectedModel === 'moonshotai/kimi-k2') {
        filteredConversation = conversation.map((msg) => {
          if (Array.isArray(msg.content)) {
            const textContent = msg.content
              .filter((item) => item.type === 'text')
              .map((item) => item.text)
              .join(' ');
            return { ...msg, content: textContent || '[Image content]' };
          }
          return msg;
        });
      }

      const updatedConversation = buildConversation(
        filteredConversation,
        messageContent,
        systemPrompt,
      );

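      // Daily limits only apply on the shared default key: 10 requests per user in
      // DMs, and a per-server cap that scales with member count (30/150/500).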
      if (config.usingDefaultKey) {
        const exemptUserId = process.env.AI_EXEMPT_USER_ID;
        const actorId = message.author.id;

        if (actorId !== exemptUserId) {
          if (isDM) {
            const allowed = await incrementAndCheckDailyLimit(actorId, 10);
            if (!allowed) {
              await message.reply(
                "❌ You've reached your daily limit of AI requests. Please try again tomorrow or set up your own API key using the `/ai` command.",
              );
              return;
            }
          } else {
            let serverLimit = 30;
            try {
              const memberCount = message.guild?.memberCount || 0;
              if (memberCount >= 1000) {
                serverLimit = 500;
              } else if (memberCount >= 100) {
                serverLimit = 150;
              }

              const serverAllowed = await incrementAndCheckServerDailyLimit(
                message.guildId!,
                serverLimit,
              );
              if (!serverAllowed) {
                await message.reply(
                  `❌ This server has reached its daily limit of ${serverLimit} AI requests. Please try again tomorrow.`,
                );
                return;
              }
            } catch (error) {
              logger.error('Error checking server member count:', error);
              const serverAllowed = await incrementAndCheckServerDailyLimit(message.guildId!, 30);
              if (!serverAllowed) {
                await message.reply(
                  '❌ This server has reached its daily limit of AI requests. Please try again tomorrow.',
                );
                return;
              }
            }
          }
        }
      } else if (!config.finalApiKey) {
        await message.reply('❌ Please set up your API key first using the `/ai` command.');
        return;
      }

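      // Send the request; image requests get one retry after a short delay.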
      let aiResponse = await makeAIRequest(config, updatedConversation);

      if (!aiResponse && hasImages) {
        logger.warn(`First attempt failed for ${selectedModel}, retrying once...`);
        await new Promise((resolve) => setTimeout(resolve, 1000));
        aiResponse = await makeAIRequest(config, updatedConversation);
      }

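      // If the vision model still fails, rebuild the prompt as text-only (images
      // referenced by URL) and retry on a text model.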
      if (!aiResponse && hasImages) {
        logger.warn(`Image model ${selectedModel} failed, falling back to text-only model`);

        let fallbackContent = message.content;
        if (Array.isArray(messageContent)) {
          const textParts = messageContent
            .filter((item) => item.type === 'text')
            .map((item) => item.text)
            .filter((text) => text && text.trim());

          const imageParts = messageContent
            .filter((item) => item.type === 'image_url')
            .map((item) => `[Image: ${item.image_url?.url}]`);

          fallbackContent =
            [...textParts, ...imageParts].join(' ') ||
            `[Message contained images that could not be processed: ${message.content}]`;
        }

        const cleanedConversation = conversation.map((msg) => {
          if (Array.isArray(msg.content)) {
            const textContent = msg.content
              .filter((item) => item.type === 'text')
              .map((item) => item.text)
              .join(' ');
            return { ...msg, content: textContent || '[Image content]' };
          }
          return msg;
        });

        const fallbackModel = isDM ? 'moonshotai/kimi-k2' : 'google/gemini-2.5-flash-lite';

        const fallbackConversation = buildConversation(
          cleanedConversation,
          fallbackContent,
          buildSystemPrompt(
            usingDefaultKey,
            this.client,
            fallbackModel,
            message.author.username,
            undefined,
            !isDM,
            !isDM ? message.guild?.name : undefined,
          ),
        );

        const fallbackConfig = isDM ? config : getApiConfiguration(null, fallbackModel, null);
        aiResponse = await makeAIRequest(fallbackConfig, fallbackConversation);

        if (aiResponse) {
          logger.info('Successfully processed message with fallback text-only model');
        }
      }

      if (!aiResponse) {
        await message.reply({
          content: 'Sorry, I encountered an error processing your message. Please try again later.',
          allowedMentions: { parse: ['users'] as const },
        });
        return;
      }

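      // Sanitize the reply: normalize URLs, neutralize @everyone/@here with a
      // zero-width space, and refuse to post responses that trip the word filter.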
      aiResponse.content = processUrls(aiResponse.content);
      aiResponse.content = aiResponse.content.replace(/@(everyone|here)/gi, '@\u200b$1');

      const { getUnallowedWordCategory } = await import('@/utils/validation');
      const category = getUnallowedWordCategory(aiResponse.content);
      if (category) {
        logger.warn(`AI response contained unallowed words in category: ${category}`);
        await message.reply({
          content:
            'Sorry, I cannot provide that response as it contains prohibited content. Please try a different prompt.',
          allowedMentions: { parse: ['users'] as const },
        });
        return;
      }

      await this.sendResponse(message, aiResponse);

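      // Persist the exchange so follow-up messages keep their context.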
      const userMessage: ConversationMessage = {
        role: 'user',
        content: messageContent,
        username: message.author.username,
      };
      const assistantMessage: ConversationMessage = {
        role: 'assistant',
        content: aiResponse.content,
      };

      if (isDM) {
        const conversationKey = getUserConversationKey(message.author.id);
        const newConversation = [...filteredConversation, userMessage, assistantMessage];
        userConversations.set(conversationKey, newConversation);
      } else {
        const serverKey = getServerConversationKey(message.guildId!);
        const serverConversation = serverConversations.get(serverKey) || [];
        const newServerConversation = [...serverConversation, userMessage, assistantMessage];
        serverConversations.set(serverKey, newServerConversation);
      }

      logger.info(`${isDM ? 'DM' : 'Server'} response sent successfully`);
    } catch (error) {
      logger.error(
        `Error processing ${isDM ? 'DM' : 'server message'}:`,
        error instanceof Error ? error.message : String(error),
      );
      try {
        await message.reply(
          'Sorry, I encountered an error processing your message. Please try again later.',
        );
      } catch (replyError) {
        logger.error('Failed to send error message:', replyError);
      }
    }
  }

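  // Sends the reply, quoting any model reasoning first and splitting output that
  // exceeds Discord's 2,000-character message limit into follow-up messages.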
  private async sendResponse(message: Message, aiResponse: AIResponse): Promise<void> {
    let fullResponse = '';

    if (aiResponse.reasoning) {
      fullResponse += `> ${aiResponse.reasoning}\n\n`;
    }

    fullResponse += aiResponse.content;

    const maxLength = 2000;
    if (fullResponse.length <= maxLength) {
      await message.reply({
        content: fullResponse,
        allowedMentions: { parse: ['users'] as const },
      });
    } else {
      const chunks = splitResponseIntoChunks(fullResponse, maxLength);

      await message.reply({ content: chunks[0], allowedMentions: { parse: ['users'] as const } });

      for (let i = 1; i < chunks.length; i++) {
        if ('send' in message.channel) {
          await message.channel.send({
            content: chunks[i],
            allowedMentions: { parse: ['users'] as const },
          });
        }
      }
    }
  }
}