// A chill Bluesky bot, with responses powered by Gemini.
import { GoogleGenAI, type Content } from "@google/genai";
import { type Post } from "@skyware/bot";
import consola from "consola";
import * as yaml from "js-yaml";

import * as c from "../constants";
import { env } from "../env";
import modelPrompt from "../model/prompt.txt";
import * as tools from "../tools";
import {
  isAuthorizedUser,
  logInteraction,
  getRecentInteractions,
} from "../utils/interactions";
import { MemoryHandler } from "../utils/memory";
import * as threadUtils from "../utils/thread";
16
// Module-scoped logger; every line from this handler is tagged "Post Handler".
const logger = consola.withTag("Post Handler");

// Union of the function-call names the bot is allowed to execute,
// derived from the canonical list in constants.
type SupportedFunctionCall = typeof c.SUPPORTED_FUNCTION_CALLS[number];
20
21async function generateAIResponse(memory: string, parsedThread: string) {
22 const genai = new GoogleGenAI({
23 apiKey: env.GEMINI_API_KEY,
24 });
25
26 const config = {
27 model: env.GEMINI_MODEL,
28 config: {
29 tools: tools.declarations,
30 },
31 };
32
33 const contents = [
34 {
35 role: "model" as const,
36 parts: [
37 {
38 /*
39 ? Once memory blocks are working, this will pull the prompt from the database, and the prompt will be
40 ? automatically initialized with the administrator's handle from the env variables. I only did this so
41 ? that if anybody runs the code themselves, they just have to edit the env variables, nothing else.
42 */
43 text: modelPrompt.replace(
44 "{{ administrator }}",
45 env.ADMIN_HANDLE,
46 ),
47 },
48 {
49 text: memory,
50 },
51 ],
52 },
53 {
54 role: "user" as const,
55 parts: [
56 {
57 text:
58 `This is the thread. The top replies are older, the bottom replies are newer.
59 ${parsedThread}`,
60 },
61 ],
62 },
63 ];
64
65 let inference = await genai.models.generateContent({
66 ...config,
67 contents,
68 });
69
70 logger.log(
71 `Initial inference took ${inference.usageMetadata?.totalTokenCount} tokens`,
72 );
73
74 if (inference.functionCalls && inference.functionCalls.length > 0) {
75 const call = inference.functionCalls[0];
76
77 if (
78 call &&
79 c.SUPPORTED_FUNCTION_CALLS.includes(
80 call.name as SupportedFunctionCall,
81 )
82 ) {
83 logger.log("Function called invoked:", call.name);
84
85 const functionResponse = await tools.handler(
86 call as typeof call & { name: SupportedFunctionCall },
87 );
88
89 logger.log("Function response:", functionResponse);
90
91 //@ts-ignore
92 contents.push(inference.candidates[0]?.content!);
93
94 contents.push({
95 role: "user" as const,
96 parts: [{
97 //@ts-ignore
98 functionResponse: {
99 name: call.name as string,
100 response: { res: functionResponse },
101 },
102 }],
103 });
104
105 inference = await genai.models.generateContent({
106 ...config,
107 contents,
108 });
109 }
110 }
111
112 return inference;
113}
114
115async function sendResponse(post: Post, text: string): Promise<void> {
116 post.like();
117
118 if (threadUtils.exceedsGraphemes(text)) {
119 threadUtils.multipartResponse(text, post);
120 } else {
121 post.reply({
122 text,
123 tags: c.TAGS,
124 });
125 }
126}
127
128export async function handler(post: Post): Promise<void> {
129 try {
130 if (!isAuthorizedUser(post.author.did)) {
131 await post.reply({
132 text: c.UNAUTHORIZED_MESSAGE,
133 tags: c.TAGS,
134 });
135 return;
136 }
137
138 const isMuted = await threadUtils.isThreadMuted(post);
139 if (isMuted) {
140 logger.warn("Thread is muted.");
141 await logInteraction(post, {
142 responseText: null,
143 wasMuted: true,
144 });
145 return;
146 }
147
148 const thread = await threadUtils.traverseThread(post);
149 const parsedThread = threadUtils.parseThread(thread);
150 logger.success("Generated thread context:", parsedThread);
151
152 const botMemory = new MemoryHandler(
153 env.DID,
154 await MemoryHandler.getBlocks(env.DID),
155 );
156 const userMemory = new MemoryHandler(
157 post.author.did,
158 await MemoryHandler.getBlocks(post.author.did),
159 );
160
161 const recentInteractions = await getRecentInteractions(
162 post.author.did,
163 thread,
164 );
165
166 const memory = yaml.dump({
167 users_with_memory_blocks: {
168 [env.HANDLE]: botMemory.parseBlocks(),
169 [post.author.handle]: userMemory.parseBlocks(),
170 },
171 recent_interactions: recentInteractions,
172 });
173
174 logger.log("Parsed memory blocks: ", memory);
175
176 const inference = await generateAIResponse(memory, parsedThread);
177 logger.success("Generated text:", inference.text);
178
179 const responseText = inference.text;
180 if (responseText) {
181 await sendResponse(post, responseText);
182 }
183
184 await logInteraction(post, {
185 responseText: responseText ?? null,
186 wasMuted: false,
187 });
188 } catch (error) {
189 logger.error("Error in post handler:", error);
190
191 await post.reply({
192 text:
193 "aw, shucks, something went wrong! gonna take a quick nap and try again later. 😴",
194 tags: c.TAGS,
195 });
196 }
197}