A chill Bluesky bot, with responses powered by Gemini.
import {
  isAuthorizedUser,
  logInteraction,
  getRecentInteractions,
} from "../utils/interactions";
import * as threadUtils from "../utils/thread";
import modelPrompt from "../model/prompt.txt";
import { GoogleGenAI, type Content } from "@google/genai";
import { type Post } from "@skyware/bot";
import * as c from "../constants";
import * as tools from "../tools";
import consola from "consola";
import { env } from "../env";
import { MemoryHandler } from "../utils/memory";
import * as yaml from "js-yaml";

const logger = consola.withTag("Post Handler");

type SupportedFunctionCall = typeof c.SUPPORTED_FUNCTION_CALLS[number];

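/**
 * Builds the Gemini conversation (system prompt, memory blocks, and the parsed
 * thread), runs inference, and resolves at most one supported tool call before
 * returning the final response.
 */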
async function generateAIResponse(memory: string, parsedThread: string) {
  const genai = new GoogleGenAI({
    apiKey: env.GEMINI_API_KEY,
  });

  const config = {
    model: env.GEMINI_MODEL,
    config: {
      tools: tools.declarations,
    },
  };

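  // Seed the conversation: the system prompt and memory blocks form the model
  // turn, and the parsed thread forms the user turn the bot should answer.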
  const contents: Content[] = [
    {
      role: "model",
      parts: [
        {
          /*
          ? Once memory blocks are working, this will pull the prompt from the database, and the prompt will be
          ? automatically initialized with the administrator's handle from the env variables. I only did this so
          ? that if anybody runs the code themselves, they just have to edit the env variables, nothing else.
          */
          text: modelPrompt
            .replace("{{ administrator }}", env.ADMIN_HANDLE)
            .replace("{{ handle }}", env.HANDLE),
        },
        {
          text: memory,
        },
      ],
    },
    {
      role: "user",
      parts: [
        {
          text: `below is the yaml for the current thread. your job is to respond to the last message.

${parsedThread}`,
        },
      ],
    },
  ];

  let inference = await genai.models.generateContent({
    ...config,
    contents,
  });

  logger.log(
    `Initial inference took ${inference.usageMetadata?.totalTokenCount} tokens`,
  );

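  // If Gemini requested a tool call, run the first supported one, feed the
  // result back into the conversation, and re-run inference for the final reply.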
  if (inference.functionCalls && inference.functionCalls.length > 0) {
    const call = inference.functionCalls[0];

    if (
      call &&
      c.SUPPORTED_FUNCTION_CALLS.includes(call.name as SupportedFunctionCall)
    ) {
      logger.log("Function call invoked:", call.name);

      const functionResponse = await tools.handler(
        call as typeof call & { name: SupportedFunctionCall },
      );

      logger.log("Function response:", functionResponse);

      // Echo the model's function-call turn back into the conversation,
      // followed by the tool result, so the follow-up request has full context.
      const modelTurn = inference.candidates?.[0]?.content;
      if (modelTurn) {
        contents.push(modelTurn);
      }

      contents.push({
        role: "user",
        parts: [
          {
            functionResponse: {
              name: call.name,
              response: { res: functionResponse },
            },
          },
        ],
      });

      inference = await genai.models.generateContent({
        ...config,
        contents,
      });
    }
  }

  return inference;
}

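/**
 * Likes the post being replied to, then sends the reply, splitting it into a
 * multipart response when it exceeds Bluesky's grapheme limit.
 */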
async function sendResponse(post: Post, text: string): Promise<void> {
  await post.like();

  if (threadUtils.exceedsGraphemes(text)) {
    await threadUtils.multipartResponse(text, post);
  } else {
    await post.reply({
      text,
      tags: c.TAGS,
    });
  }
}

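/**
 * Entry point for incoming posts: checks authorization and thread mutes,
 * gathers thread context and memory, generates a reply, and logs the interaction.
 */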
export async function handler(post: Post): Promise<void> {
  try {
    if (!isAuthorizedUser(post.author.did)) {
      await post.reply({
        text: c.UNAUTHORIZED_MESSAGE,
        tags: c.TAGS,
      });
      return;
    }

    const isMuted = await threadUtils.isThreadMuted(post);
    if (isMuted) {
      logger.warn("Thread is muted.");
      await logInteraction(post, {
        responseText: null,
        wasMuted: true,
      });
      return;
    }

    const thread = await threadUtils.traverseThread(post);
    const parsedThread = threadUtils.parseThread(thread);
    logger.success("Generated thread context:", parsedThread);

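    // Pull memory blocks for the bot and the post author, plus recent
    // interactions, and serialize them as YAML for the model's context.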
    const botMemory = new MemoryHandler(
      env.DID,
      await MemoryHandler.getBlocks(env.DID),
    );
    const userMemory = new MemoryHandler(
      post.author.did,
      await MemoryHandler.getBlocks(post.author.did),
    );

    const recentInteractions = await getRecentInteractions(
      post.author.did,
      thread,
    );

    const memory = yaml.dump({
      users_with_memory_blocks: {
        [env.HANDLE]: botMemory.parseBlocks(),
        [post.author.handle]: userMemory.parseBlocks(),
      },
      recent_interactions: recentInteractions,
    });

    logger.log("Parsed memory blocks:", memory);

    const inference = await generateAIResponse(memory, parsedThread);
    logger.success("Generated text:", inference.text);

    const responseText = inference.text;
    if (responseText) {
      await sendResponse(post, responseText);
    }

    await logInteraction(post, {
      responseText: responseText ?? null,
      wasMuted: false,
    });
  } catch (error) {
    logger.error("Error in post handler:", error);

    await post.reply({
      text:
        "aw, shucks, something went wrong! gonna take a quick nap and try again later. 😴",
      tags: c.TAGS,
    });
  }
}