// A chill Bluesky bot, with responses powered by Gemini.
import {
  isAuthorizedUser,
  logInteraction,
  getRecentInteractions,
} from "../utils/interactions";
import * as threadUtils from "../utils/thread";
import modelPrompt from "../model/prompt.txt";
import { GoogleGenAI, type Content } from "@google/genai";
import { type Post } from "@skyware/bot";
import * as c from "../constants";
import * as tools from "../tools";
import consola from "consola";
import { env } from "../env";
import { MemoryHandler } from "../utils/memory";
import * as yaml from "js-yaml";
16
// Tagged logger so this handler's output is distinguishable in shared logs.
const logger = consola.withTag("Post Handler");

// Union of the tool names the bot may execute, derived from the
// readonly list in constants so the two can never drift apart.
type SupportedFunctionCall = typeof c.SUPPORTED_FUNCTION_CALLS[number];
20
21async function generateAIResponse(post: Post, memory: string, parsedThread: string) {
22 const genai = new GoogleGenAI({
23 apiKey: env.GEMINI_API_KEY,
24 });
25
26 const config = {
27 model: env.GEMINI_MODEL,
28 config: {
29 tools: tools.declarations,
30 },
31 };
32
33 const contents = [
34 {
35 role: "model" as const,
36 parts: [
37 {
38 text: `${modelPrompt
39 .replace("{{ administrator }}", env.ADMIN_HANDLE)
40 .replace("{{ handle }}", env.HANDLE)}\n\n${memory}`,
41 },
42 ],
43 },
44 {
45 role: "user" as const,
46 parts: [
47 {
48 text: `below is the yaml for the current thread. your job is to respond to the last message.
49
50${parsedThread}`,
51 },
52 ],
53 },
54 ];
55
56 let inference = await genai.models.generateContent({
57 ...config,
58 contents,
59 });
60
61 logger.log(
62 `Initial inference took ${inference.usageMetadata?.totalTokenCount} tokens`,
63 );
64
65 if (inference.functionCalls && inference.functionCalls.length > 0) {
66 const call = inference.functionCalls[0];
67
68 if (
69 call &&
70 c.SUPPORTED_FUNCTION_CALLS.includes(
71 call.name as SupportedFunctionCall,
72 )
73 ) {
74 logger.log("Function call invoked:", call.name);
75 logger.log("Function call arguments:", call.args);
76
77 const functionResponse = await tools.handler(
78 call as typeof call & { name: SupportedFunctionCall },
79 post.author.did,
80 );
81
82 logger.log("Function response:", functionResponse);
83
84 //@ts-ignore
85 contents.push(inference.candidates[0]?.content!);
86
87 contents.push({
88 role: "user" as const,
89 parts: [{
90 //@ts-ignore
91 functionResponse: {
92 name: call.name as string,
93 response: functionResponse,
94 },
95 }],
96 });
97
98 inference = await genai.models.generateContent({
99 ...config,
100 contents,
101 });
102 }
103 }
104
105 return inference;
106}
107
108async function sendResponse(post: Post, text: string): Promise<void> {
109 post.like();
110
111 if (threadUtils.exceedsGraphemes(text)) {
112 threadUtils.multipartResponse(text, post);
113 } else {
114 post.reply({
115 text,
116 tags: c.TAGS,
117 });
118 }
119}
120
121export async function handler(post: Post): Promise<void> {
122 try {
123 if (!isAuthorizedUser(post.author.did)) {
124 await post.reply({
125 text: c.UNAUTHORIZED_MESSAGE,
126 tags: c.TAGS,
127 });
128 return;
129 }
130
131 const isMuted = await threadUtils.isThreadMuted(post);
132 if (isMuted) {
133 logger.warn("Thread is muted.");
134 await logInteraction(post, {
135 responseText: null,
136 wasMuted: true,
137 });
138 return;
139 }
140
141 const thread = await threadUtils.traverseThread(post);
142 const parsedThread = threadUtils.parseThread(thread);
143 logger.success("Generated thread context:", parsedThread);
144
145 const botMemory = new MemoryHandler(
146 env.DID,
147 await MemoryHandler.getBlocks(env.DID),
148 );
149 const userMemory = new MemoryHandler(
150 post.author.did,
151 await MemoryHandler.getBlocks(post.author.did),
152 );
153
154 const recentInteractions = await getRecentInteractions(
155 post.author.did,
156 thread,
157 );
158
159 const memory = yaml.dump({
160 users_with_memory_blocks: {
161 [env.HANDLE]: botMemory.parseBlocks(),
162 [post.author.handle]: userMemory.parseBlocks(),
163 },
164 recent_interactions: recentInteractions,
165 });
166
167 logger.log("Parsed memory blocks: ", memory);
168
169 const inference = await generateAIResponse(post, memory, parsedThread);
170 logger.success("Generated text:", inference.text);
171
172 const responseText = inference.text;
173 if (responseText) {
174 await sendResponse(post, responseText);
175 }
176
177 await logInteraction(post, {
178 responseText: responseText ?? null,
179 wasMuted: false,
180 });
181 } catch (error) {
182 logger.error("Error in post handler:", error);
183
184 await post.reply({
185 text:
186 "aw, shucks, something went wrong! gonna take a quick nap and try again later. 😴",
187 tags: c.TAGS,
188 });
189 }
190}