Schedule posts to Bluesky with Cloudflare Workers.
skyscheduler.work
cf
tool
bsky-tool
cloudflare
bluesky
schedule
bsky
service
social-media
cloudflare-workers
1import { Context } from 'hono';
2import { imageDimensionsFromStream } from 'image-dimensions';
3import { v4 as uuidv4 } from 'uuid';
4import {
5 BSKY_GIF_MIME_TYPES,
6 BSKY_IMG_MIME_TYPES,
7 BSKY_IMG_SIZE_LIMIT,
8 BSKY_VIDEO_MIME_TYPES,
9 BSKY_VIDEO_SIZE_LIMIT,
10 CF_IMAGES_FILE_SIZE_LIMIT,
11 CF_IMAGES_FILE_SIZE_LIMIT_IN_MB,
12 CF_IMAGES_MAX_DIMENSION,
13 GIF_UPLOAD_ALLOWED,
14 MB_TO_BYTES,
15 R2_FILE_SIZE_LIMIT,
16 R2_FILE_SIZE_LIMIT_IN_MB
17} from "../limits";
18import { AllContext, EmbedData, EmbedDataType, R2BucketObject } from '../types';
19import { addFileListing, deleteFileListings } from './db/file';
20
// Metadata recorded for a file when it is pushed to R2 storage.
type FileMetaData = {
  // Original client-side file name; its extension is reused for the stored key.
  name: string,
  // File size in bytes (for resized images, the post-compression size).
  size: number,
  // Identifier of the uploading user; stored as R2 custom metadata.
  user: string,
  // MIME type of the file; stored as R2 custom metadata.
  type: string,
  // JPEG quality percentage chosen during image resizing, if a resize occurred.
  qualityLevel?: number;
};
28
29export const deleteEmbedsFromR2 = async (c: AllContext, embeds: EmbedData[]|undefined, isQueued: boolean=false) => {
30 let itemsToDelete:string[] = [];
31
32 if (embeds !== undefined && embeds.length > 0) {
33 embeds.forEach((data) => {
34 // We don't store any data locally for weblinks/records
35 if (data.type === EmbedDataType.Image || data.type === EmbedDataType.Video) {
36 console.log(`Pushing ${data.content} for deletion...`);
37 itemsToDelete.push(data.content.toLowerCase());
38 }
39 });
40 await deleteFromR2(c, itemsToDelete, isQueued);
41 }
42 return itemsToDelete;
43};
44
45export const deleteFromR2 = async (c: AllContext, embeds: string[]|string, isQueued: boolean=false) => {
46 if (embeds.length <= 0)
47 return;
48
49 console.log(`Deleting ${embeds}`);
50 const killFilesPromise = c.env.R2.delete(embeds);
51 const deleteFileListingPromise = deleteFileListings(c, embeds);
52 if (isQueued) {
53 await killFilesPromise;
54 await deleteFileListingPromise;
55 } else {
56 c.executionCtx.waitUntil(killFilesPromise);
57 c.executionCtx.waitUntil(deleteFileListingPromise);
58 }
59};
60
61const rawUploadToR2 = async (c: AllContext, buffer: ArrayBuffer|ReadableStream, metaData: FileMetaData) => {
62 const fileExt:string|undefined = metaData.name.split(".").pop();
63 if (fileExt === undefined) {
64 return {"success": false, "error": "unable to upload, file name is invalid"};
65 }
66
67 const fileName = `${uuidv4()}.${fileExt.toLowerCase()}`;
68 const R2UploadRes = await c.env.R2.put(fileName, buffer, {
69 customMetadata: {"user": metaData.user, "type": metaData.type }
70 });
71 if (R2UploadRes) {
72 await addFileListing(c, fileName, metaData.user);
73 return {"success": true, "data": R2UploadRes.key,
74 "originalName": metaData.name, "fileSize": metaData.size,
75 "qualityLevel": metaData.qualityLevel};
76 } else {
77 return {"success": false, "error": "unable to push to file storage"};
78 }
79};
80
81const uploadImageToR2 = async(c: Context, file: File, userId: string) => {
82 const originalName = file.name;
83 // The maximum size of CF Image transforms.
84 if (file.size > CF_IMAGES_FILE_SIZE_LIMIT) {
85 return {"success": false, "error": `An image has a maximum file size of ${CF_IMAGES_FILE_SIZE_LIMIT_IN_MB}MB`};
86 }
87
88 // We need to double check this image for various size information.
89 const imageMetaData = await imageDimensionsFromStream(file.stream());
90 if (imageMetaData === undefined) {
91 return {"success": false, "error": "image data could not be processed "}
92 }
93
94 // if the image is over the cf image transforms, then return an error.
95 if (imageMetaData.width > CF_IMAGES_MAX_DIMENSION || imageMetaData.height > CF_IMAGES_MAX_DIMENSION) {
96 return {"success": false, "error": "image dimensions are too large to autosize. make sure your files fit the limits."};
97 }
98
99 // If the image is over any bsky limits, we will need to resize it
100 let finalFileSize: number = file.size;
101 // final quality level
102 let finalQualityLevel: number = 100;
103
104 // The file we'll eventually upload to R2 (this object will change based on compression/resizes)
105 let fileToProcess: ArrayBuffer|ReadableStream|null = null;
106
107 if (file.size > BSKY_IMG_SIZE_LIMIT) {
108 let failedToResize = true;
109
110 if (c.env.IMAGE_SETTINGS.enabled) {
111 // Randomly generated id to be used during the resize process
112 const resizeFilename = uuidv4();
113 const resizeBucketPush = await c.env.R2RESIZE.put(resizeFilename, await file.bytes(), {
114 customMetadata: {"user": userId },
115 httpMetadata: { contentType: file.type }
116 });
117
118 if (!resizeBucketPush) {
119 console.error(`Failed to push ${file.name} to the resizing bucket`);
120 return {"success": false, "error": "resize process ran out of disk space, you'll need to resize the image or try again"};
121 }
122
123 // Default image rules for resizing an image
124 const imageRules: RequestInitCfPropertiesImage = {
125 fit: "scale-down",
126 metadata: "copyright",
127 format: "jpeg",
128 };
129
130 // if the application is to also resize the width automatically, do so here.
131 // this will preserve aspect ratio
132 if (c.env.IMAGE_SETTINGS.max_width) {
133 imageRules.width = c.env.IMAGE_SETTINGS.max_width;
134 }
135
136 for (var i = 0; i < c.env.IMAGE_SETTINGS.steps.length; ++i) {
137 const qualityLevel: number = c.env.IMAGE_SETTINGS.steps[i];
138 const response = await fetch(new URL(resizeFilename, c.env.IMAGE_SETTINGS.bucket_url!), {
139 headers: {
140 "x-skyscheduler-helper": c.env.RESIZE_SECRET_HEADER
141 },
142 cf: {
143 image: {
144 quality: qualityLevel,
145 ...imageRules
146 }
147 }
148 });
149 if (response.ok) {
150 const resizedHeader = response.headers.get("Cf-Resized");
151 const returnType = response.headers.get("Content-Type") || "";
152 const transformFileSize: number = Number(response.headers.get("Content-Length")) || 0;
153 const resizeHadError: boolean = (resizedHeader === null || resizedHeader.indexOf("err=") !== -1);
154
155 if (!resizeHadError && BSKY_IMG_MIME_TYPES.includes(returnType)) {
156 console.log(`Attempting quality level ${qualityLevel}% for ${originalName}, size: ${transformFileSize}`);
157
158 // If we make the file size less than the actual limit
159 if (transformFileSize < BSKY_IMG_SIZE_LIMIT && transformFileSize !== 0) {
160 console.log(`${originalName}: Quality level ${qualityLevel}% processed, fits correctly with size.`);
161 failedToResize = false;
162 // Set some extra variables
163 finalQualityLevel = qualityLevel;
164 finalFileSize = transformFileSize;
165 fileToProcess = response.body;
166 break;
167 } else {
168 if (transformFileSize !== 0) {
169 // Print how over the image was if we cannot properly resize it
170 console.log(`${originalName}: file size ${transformFileSize - BSKY_IMG_SIZE_LIMIT} over the appropriate size`);
171 }
172
173 // dispose the body
174 await response.body?.cancel();
175 }
176 } else {
177 console.warn(`File ${file.name} was not handled ${response.statusText}`);
178 }
179 } else {
180 console.error(`image tfs got error: ${response.statusText}`);
181 }
182 }
183 // Delete the file from the resize bucket.
184 c.executionCtx.waitUntil(c.env.R2RESIZE.delete(resizeFilename));
185 }
186
187 if (failedToResize) {
188 const fileSizeOverAmount: string = ((file.size - BSKY_IMG_SIZE_LIMIT)/MB_TO_BYTES).toFixed(2);
189 return {"success": false, "originalName": originalName, "error": `Image is too large for BSky, size is over by ${fileSizeOverAmount}MB`};
190 }
191 }
192
193 const fileMetaData: FileMetaData = {
194 name: originalName,
195 size: finalFileSize,
196 user: userId,
197 type: file.type,
198 qualityLevel: finalQualityLevel
199 };
200
201 if (fileToProcess === null)
202 fileToProcess = await file.arrayBuffer();
203 return await rawUploadToR2(c, fileToProcess, fileMetaData);
204};
205
206const uploadVideoToR2 = async (c: Context, file: File, userId: string) => {
207 // Technically this will never hit because it is greater than our own internal limits
208 if (file.size > BSKY_VIDEO_SIZE_LIMIT) {
209 return {"success": false, "error": `max video size is ${BSKY_VIDEO_SIZE_LIMIT}MB`};
210 }
211
212 const fileMetaData: FileMetaData = {
213 name: file.name,
214 size: file.size,
215 type: file.type,
216 user: userId
217 };
218 return await rawUploadToR2(c, await file.stream(), fileMetaData);
219};
220
221export const uploadFileR2 = async (c: Context, file: File, userId: string) => {
222 if (!(file instanceof File)) {
223 console.warn("Failed to upload");
224 return {"success": false, "error": "data invalid"};
225 }
226
227 // Invalid file size check
228 if (file.size <= 0) {
229 return {"success": false, "error": "file had an invalid file size"};
230 }
231
232 // The file size limit for R2 before having to do a multipart upload.
233 if (file.size > R2_FILE_SIZE_LIMIT) {
234 return {"success": false, "error": `max file size is ${R2_FILE_SIZE_LIMIT_IN_MB}MB`};
235 }
236
237 const fileType: string = file.type.toLowerCase();
238 if (BSKY_IMG_MIME_TYPES.includes(fileType)) {
239 return await uploadImageToR2(c, file, userId);
240 } else if (BSKY_VIDEO_MIME_TYPES.includes(fileType)) {
241 return await uploadVideoToR2(c, file, userId);
242 } else if (GIF_UPLOAD_ALLOWED && BSKY_GIF_MIME_TYPES.includes(fileType)) {
243 // TODO: modify this in the future to transform the image to a webm
244 // then push to uploadVideo
245 return await uploadVideoToR2(c, file, userId);
246 }
247 return {"success": false, "error": "unable to push to R2"};
248};
249
250export const getAllFilesList = async (c: AllContext) => {
251 let options: R2ListOptions = {
252 limit: 1000,
253 include: ["customMetadata"]
254 };
255 let values:R2BucketObject[] = [];
256
257 while (true) {
258 const response = await c.env.R2.list(options);
259 for (const file of response.objects) {
260 values.push({
261 name: file.key,
262 user: file.customMetadata?.user || null,
263 date: file.uploaded
264 });
265 }
266
267 if (response.truncated)
268 options.cursor = response.cursor;
269 else
270 break;
271 }
272 return values;
273};