//! One build pass over the source tree: parses posts (or mirrors plain files)
//! into the output directory, tracks per-post content hashes to detect
//! changes, and accumulates a change list for the configured callbacks.

const std = @import("std");
const Makko = @import("Makko.zig");
const Callbacks = @import("Callbacks.zig");
const Data = @import("Data.zig");
const Parsed = @import("Parsed.zig");
const helper = @import("helper.zig");

// NOTE(review): unused in this file — presumably a read-size cap used
// elsewhere; confirm before removing.
const MAX_FILE_SIZE = 1024 * 1024 * 1024 * 9;

const Pass = @This();
const ChangeList = std.ArrayList(Callbacks.Change);

/// Owning Makko state this pass operates on.
parent: *Makko,
/// Changes accumulated during this pass; guarded by `changes_mutex`.
changes: ChangeList,
changes_mutex: std.Thread.Mutex = .{},
progress: std.Progress.Node,

/// Create a pass bound to `self`. Release with `deinit`.
pub fn init(self: *Makko) Pass {
    return Pass{
        .parent = self,
        .changes = ChangeList.init(self.allocator),
        .progress = self.progress.start("Pass", 1),
    };
}

/// Free all recorded changes and close the progress node.
pub fn deinit(pass: *Pass) void {
    for (pass.changes.items) |change| change.deinit(pass.parent.allocator);
    pass.changes.deinit();
    pass.progress.end();
}

/// Returns true when `path` collides with a configured feed output path or
/// with an existing file in the source tree.
fn overlapping(state: Makko, path: []const u8) bool {
    const Feeds = @import("FeedGenerator.zig");
    inline for (Feeds.generator_list) |generator| {
        const feed_path = @field(state.origin.value.feeds, generator.name);
        if (feed_path) |p| if (std.mem.eql(u8, p, path)) return true;
    }
    return if (state.paths.source.access(path, .{})) true else |_| false;
}

/// Map a source file path to its output URL by swapping the extension for
/// ".html". Caller owns the returned slice.
fn getURL(file_path: []const u8, allocator: std.mem.Allocator) ![]const u8 {
    const ext_size = std.fs.path.extension(file_path).len;
    return try std.mem.concat(allocator, u8, &.{
        file_path[0 .. file_path.len - ext_size],
        ".html",
    });
}

/// Process one source file: parse it as a post (or symlink/copy it verbatim
/// when it is not one), detect content changes by hashing the template data,
/// write the output files, and update the parent's shared tables.
///
/// Returns the post's visibility, or null for plain (non-post) files.
/// Shared parent state is guarded by the parent's mutexes, so concurrent
/// `processFile` calls are expected.
pub fn processFile(
    pass: *Pass,
    file: []const u8,
) !?Parsed.Visibility {
    const parent = pass.parent;
    // TODO: maybe get rid of the thread_safe_allocator?
    const allocator = parent.thread_safe_allocator.allocator();
    const paths = parent.paths;
    var data_copy = parent.data;
    data_copy.pass.feed_template = 0;

    const file_node = pass.progress.start(file, 3);
    defer file_node.end();

    // FIXME
    //if (overlapping(parent.*, file))
    //    return error.OverlappingFiles;

    /////////////////////////////////////////////////////////////////////

    const parse_progress = file_node.start("Parse", 1);
    // FIX: defer immediately so the node is also closed on the parse-error
    // and plain-copy return paths below (it was previously leaked there).
    defer parse_progress.end();

    const f_n = Parsed.fromFile(
        paths.source,
        file,
        allocator,
        data_copy,
    ) catch |err| {
        parent.log.err("Cannot process {s}! ({})", .{ file, err });
        return err;
    };

    // Not a post: mirror the file into the output tree (symlink or copy).
    const f = f_n orelse {
        const copy_progress = file_node.start("Copy", 1);
        defer copy_progress.end();
        parent.paths.output.deleteFile(file) catch {};
        if (parent.symlinks_enabled) {
            const source = try paths.source.realpathAlloc(
                allocator,
                file,
            );
            defer allocator.free(source);
            const output_dir = try paths.output.realpathAlloc(
                allocator,
                ".",
            );
            defer allocator.free(output_dir);
            const output = try std.fs.path.join(
                allocator,
                &.{ output_dir, file },
            );
            defer allocator.free(output);
            try helper.relativeSymlink(source, output, allocator);
            return null;
        }
        try paths.source.copyFile(
            file,
            paths.output,
            file,
            .{},
        );
        return null;
    };
    defer f.deinit();

    /////////////////////////////////////////////////////////////////////

    // Drafts are parsed (so errors still surface) but never written out.
    if (f.visibility == .draft) return f.visibility;

    const url = try getURL(file, allocator);
    defer allocator.free(url);
    const body = f.html orelse f.body;

    data_copy.post = Data.Post{
        .id = f.id,
        .title = f.title,
        .description = f.description,
        .author = f.author,
        .tags = f.tags,
        .created = Data.timeFromTimestamp(f.created),
        .updated = Data.empty_time,
        .is_secret = f.visibility == .secret,
        .source = file,
        .url = url,
        .body = body,
    };

    {
        parent.processed_posts_mutex.lock();
        defer parent.processed_posts_mutex.unlock();
        // FIX: don't leak the duplicated path if the put itself fails.
        const file_copy = try allocator.dupe(u8, file);
        errdefer allocator.free(file_copy);
        try parent.processed_posts.put(f.id, file_copy);
    }

    /////////////////////////////////////////////////////////////////////

    var status: Callbacks.Status = .unchanged;
    {
        const hasher_progress = file_node.start("Hash", 1);
        // FIX: scoped defer ends the node exactly once even when hashing or
        // the table update fails (it previously leaked on those error paths).
        defer hasher_progress.end();

        // Hash the fully-populated template data; any difference means the
        // rendered output would differ.
        const hash = blk: {
            const json_data = try helper.valueToJson(
                allocator,
                data_copy,
            );
            defer json_data.deinit();
            break :blk helper.hashJsonMap(json_data.value.object);
        };

        parent.hashes_mutex.lock();
        defer parent.hashes_mutex.unlock();
        const now = Data.timeFromTimestamp(std.time.milliTimestamp());
        if (parent.hashes.getPtr(f.id)) |ptr| {
            if (ptr.* != hash) {
                status = .modified;
                ptr.* = hash;
                data_copy.post.?.updated = now;
            }
        } else {
            status = .created;
            data_copy.post.?.created = now;
            data_copy.post.?.updated = now;
            try parent.hashes.put(f.id, hash);
        }
    }

    // CRITICAL TODO: IMPLEMENT CHANGES
    /////////////////////////////////////////////////////////////////////

    const writer_progress = file_node.start("Write", 1);
    defer writer_progress.end();

    // Regenerate whatever is missing on disk, plus everything whenever the
    // content hash changed.
    var should_generate_origin = !helper.exists(parent.paths.output, file);
    var should_generate_output = !helper.exists(parent.paths.output, url);

    if (status != .unchanged) {
        pass.changes_mutex.lock();
        defer pass.changes_mutex.unlock();
        const real_source = try parent.paths.source.realpathAlloc(allocator, file);
        defer allocator.free(real_source);
        const pre_real_output = try parent.paths.output.realpathAlloc(allocator, ".");
        defer allocator.free(pre_real_output);
        const real_output = try std.fs.path.join(allocator, &.{
            pre_real_output,
            url,
        });
        defer allocator.free(real_output);
        const change: Callbacks.Change = .{
            .status = status,
            .id = f.id,
            .source = real_source,
            .output = real_output,
            .title = f.title,
            .description = f.description,
            .author = f.author,
            .timestamp = data_copy.post.?.updated.raw,
        };
        should_generate_origin = true;
        should_generate_output = true;
        // `change` borrows stack-owned strings; store a deep copy.
        try pass.changes.append(try change.copy(allocator));
    }

    if (should_generate_origin)
        try parent.paths.output.writeFile(.{
            .sub_path = file,
            .data = f.body,
        });

    if (should_generate_output) {
        const json_data = try helper.valueToJson(
            allocator,
            data_copy,
        );
        defer json_data.deinit();
        const second_pass = try helper.template(
            allocator,
            parent.templates.post.?,
            json_data.value,
        );
        defer allocator.free(second_pass);
        try parent.paths.output.writeFile(.{
            .sub_path = url,
            .data = second_pass,
        });
    }

    // Publicly-visible posts are mirrored into the parent's in-memory list,
    // replacing any previous version with the same id.
    if (f.visibility == .public) {
        parent.posts_mutex.lock();
        defer parent.posts_mutex.unlock();
        const arena = parent.arena.allocator();
        const new_post = data_copy.post.?;
        const posts = parent.posts.items;
        for (0..posts.len) |i| {
            const post = posts[i];
            if (post.id == new_post.id) {
                Data.freePost(post, arena);
                posts[i] = try Data.copyPost(new_post, arena);
                return f.visibility;
            }
        }
        try parent.posts.append(
            try Data.copyPost(
                new_post,
                arena,
            ),
        );
    }
    return f.visibility;
}

/// Remove every trace of the post `id` from the parent's shared state and
/// record a `.deleted` change (carrying the last known source path, if any).
pub fn markDeletedById(pass: *Pass, id: Data.Id) !void {
    const parent = pass.parent;
    const allocator = parent.allocator;

    {
        parent.hashes_mutex.lock();
        defer parent.hashes_mutex.unlock();
        _ = parent.hashes.remove(id);
    }

    // FIX: look the source path up BEFORE removing the entry — the original
    // did `swapRemove(id)` first and then `get(id)`, so the recorded change
    // could never carry a path. Also do the lookup under the mutex that
    // guards the map (the original read it unlocked).
    var source: ?[]const u8 = null;
    {
        parent.processed_posts_mutex.lock();
        defer parent.processed_posts_mutex.unlock();
        if (parent.processed_posts.get(id)) |path| {
            source = try allocator.dupe(u8, path);
        }
        _ = parent.processed_posts.swapRemove(id);
    }
    // Don't leak the dupe if appending the change fails below.
    errdefer if (source) |s| allocator.free(s);

    {
        pass.changes_mutex.lock();
        defer pass.changes_mutex.unlock();
        try pass.changes.append(.{
            .timestamp = std.time.milliTimestamp(),
            .status = .deleted,
            .id = id,
            .source = source,
        });
    }

    {
        parent.posts_mutex.lock();
        defer parent.posts_mutex.unlock();
        const posts = parent.posts.items;
        for (0..posts.len) |i| {
            const post = posts[i];
            if (post.id == id) {
                Data.freePost(
                    parent.posts.swapRemove(i),
                    parent.arena.allocator(),
                );
                break;
            }
        }
    }
}

// Assumes file has already been parsed at least once per Makko state.
/// Delete the generated artifacts for the source file at `path` and mark the
/// corresponding post as deleted. Assumes the file has been parsed at least
/// once for this Makko state; when no post maps to `path`, only the mirrored
/// file is removed and the function returns silently.
pub fn deleteFileByPath(pass: *Pass, path: []const u8) !void {
    const parent = pass.parent;
    const paths = parent.paths;

    const file_node = pass.progress.start(path, 3);
    defer file_node.end();

    // Remove the mirrored copy of the source file, if any.
    paths.output.deleteTree(path) catch {};

    // FIX: read the id→path table under its mutex, like every other accessor
    // of `processed_posts` in this file, and iterate the live map rather
    // than a by-value copy of the hashmap struct.
    const id: Data.Id = blk: {
        parent.processed_posts_mutex.lock();
        defer parent.processed_posts_mutex.unlock();
        var maybe_id: ?Data.Id = null;
        var iter = parent.processed_posts.iterator();
        while (iter.next()) |entry| {
            if (std.mem.eql(u8, entry.value_ptr.*, path)) maybe_id = entry.key_ptr.*;
        }
        break :blk maybe_id orelse return;
    };

    // Remove the rendered HTML as well.
    const url = try getURL(path, parent.allocator);
    defer parent.allocator.free(url);
    paths.output.deleteTree(url) catch {};

    try pass.markDeletedById(id);
}

/// Pretty-print the accumulated change list to the parent's log, showing
/// paths relative to the source root where possible.
pub fn printChanges(pass: *Pass) !void {
    if (pass.changes.items.len == 0) return;
    const allocator = pass.parent.allocator;
    const log = pass.parent.log;
    log.header("Changes");

    const source_path = try pass.parent.paths.source.realpathAlloc(allocator, ".");
    defer allocator.free(source_path);

    for (pass.changes.items) |item| {
        if (item.source) |src| {
            // FIX: `.deleted` entries store a relative path, not a realpath
            // under `source_path`; unconditionally slicing past its length
            // was out-of-bounds for them. Only strip a genuine prefix.
            const shown = if (src.len > source_path.len + 1 and
                std.mem.startsWith(u8, src, source_path))
                src[source_path.len + 1 ..]
            else
                src;
            log.raw("- '{s}' ", .{shown});
        } else log.raw("- 'id: {}' ", .{item.id});
        log.raw("[{s}]\n", .{@tagName(item.status)});
    }
}

/// Hand the accumulated changes to the user-configured callbacks, freeing
/// them afterwards. Leaves `pass.changes` empty.
pub fn runCallbacks(pass: *Pass) !void {
    const parent = pass.parent;
    const changes = try pass.changes.toOwnedSlice();
    defer {
        for (changes) |change| change.deinit(parent.allocator);
        pass.changes.allocator.free(changes);
    }
    try parent.origin.value.callbacks.run(
        changes,
        pass.parent.paths.root,
        pass.parent.log,
        parent.allocator,
    );
}