[mirror] Command-line application for uploading a site to a git-pages server

Perform streamed upload of site data.

This both speeds up uploads and eliminates the possibility of running
out of memory for large sites.
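
For context, a minimal sketch of the io.Pipe streaming pattern this change adopts. It assumes the zstd encoder comes from github.com/klauspost/compress/zstd; the uploadSite helper and the example URL are illustrative only and not part of the mirror code.

package main

import (
	"archive/tar"
	"fmt"
	"io"
	"io/fs"
	"net/http"
	"os"

	"github.com/klauspost/compress/zstd"
)

// uploadSite streams root as a zstd-compressed tar archive to url via HTTP PUT
// without ever buffering the whole archive in memory.
func uploadSite(url string, root fs.FS) error {
	reader, writer := io.Pipe()

	// The producer goroutine writes the archive into the pipe while the HTTP
	// client reads the other end as the request body; io.Pipe blocks writes
	// until they are read, so memory use stays bounded regardless of site size.
	go func() {
		zstdWriter, err := zstd.NewWriter(writer)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		tarWriter := tar.NewWriter(zstdWriter)
		if err := tarWriter.AddFS(root); err != nil {
			writer.CloseWithError(err)
			return
		}
		if err := tarWriter.Close(); err != nil {
			writer.CloseWithError(err)
			return
		}
		// Closing the pipe writer signals end-of-body to the HTTP client.
		writer.CloseWithError(zstdWriter.Close())
	}()

	request, err := http.NewRequest("PUT", url, reader)
	if err != nil {
		return err
	}
	// The archive size is unknown up front, so mark the body length as
	// unknown; the transport then sends it with chunked transfer encoding.
	request.ContentLength = -1
	request.Header.Add("Content-Type", "application/x-tar+zstd")

	response, err := http.DefaultClient.Do(request)
	if err != nil {
		return err
	}
	return response.Body.Close()
}

func main() {
	// Hypothetical invocation: upload the ./public directory to an example URL.
	if err := uploadSite("https://git-pages.example/my-site", os.DirFS("public")); err != nil {
		fmt.Fprintf(os.Stderr, "error: %s\n", err)
		os.Exit(1)
	}
}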

Changed files
+14 -10
main.go
···
     })
 }

-func archiveFS(root fs.FS) (result []byte, err error) {
-    buffer := bytes.Buffer{}
-    zstdWriter, _ := zstd.NewWriter(&buffer)
+func archiveFS(writer io.Writer, root fs.FS) (err error) {
+    zstdWriter, _ := zstd.NewWriter(writer)
     tarWriter := tar.NewWriter(zstdWriter)
     err = tarWriter.AddFS(root)
     if err != nil {
···
     if err != nil {
         return
     }
-    result = buffer.Bytes()
     return
 }
···
         }
     }

-    requestBody, err := archiveFS(uploadDirFS.FS())
-    if err != nil {
-        fmt.Fprintf(os.Stderr, "error: %s\n", err)
-        os.Exit(1)
-    }
+    // Stream archive data without ever loading the entire working set into RAM.
+    reader, writer := io.Pipe()
+    go func() {
+        err = archiveFS(writer, uploadDirFS.FS())
+        if err != nil {
+            fmt.Fprintf(os.Stderr, "error: %s\n", err)
+            os.Exit(1)
+        }
+        writer.Close()
+    }()

-    request, err = http.NewRequest("PUT", siteURL.String(), bytes.NewReader(requestBody))
+    request, err = http.NewRequest("PUT", siteURL.String(), reader)
     if err != nil {
         fmt.Fprintf(os.Stderr, "error: %s\n", err)
         os.Exit(1)
     }
+    request.ContentLength = -1
     request.Header.Add("Content-Type", "application/x-tar+zstd")

 case *deleteFlag:
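
A note on the design: because the archive is produced on the fly, its size is not known when the request is created. Setting request.ContentLength to -1 marks the body length as unknown, and Go's HTTP transport then sends the body with chunked Transfer-Encoding instead of a Content-Length header.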