README.md (+3)
···
 * Files with a certain name, when placed in the root of a site, have special functions:
   - [Netlify `_redirects`][_redirects] file can be used to specify HTTP redirect and rewrite rules. The _git-pages_ implementation currently does not support placeholders, query parameters, or conditions, and may differ from Netlify in other minor ways. If you find that a supported `_redirects` file feature does not work the same as on Netlify, please file an issue. (Note that _git-pages_ does not perform URL normalization; `/foo` and `/foo/` are *not* the same, unlike with Netlify.)
   - [Netlify `_headers`][_headers] file can be used to specify custom HTTP response headers (if allowlisted by configuration). In particular, this is useful to enable [CORS requests][cors]. The _git-pages_ implementation may differ from Netlify in minor ways; if you find that a `_headers` file feature does not work the same as on Netlify, please file an issue.
+* Incremental updates can be made using `PUT` or `PATCH` requests where the body contains an archive (both tar and zip are supported).
+  - Any archive entry that is a symlink to `/git/pages/<git-sha256>` is replaced with an existing manifest entry for the same site whose git blob hash matches `<git-sha256>`. If there is no existing manifest entry with the specified git hash, the update fails with a `422 Unprocessable Entity`.
+  - For this error response only, if the negotiated content type is `application/vnd.git-pages.unresolved`, the response will contain the `<git-sha256>` of each unresolved reference, one per line.
 * Support for SHA-256 Git hashes is [limited by go-git][go-git-sha256]; once go-git implements the required features, _git-pages_ will automatically gain support for SHA-256 Git hashes. Note that shallow clones (used by _git-pages_ to conserve bandwidth if available) aren't supported yet in the Git protocol as of 2025.

 [_redirects]: https://docs.netlify.com/manage/routing/redirects/overview/
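The new bullets about incremental updates describe the upload protocol only in prose, so here is a minimal client-side sketch of such a request in Go. The site URL, file names, blob hash, and the `application/x-tar` content type are placeholders or assumptions rather than values taken from the repository; the sketch only illustrates the symlink-to-`/git/pages/<git-sha256>` convention and the `Accept` header that selects the machine-readable `422` body.

```go
package main

import (
	"archive/tar"
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	var buf bytes.Buffer
	tw := tar.NewWriter(&buf)

	// A file uploaded in full.
	page := []byte("<h1>hello</h1>\n")
	tw.WriteHeader(&tar.Header{Name: "index.html", Mode: 0o644, Size: int64(len(page))})
	tw.Write(page)

	// A file the server already stores, referenced by its git blob hash instead of
	// being re-uploaded: a symlink entry pointing at /git/pages/<git-sha256>.
	tw.WriteHeader(&tar.Header{
		Typeflag: tar.TypeSymlink,
		Name:     "css/style.css",
		Linkname: "/git/pages/<git-sha256>", // substitute the real blob hash
	})
	tw.Close()

	// Placeholder URL; the actual upload endpoint depends on the deployment.
	req, err := http.NewRequest(http.MethodPatch, "https://pages.example.org/", &buf)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/x-tar") // assumed tar media type
	// Ask for the machine-readable list of unresolved hashes if the update fails.
	req.Header.Set("Accept", "application/vnd.git-pages.unresolved, text/plain")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusUnprocessableEntity {
		// One unresolved <git-sha256> per line; upload those files in full and retry.
		body, _ := io.ReadAll(resp.Body)
		fmt.Printf("unresolved references:\n%s", body)
	}
}
```

On a `422 Unprocessable Entity` with the negotiated `application/vnd.git-pages.unresolved` type, the body lists one unresolved hash per line; a client can add those files to the archive in full and retry.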
src/http.go (+58 -19)
···
 var httpAcceptRegexp = regexp.MustCompile(`` +
 	// token optionally prefixed by whitespace
-	`^[ \t]*([a-zA-Z0-9$!#$%&'*+.^_\x60|~-]+)` +
+	`^[ \t]*([a-zA-Z0-9$!#$%&'*+./^_\x60|~-]+)` +
 	// quality value prefixed by a semicolon optionally surrounded by whitespace
 	`(?:[ \t]*;[ \t]*q=(0(?:\.[0-9]{1,3})?|1(?:\.0{1,3})?))?` +
 	// optional whitespace followed by comma or end of line
···
 	qval float64
 }

-type HTTPEncodings struct {
-	encodings []httpAcceptOffer
-}
-
-func ParseHTTPAcceptEncoding(headerValue string) (result HTTPEncodings) {
+func parseGenericAcceptHeader(headerValue string) (result []httpAcceptOffer) {
 	for headerValue != "" {
 		matches := httpAcceptRegexp.FindStringSubmatch(headerValue)
 		if matches == nil {
-			return HTTPEncodings{}
+			return
 		}
-		enc := httpAcceptOffer{strings.ToLower(matches[1]), 1.0}
+		offer := httpAcceptOffer{strings.ToLower(matches[1]), 1.0}
 		if matches[2] != "" {
-			enc.qval, _ = strconv.ParseFloat(matches[2], 64)
+			offer.qval, _ = strconv.ParseFloat(matches[2], 64)
 		}
-		result.encodings = append(result.encodings, enc)
+		result = append(result, offer)
 		headerValue = headerValue[len(matches[0]):]
 	}
+	return
+}
+
+func preferredAcceptOffer(offers []httpAcceptOffer) string {
+	slices.SortStableFunc(offers, func(a, b httpAcceptOffer) int {
+		return -cmp.Compare(a.qval, b.qval)
+	})
+	for _, offer := range offers {
+		if offer.qval != 0 {
+			return offer.code
+		}
+	}
+	return ""
+}
+
+type HTTPContentTypes struct {
+	contentTypes []httpAcceptOffer
+}
+
+func ParseAcceptHeader(headerValue string) (result HTTPContentTypes) {
+	result = HTTPContentTypes{parseGenericAcceptHeader(headerValue)}
+	return
+}
+
+func (e *HTTPContentTypes) Negotiate(offers ...string) string {
+	prefs := make(map[string]float64, len(offers))
+	for _, code := range offers {
+		prefs[code] = 0
+	}
+	for _, ctyp := range e.contentTypes {
+		if ctyp.code == "*" || ctyp.code == "*/*" {
+			for code := range prefs {
+				prefs[code] = ctyp.qval
+			}
+		} else if _, ok := prefs[ctyp.code]; ok {
+			prefs[ctyp.code] = ctyp.qval
+		}
+	}
+	ctyps := make([]httpAcceptOffer, len(offers))
+	for idx, code := range offers {
+		ctyps[idx] = httpAcceptOffer{code, prefs[code]}
+	}
+	return preferredAcceptOffer(ctyps)
+}
+
+type HTTPEncodings struct {
+	encodings []httpAcceptOffer
+}
+
+func ParseAcceptEncodingHeader(headerValue string) (result HTTPEncodings) {
+	result = HTTPEncodings{parseGenericAcceptHeader(headerValue)}
 	if len(result.encodings) == 0 {
 		// RFC 9110 says (https://httpwg.org/specs/rfc9110.html#field.accept-encoding):
 		// "If no Accept-Encoding header field is in the request, any content
···
 	for idx, code := range offers {
 		encs[idx] = httpAcceptOffer{code, prefs[code]}
 	}
-	slices.SortStableFunc(encs, func(a, b httpAcceptOffer) int {
-		return -cmp.Compare(a.qval, b.qval)
-	})
-	for _, enc := range encs {
-		if enc.qval != 0 {
-			return enc.code
-		}
-	}
-	return ""
+	return preferredAcceptOffer(encs)
 }
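A brief usage sketch of the refactored helpers, assuming it sits in the same package as `src/http.go`; the header value is invented for illustration, and this function is not part of the change set. The `/` added to the token character class above is what allows full media types such as `text/plain` to be parsed.

```go
// Hypothetical caller; not part of the change set.
func exampleNegotiateContentType() string {
	accepted := ParseAcceptHeader("application/vnd.git-pages.unresolved, text/plain;q=0.5")
	// Negotiate returns the acceptable offer with the highest quality value,
	// or "" when the client accepts none of the offers.
	return accepted.Negotiate("application/vnd.git-pages.unresolved", "text/plain")
}
```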
src/pages.go (+27 -8)
···
 	// we only offer `/.git-pages/archive.tar` and not the `.tar.gz`/`.tar.zst` variants
 	// because HTTP can already request compression using the `Content-Encoding` mechanism
-	acceptedEncodings := ParseHTTPAcceptEncoding(r.Header.Get("Accept-Encoding"))
+	acceptedEncodings := ParseAcceptEncodingHeader(r.Header.Get("Accept-Encoding"))
 	negotiated := acceptedEncodings.Negotiate("zstd", "gzip", "identity")
 	if negotiated != "" {
 		w.Header().Set("Content-Encoding", negotiated)
···
 		defer closer.Close()
 	}

-	offeredEncodings := []string{}
-	acceptedEncodings := ParseHTTPAcceptEncoding(r.Header.Get("Accept-Encoding"))
+	var offeredEncodings []string
+	acceptedEncodings := ParseAcceptEncodingHeader(r.Header.Get("Accept-Encoding"))
 	negotiatedEncoding := true
 	switch entry.GetTransform() {
 	case Transform_Identity:
···
 	if !negotiatedEncoding {
 		w.Header().Set("Accept-Encoding", strings.Join(offeredEncodings, ", "))
 		w.WriteHeader(http.StatusNotAcceptable)
-		return fmt.Errorf("no supported content encodings (Accept-Encoding: %q)",
+		return fmt.Errorf("no supported content encodings (Accept-Encoding: %s)",
 			r.Header.Get("Accept-Encoding"))
 	}
···
 		result = UpdateFromArchive(ctx, webRoot, contentType, reader)
 	}

-	return reportUpdateResult(w, result)
+	return reportUpdateResult(w, r, result)
 }

 func patchPage(w http.ResponseWriter, r *http.Request) error {
···
 	contentType := getMediaType(r.Header.Get("Content-Type"))
 	reader := http.MaxBytesReader(w, r.Body, int64(config.Limits.MaxSiteSize.Bytes()))
 	result := PartialUpdateFromArchive(ctx, webRoot, contentType, reader, parents)
-	return reportUpdateResult(w, result)
+	return reportUpdateResult(w, r, result)
 }

-func reportUpdateResult(w http.ResponseWriter, result UpdateResult) error {
+func reportUpdateResult(w http.ResponseWriter, r *http.Request, result UpdateResult) error {
+	var unresolvedRefErr UnresolvedRefError
+	if result.outcome == UpdateError && errors.As(result.err, &unresolvedRefErr) {
+		offeredContentTypes := []string{"application/vnd.git-pages.unresolved", "text/plain"}
+		acceptedContentTypes := ParseAcceptHeader(r.Header.Get("Accept"))
+		switch acceptedContentTypes.Negotiate(offeredContentTypes...) {
+		default:
+			w.Header().Set("Accept", strings.Join(offeredContentTypes, ", "))
+			w.WriteHeader(http.StatusNotAcceptable)
+			return fmt.Errorf("no supported content types (Accept: %s)", r.Header.Get("Accept"))
+		case "application/vnd.git-pages.unresolved":
+			w.WriteHeader(http.StatusUnprocessableEntity)
+			for _, missingRef := range unresolvedRefErr.missing {
+				fmt.Fprintln(w, missingRef)
+			}
+			return nil
+		case "text/plain":
+			// handled below
+		}
+	}
+
 	switch result.outcome {
 	case UpdateError:
-		var unresolvedRefErr UnresolvedRefError
 		if errors.Is(result.err, ErrManifestTooLarge) {
 			w.WriteHeader(http.StatusRequestEntityTooLarge)
 		} else if errors.Is(result.err, errArchiveFormat) {