forked from
tangled.org/core
fork
Configure Feed
Select the types of activity you want to include in your feed.
Monorepo for Tangled
fork
Configure Feed
Select the types of activity you want to include in your feed.
1package git
2
3import (
4 "bufio"
5 "context"
6 "crypto/sha256"
7 "fmt"
8 "io"
9 "os/exec"
10 "path"
11 "strings"
12 "time"
13
14 "github.com/dgraph-io/ristretto"
15 "github.com/go-git/go-git/v5/plumbing"
16 "github.com/go-git/go-git/v5/plumbing/object"
17)
18
var (
	// commitCache memoizes the most recent commit seen for each file path,
	// keyed by cacheKey (repo path + revision + file path). Populated lazily
	// by calculateCommitTime; entries never depend on mutable state.
	commitCache *ristretto.Cache
)
22
23func init() {
24 cache, _ := ristretto.NewCache(&ristretto.Config{
25 NumCounters: 1e7,
26 MaxCost: 1 << 30,
27 BufferItems: 64,
28 TtlTickerDurationInSec: 120,
29 })
30 commitCache = cache
31}
32
33// processReader wraps a reader and ensures the associated process is cleaned up
34type processReader struct {
35 io.Reader
36 cmd *exec.Cmd
37 stdout io.ReadCloser
38}
39
40func (pr *processReader) Close() error {
41 if err := pr.stdout.Close(); err != nil {
42 return err
43 }
44 return pr.cmd.Wait()
45}
46
47func (g *GitRepo) streamingGitLog(ctx context.Context, extraArgs ...string) (io.ReadCloser, error) {
48 args := []string{}
49 args = append(args, "log")
50 args = append(args, g.h.String())
51 args = append(args, extraArgs...)
52
53 cmd := exec.CommandContext(ctx, "git", args...)
54 cmd.Dir = g.path
55
56 stdout, err := cmd.StdoutPipe()
57 if err != nil {
58 return nil, err
59 }
60
61 if err := cmd.Start(); err != nil {
62 return nil, err
63 }
64
65 return &processReader{
66 Reader: stdout,
67 cmd: cmd,
68 stdout: stdout,
69 }, nil
70}
71
// commit is a lightweight projection of a git commit holding only the
// fields needed to annotate tree entries with their latest change.
type commit struct {
	hash    plumbing.Hash // commit id (%H)
	when    time.Time     // author date parsed from --date=iso output (%ad)
	files   []string      // paths touched by this commit, plus their ancestor directories
	message string        // commit subject line (%s)
}
78
79func cacheKey(g *GitRepo, path string) string {
80 sep := byte(':')
81 hash := sha256.Sum256(fmt.Append([]byte{}, g.path, sep, g.h.String(), sep, path))
82 return fmt.Sprintf("%x", hash)
83}
84
85func (g *GitRepo) calculateCommitTimeIn(ctx context.Context, subtree *object.Tree, parent string, timeout time.Duration) (map[string]commit, error) {
86 ctx, cancel := context.WithTimeout(ctx, timeout)
87 defer cancel()
88 return g.calculateCommitTime(ctx, subtree, parent)
89}
90
91func (g *GitRepo) calculateCommitTime(ctx context.Context, subtree *object.Tree, parent string) (map[string]commit, error) {
92 filesToDo := make(map[string]struct{})
93 filesDone := make(map[string]commit)
94 for _, e := range subtree.Entries {
95 fpath := path.Clean(path.Join(parent, e.Name))
96 filesToDo[fpath] = struct{}{}
97 }
98
99 for _, e := range subtree.Entries {
100 f := path.Clean(path.Join(parent, e.Name))
101 cacheKey := cacheKey(g, f)
102 if cached, ok := commitCache.Get(cacheKey); ok {
103 filesDone[f] = cached.(commit)
104 delete(filesToDo, f)
105 } else {
106 filesToDo[f] = struct{}{}
107 }
108 }
109
110 if len(filesToDo) == 0 {
111 return filesDone, nil
112 }
113
114 ctx, cancel := context.WithCancel(ctx)
115 defer cancel()
116
117 pathSpec := "."
118 if parent != "" {
119 pathSpec = parent
120 }
121 output, err := g.streamingGitLog(ctx, "--pretty=format:%H,%ad,%s", "--date=iso", "--name-only", "--", pathSpec)
122 if err != nil {
123 return nil, err
124 }
125 defer output.Close() // Ensure the git process is properly cleaned up
126
127 reader := bufio.NewReader(output)
128 var current commit
129 for {
130 line, err := reader.ReadString('\n')
131 if err != nil && err != io.EOF {
132 return nil, err
133 }
134 line = strings.TrimSpace(line)
135
136 if line == "" {
137 if !current.hash.IsZero() {
138 // we have a fully parsed commit
139 for _, f := range current.files {
140 if _, ok := filesToDo[f]; ok {
141 filesDone[f] = current
142 delete(filesToDo, f)
143 commitCache.Set(cacheKey(g, f), current, 0)
144 }
145 }
146
147 if len(filesToDo) == 0 {
148 cancel()
149 break
150 }
151 current = commit{}
152 }
153 } else if current.hash.IsZero() {
154 parts := strings.SplitN(line, ",", 3)
155 if len(parts) == 3 {
156 current.hash = plumbing.NewHash(parts[0])
157 current.when, _ = time.Parse("2006-01-02 15:04:05 -0700", parts[1])
158 current.message = parts[2]
159 }
160 } else {
161 // all ancestors along this path should also be included
162 file := path.Clean(line)
163 ancestors := ancestors(file)
164 current.files = append(current.files, file)
165 current.files = append(current.files, ancestors...)
166 }
167
168 if err == io.EOF {
169 break
170 }
171 }
172
173 return filesDone, nil
174}
175
176func ancestors(p string) []string {
177 var ancestors []string
178
179 for {
180 p = path.Dir(p)
181 if p == "." || p == "/" {
182 break
183 }
184 ancestors = append(ancestors, p)
185 }
186 return ancestors
187}