workflow: move workflow parsing into compiler #487
merged · opened by oppi.li 5 months ago · targeting master from push-vnxxyxursomy
this simplifies the error collection logic a lot.
Signed-off-by: oppiliappan <me@oppi.li>
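roughly, the call sites end up looking like this after the change (a minimal sketch, not the actual knotserver code: the package name, the workflow import path, the helper name, and the files/trigger plumbing are assumptions; only the Parse/Compile/Diagnostics flow comes from this PR):

// a minimal sketch of the new call pattern; file reading and trigger
// construction are stand-ins for what ingester.go / internal.go already do
package example

import (
    "encoding/json"
    "fmt"

    "tangled.sh/tangled.sh/core/api/tangled"
    "tangled.sh/tangled.sh/core/workflow" // import path inferred, not confirmed by the diff
)

func compilePipeline(files map[string][]byte, trigger tangled.Pipeline_TriggerMetadata) ([]byte, error) {
    c := workflow.Compiler{Trigger: trigger}

    // collect raw files; parsing no longer happens at the call site
    var raw workflow.RawPipeline
    for name, contents := range files {
        raw = append(raw, workflow.RawWorkflow{Name: name, Contents: contents})
    }

    // Parse records per-file parse errors in c.Diagnostics instead of
    // returning them; Compile adds its own diagnostics on top
    cp := c.Compile(c.Parse(raw))

    // error reporting becomes a single pass over the collected diagnostics
    if c.Diagnostics.IsEmpty() {
        fmt.Println("success: pipeline compiled with no diagnostics")
    }
    for _, e := range c.Diagnostics.Errors {
        fmt.Println(e.String())
    }
    for _, w := range c.Diagnostics.Warnings {
        fmt.Println(w.String())
    }

    return json.Marshal(cp)
}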
Changed files (+65 -49):
  knotserver/ingester.go
  knotserver/internal.go
  workflow/compile.go

knotserver/ingester.go (+6 -10)
···
         return err
     }

-    var pipeline workflow.Pipeline
+    var pipeline workflow.RawPipeline
     for _, e := range workflowDir {
         if !e.IsFile {
             continue
···
             continue
         }

-        wf, err := workflow.FromFile(e.Name, contents)
-        if err != nil {
-            // TODO: log here, respond to client that is pushing
-            h.l.Error("failed to parse workflow", "err", err, "path", fpath)
-            continue
-        }
-
-        pipeline = append(pipeline, wf)
+        pipeline = append(pipeline, workflow.RawWorkflow{
+            Name:     e.Name,
+            Contents: contents,
+        })
     }

     trigger := tangled.Pipeline_PullRequestTriggerData{
···
         },
     }

-    cp := compiler.Compile(pipeline)
+    cp := compiler.Compile(compiler.Parse(pipeline))
     eventJson, err := json.Marshal(cp)
     if err != nil {
         return err
knotserver/internal.go (+14 -34)
···
         return err
     }

-    pipelineParseErrors := []string{}
-
-    var pipeline workflow.Pipeline
+    var pipeline workflow.RawPipeline
     for _, e := range workflowDir {
         if !e.IsFile {
             continue
···
             continue
         }

-        wf, err := workflow.FromFile(e.Name, contents)
-        if err != nil {
-            h.l.Error("failed to parse workflow", "err", err, "path", fpath)
-            pipelineParseErrors = append(pipelineParseErrors, fmt.Sprintf("- at %s: %s\n", fpath, err))
-            continue
-        }
-
-        pipeline = append(pipeline, wf)
+        pipeline = append(pipeline, workflow.RawWorkflow{
+            Name:     e.Name,
+            Contents: contents,
+        })
     }

     trigger := tangled.Pipeline_PushTriggerData{
···
         },
     }

-    cp := compiler.Compile(pipeline)
+    cp := compiler.Compile(compiler.Parse(pipeline))
     eventJson, err := json.Marshal(cp)
     if err != nil {
         return err
     }

     if pushOptions.verboseCi {
-        hasDiagnostics := false
-        if len(pipelineParseErrors) > 0 {
-            hasDiagnostics = true
-            *clientMsgs = append(*clientMsgs, "error: failed to parse workflow(s):")
-            for _, error := range pipelineParseErrors {
-                *clientMsgs = append(*clientMsgs, error)
-            }
-        }
-        if len(compiler.Diagnostics.Errors) > 0 {
-            hasDiagnostics = true
-            *clientMsgs = append(*clientMsgs, "error(s) on pipeline:")
-            for _, error := range compiler.Diagnostics.Errors {
-                *clientMsgs = append(*clientMsgs, fmt.Sprintf("- %s:", error))
-            }
+        if compiler.Diagnostics.IsEmpty() {
+            *clientMsgs = append(*clientMsgs, "success: pipeline compiled with no diagnostics")
         }
-        if len(compiler.Diagnostics.Warnings) > 0 {
-            hasDiagnostics = true
-            *clientMsgs = append(*clientMsgs, "warning(s) on pipeline:")
-            for _, warning := range compiler.Diagnostics.Warnings {
-                *clientMsgs = append(*clientMsgs, fmt.Sprintf("- at %s: %s: %s", warning.Path, warning.Type, warning.Reason))
-            }
+
+        for _, e := range compiler.Diagnostics.Errors {
+            *clientMsgs = append(*clientMsgs, e.String())
         }
-        if !hasDiagnostics {
-            *clientMsgs = append(*clientMsgs, "success: pipeline compiled with no diagnostics")
+
+        for _, w := range compiler.Diagnostics.Warnings {
+            *clientMsgs = append(*clientMsgs, w.String())
         }
     }
workflow/compile.go (+45 -5)
···
     "tangled.sh/tangled.sh/core/api/tangled"
 )

+type RawWorkflow struct {
+    Name     string
+    Contents []byte
+}
+
+type RawPipeline = []RawWorkflow
+
 type Compiler struct {
     Trigger     tangled.Pipeline_TriggerMetadata
     Diagnostics Diagnostics
 }

 type Diagnostics struct {
-    Errors   []error
+    Errors   []Error
     Warnings []Warning
 }

+func (d *Diagnostics) IsEmpty() bool {
+    return len(d.Errors) == 0 && len(d.Warnings) == 0
+}
+
 func (d *Diagnostics) Combine(o Diagnostics) {
     d.Errors = append(d.Errors, o.Errors...)
     d.Warnings = append(d.Warnings, o.Warnings...)
···
     d.Warnings = append(d.Warnings, Warning{path, kind, reason})
 }

-func (d *Diagnostics) AddError(err error) {
-    d.Errors = append(d.Errors, err)
+func (d *Diagnostics) AddError(path string, err error) {
+    d.Errors = append(d.Errors, Error{path, err})
 }

 func (d Diagnostics) IsErr() bool {
     return len(d.Errors) != 0
 }

+type Error struct {
+    Path  string
+    Error error
+}
+
+func (e Error) String() string {
+    return fmt.Sprintf("error: %s: %s", e.Path, e.Error.Error())
+}
+
 type Warning struct {
     Path   string
     Type   WarningKind
     Reason string
 }

+func (w Warning) String() string {
+    return fmt.Sprintf("warning: %s: %s: %s", w.Path, w.Type, w.Reason)
+}
+
 type WarningKind string

 var (
···
     InvalidConfiguration WarningKind = "invalid configuration"
 )

+func (compiler *Compiler) Parse(p RawPipeline) Pipeline {
+    var pp Pipeline
+
+    for _, w := range p {
+        wf, err := FromFile(w.Name, w.Contents)
+        if err != nil {
+            compiler.Diagnostics.AddError(w.Name, err)
+            continue
+        }
+
+        pp = append(pp, wf)
+    }
+
+    return pp
+}
+
 // convert a repositories' workflow files into a fully compiled pipeline that runners accept
 func (compiler *Compiler) Compile(p Pipeline) tangled.Pipeline {
     cp := tangled.Pipeline{
         TriggerMetadata: &compiler.Trigger,
     }

-    for _, w := range p {
-        cw := compiler.compileWorkflow(w)
+    for _, wf := range p {
+        cw := compiler.compileWorkflow(wf)

         // empty workflows are not added to the pipeline
         if len(cw.Steps) == 0 {
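with the String() helpers, everything the knot reports back to a pushing client under verbose CI follows one of two shapes, for example (hypothetical file name, error, and reason):

error: build.yml: <error returned by workflow.FromFile>
warning: build.yml: invalid configuration: <reason>

"invalid configuration" is the WarningKind already defined in workflow/compile.go; any other kind renders the same warning: <path>: <kind>: <reason> way.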