Monorepo for Tangled tangled.org

spindle: move the clone step out of nixery into a shared package for all spindle engines #827

merged · opened by evan.jarrett.net · targeting master from evan.jarrett.net/core:spindle-clone
Labels: refactor
Assignee: none yet
Participants: 3
AT URI
at://did:plc:pddp4xt5lgnv2qsegbzzs4xg/sh.tangled.repo.pull/3m5xkfp2rcn22
+553 -101
Diff #1
+2 -4
appview/db/pipeline.go
···

  // this is a mega query, but the most useful one:
  // get N pipelines, for each one get the latest status of its N workflows
- func GetPipelineStatuses(e Execer, limit int, filters ...filter) ([]models.Pipeline, error) {
+ func GetPipelineStatuses(e Execer, filters ...filter) ([]models.Pipeline, error) {
  	var conditions []string
  	var args []any
  	for _, filter := range filters {
···
  	join
  		triggers t ON p.trigger_id = t.id
  	%s
- 	order by p.created desc
- 	limit %d
- 	`, whereClause, limit)
+ 	`, whereClause)

  	rows, err := e.Query(query, args...)
  	if err != nil {
+1 -1
appview/pages/templates/repo/compare/compare.html
···
  {{ end }}

  {{ define "mainLayout" }}
-   <div class="px-1 flex-grow col-span-full flex flex-col gap-4">
+   <div class="px-1 col-span-full flex flex-col gap-4">
      {{ block "contentLayout" . }}
        {{ block "content" . }}{{ end }}
      {{ end }}
+1 -1
appview/pages/templates/repo/settings/general.html
···
        {{ i "loader-circle" "w-4 h-4 animate-spin hidden group-[.htmx-request]:inline" }}
      </button>
    </div>
-   </fieldset>
+   <fieldset>
  </form>
  {{ end }}
+1
appview/pages/templates/user/fragments/editBio.html
···
    class="py-1 px-1 w-full"
    name="pronouns"
    placeholder="they/them"
+   pattern="[a-zA-Z]{1,6}[\/\s\-][a-zA-Z]{1,6}"
    value="{{ $pronouns }}"
  >
  </div>
-3
appview/pipelines/pipelines.go
···

  	ps, err := db.GetPipelineStatuses(
  		p.db,
- 		30,
  		db.FilterEq("repo_owner", repoInfo.OwnerDid),
  		db.FilterEq("repo_name", repoInfo.Name),
  		db.FilterEq("knot", repoInfo.Knot),
···

  	ps, err := db.GetPipelineStatuses(
  		p.db,
- 		1,
  		db.FilterEq("repo_owner", repoInfo.OwnerDid),
  		db.FilterEq("repo_name", repoInfo.Name),
  		db.FilterEq("knot", repoInfo.Knot),
···

  	ps, err := db.GetPipelineStatuses(
  		p.db,
- 		1,
  		db.FilterEq("repo_owner", repoInfo.OwnerDid),
  		db.FilterEq("repo_name", repoInfo.Name),
  		db.FilterEq("knot", repoInfo.Knot),
-2
appview/pulls/pulls.go
···

  	ps, err := db.GetPipelineStatuses(
  		s.db,
- 		len(shas),
  		db.FilterEq("repo_owner", repoInfo.OwnerDid),
  		db.FilterEq("repo_name", repoInfo.Name),
  		db.FilterEq("knot", repoInfo.Knot),
···
  	repoInfo := f.RepoInfo(user)
  	ps, err := db.GetPipelineStatuses(
  		s.db,
- 		len(shas),
  		db.FilterEq("repo_owner", repoInfo.OwnerDid),
  		db.FilterEq("repo_name", repoInfo.Name),
  		db.FilterEq("knot", repoInfo.Knot),
+10 -14
appview/repo/compare.go
···
  	}

  	// if user is navigating to one of
- 	// /compare/{base}...{head}
  	// /compare/{base}/{head}
- 	var base, head string
- 	rest := chi.URLParam(r, "*")
-
- 	var parts []string
- 	if strings.Contains(rest, "...") {
- 		parts = strings.SplitN(rest, "...", 2)
- 	} else if strings.Contains(rest, "/") {
- 		parts = strings.SplitN(rest, "/", 2)
- 	}
-
- 	if len(parts) == 2 {
- 		base = parts[0]
- 		head = parts[1]
+ 	// /compare/{base}...{head}
+ 	base := chi.URLParam(r, "base")
+ 	head := chi.URLParam(r, "head")
+ 	if base == "" && head == "" {
+ 		rest := chi.URLParam(r, "*") // master...feature/xyz
+ 		parts := strings.SplitN(rest, "...", 2)
+ 		if len(parts) == 2 {
+ 			base = parts[0]
+ 			head = parts[1]
+ 		}
  	}

  	base, _ = url.PathUnescape(base)
+14 -1
appview/repo/repo_util.go
···
  package repo

  import (
+ 	"crypto/rand"
+ 	"math/big"
  	"slices"
  	"sort"
  	"strings"
···
  	return
  }

+ func randomString(n int) string {
+ 	const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ 	result := make([]byte, n)
+
+ 	for i := 0; i < n; i++ {
+ 		n, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letters))))
+ 		result[i] = letters[n.Int64()]
+ 	}
+
+ 	return string(result)
+ }
+
  // grab pipelines from DB and munge that into a hashmap with commit sha as key
  //
  // golang is so blessed that it requires 35 lines of imperative code for this
···

  	ps, err := db.GetPipelineStatuses(
  		d,
- 		len(shas),
  		db.FilterEq("repo_owner", repoInfo.OwnerDid),
  		db.FilterEq("repo_name", repoInfo.Name),
  		db.FilterEq("knot", repoInfo.Knot),
+1
appview/repo/router.go
···
  		// for example:
  		// /compare/master...some/feature
  		// /compare/master...example.com:another/feature <- this is a fork
+ 		r.Get("/{base}/{head}", rp.Compare)
  		r.Get("/*", rp.Compare)
  	})
+1 -1
nix/pkgs/knot-unwrapped.nix
···
    sqlite-lib,
    src,
  }: let
-   version = "1.11.0-alpha";
+   version = "1.9.1-alpha";
  in
    buildGoApplication {
      pname = "knot";
+1 -1
spindle/engines/nixery/engine.go
···
  	setup := &setupSteps{}

  	setup.addStep(nixConfStep())
- 	setup.addStep(cloneStep(twf, *tpl.TriggerMetadata, e.cfg.Server.Dev))
+ 	setup.addStep(models.BuildCloneStep(twf, *tpl.TriggerMetadata, workspaceDir, e.cfg.Server.Dev))
  	// this step could be empty
  	if s := dependencyStep(dwf.Dependencies); s != nil {
  		setup.addStep(*s)
-73
spindle/engines/nixery/setup_steps.go
···
  import (
  	"fmt"
- 	"path"
  	"strings"
-
- 	"tangled.org/core/api/tangled"
- 	"tangled.org/core/workflow"
  )

  func nixConfStep() Step {
···
  		command: setupCmd,
  		name:    "Configure Nix",
  	}
- }
-
- // cloneOptsAsSteps processes clone options and adds corresponding steps
- // to the beginning of the workflow's step list if cloning is not skipped.
- //
- // the steps to do here are:
- //   - git init
- //   - git remote add origin <url>
- //   - git fetch --depth=<d> --recurse-submodules=<yes|no> <sha>
- //   - git checkout FETCH_HEAD
- func cloneStep(twf tangled.Pipeline_Workflow, tr tangled.Pipeline_TriggerMetadata, dev bool) Step {
- 	if twf.Clone.Skip {
- 		return Step{}
- 	}
-
- 	var commands []string
-
- 	// initialize git repo in workspace
- 	commands = append(commands, "git init")
-
- 	// add repo as git remote
- 	scheme := "https://"
- 	if dev {
- 		scheme = "http://"
- 		tr.Repo.Knot = strings.ReplaceAll(tr.Repo.Knot, "localhost", "host.docker.internal")
- 	}
- 	url := scheme + path.Join(tr.Repo.Knot, tr.Repo.Did, tr.Repo.Repo)
- 	commands = append(commands, fmt.Sprintf("git remote add origin %s", url))
-
- 	// run git fetch
- 	{
- 		var fetchArgs []string
-
- 		// default clone depth is 1
- 		depth := 1
- 		if twf.Clone.Depth > 1 {
- 			depth = int(twf.Clone.Depth)
- 		}
- 		fetchArgs = append(fetchArgs, fmt.Sprintf("--depth=%d", depth))
-
- 		// optionally recurse submodules
- 		if twf.Clone.Submodules {
- 			fetchArgs = append(fetchArgs, "--recurse-submodules=yes")
- 		}
-
- 		// set remote to fetch from
- 		fetchArgs = append(fetchArgs, "origin")
-
- 		// set revision to checkout
- 		switch workflow.TriggerKind(tr.Kind) {
- 		case workflow.TriggerKindManual:
- 			// TODO: unimplemented
- 		case workflow.TriggerKindPush:
- 			fetchArgs = append(fetchArgs, tr.Push.NewSha)
- 		case workflow.TriggerKindPullRequest:
- 			fetchArgs = append(fetchArgs, tr.PullRequest.SourceSha)
- 		}
-
- 		commands = append(commands, fmt.Sprintf("git fetch %s", strings.Join(fetchArgs, " ")))
- 	}
-
- 	// run git checkout
- 	commands = append(commands, "git checkout FETCH_HEAD")
-
- 	cloneStep := Step{
- 		command: strings.Join(commands, "\n"),
- 		name:    "Clone repository into workspace",
- 	}
- 	return cloneStep
  }

  // dependencyStep processes dependencies defined in the workflow.
+157
spindle/models/clone.go
···
+ package models
+
+ import (
+ 	"fmt"
+ 	"strings"
+
+ 	"tangled.org/core/api/tangled"
+ 	"tangled.org/core/workflow"
+ )
+
+ type CloneStep struct {
+ 	name     string
+ 	kind     StepKind
+ 	commands []string
+ }
+
+ func (s CloneStep) Name() string {
+ 	return s.name
+ }
+
+ func (s CloneStep) Commands() []string {
+ 	return s.commands
+ }
+
+ func (s CloneStep) Command() string {
+ 	return strings.Join(s.commands, "\n")
+ }
+
+ func (s CloneStep) Kind() StepKind {
+ 	return s.kind
+ }
+
+ // BuildCloneStep generates git clone commands.
+ // The shared builder handles:
+ //   - git init
+ //   - git remote add origin <url>
+ //   - git fetch --depth=<d> --recurse-submodules=<yes|no> <sha>
+ //   - git checkout FETCH_HEAD
+ // And supports all trigger types (push, PR, manual) and clone options.
+ func BuildCloneStep(twf tangled.Pipeline_Workflow, tr tangled.Pipeline_TriggerMetadata, workspaceDir string, dev bool) CloneStep {
+ 	if twf.Clone != nil && twf.Clone.Skip {
+ 		return CloneStep{}
+ 	}
+
+ 	commitSHA, err := extractCommitSHA(tr)
+ 	if err != nil {
+ 		return CloneStep{
+ 			kind:     StepKindSystem,
+ 			name:     "Clone repository into workspace (error)",
+ 			commands: []string{fmt.Sprintf("echo 'Failed to get clone info: %s' && exit 1", err.Error())},
+ 		}
+ 	}
+
+ 	repoURL := buildRepoURL(tr, dev)
+
+ 	if workspaceDir == "" {
+ 		workspaceDir = "/tangled/workspace"
+ 	}
+
+ 	initCmd := fmt.Sprintf("git init %s", workspaceDir)
+ 	remoteCmd := fmt.Sprintf("git remote add origin %s", repoURL)
+
+ 	var cloneOpts tangled.Pipeline_CloneOpts
+ 	if twf.Clone != nil {
+ 		cloneOpts = *twf.Clone
+ 	}
+ 	fetchArgs := buildFetchArgs(cloneOpts, commitSHA)
+ 	fetchCmd := fmt.Sprintf("git fetch %s", strings.Join(fetchArgs, " "))
+ 	checkoutCmd := "git checkout FETCH_HEAD"
+
+ 	return CloneStep{
+ 		kind: StepKindSystem,
+ 		name: "Clone repository into workspace",
+ 		commands: []string{
+ 			initCmd,
+ 			fmt.Sprintf("cd %s", workspaceDir),
+ 			remoteCmd,
+ 			fetchCmd,
+ 			checkoutCmd,
+ 		},
+ 	}
+ }
+
+ // extractCommitSHA extracts the commit SHA from trigger metadata based on trigger type
+ func extractCommitSHA(tr tangled.Pipeline_TriggerMetadata) (string, error) {
+ 	switch workflow.TriggerKind(tr.Kind) {
+ 	case workflow.TriggerKindPush:
+ 		if tr.Push == nil {
+ 			return "", fmt.Errorf("push trigger metadata is nil")
+ 		}
+ 		return tr.Push.NewSha, nil
+
+ 	case workflow.TriggerKindPullRequest:
+ 		if tr.PullRequest == nil {
+ 			return "", fmt.Errorf("pull request trigger metadata is nil")
+ 		}
+ 		return tr.PullRequest.SourceSha, nil
+
+ 	case workflow.TriggerKindManual:
+ 		// Manual triggers don't have an explicit SHA in the metadata
+ 		// For now, return empty string - could be enhanced to fetch from default branch
+ 		// TODO: Implement manual trigger SHA resolution (fetch default branch HEAD)
+ 		return "", nil
+
+ 	default:
+ 		return "", fmt.Errorf("unknown trigger kind: %s", tr.Kind)
+ 	}
+ }
+
+ // buildRepoURL constructs the repository URL from trigger metadata
+ func buildRepoURL(tr tangled.Pipeline_TriggerMetadata, devMode bool) string {
+ 	if tr.Repo == nil {
+ 		return ""
+ 	}
+
+ 	// Determine protocol
+ 	scheme := "https://"
+ 	if devMode {
+ 		scheme = "http://"
+ 	}
+
+ 	// Get host from knot
+ 	host := tr.Repo.Knot
+
+ 	// In dev mode, replace localhost with host.docker.internal for Docker networking
+ 	if devMode && strings.Contains(host, "localhost") {
+ 		host = strings.ReplaceAll(host, "localhost", "host.docker.internal")
+ 	}
+
+ 	// Build URL: {scheme}{knot}/{did}/{repo}
+ 	return fmt.Sprintf("%s%s/%s/%s", scheme, host, tr.Repo.Did, tr.Repo.Repo)
+ }
+
+ // buildFetchArgs constructs the arguments for git fetch based on clone options
+ func buildFetchArgs(clone tangled.Pipeline_CloneOpts, sha string) []string {
+ 	args := []string{}
+
+ 	// Set fetch depth (default to 1 for shallow clone)
+ 	depth := clone.Depth
+ 	if depth == 0 {
+ 		depth = 1
+ 	}
+ 	args = append(args, fmt.Sprintf("--depth=%d", depth))
+
+ 	// Add submodules if requested
+ 	if clone.Submodules {
+ 		args = append(args, "--recurse-submodules=yes")
+ 	}
+
+ 	// Add remote and SHA
+ 	args = append(args, "origin")
+ 	if sha != "" {
+ 		args = append(args, sha)
+ 	}
+
+ 	return args
+ }
+364
spindle/models/clone_test.go
···
+ package models
+
+ import (
+ 	"strings"
+ 	"testing"
+
+ 	"tangled.org/core/api/tangled"
+ 	"tangled.org/core/workflow"
+ )
+
+ func TestBuildCloneStep_PushTrigger(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth:      1,
+ 			Submodules: false,
+ 			Skip:       false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 			OldSha: "def456",
+ 			Ref:    "refs/heads/main",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	if step.Kind() != StepKindSystem {
+ 		t.Errorf("Expected StepKindSystem, got %v", step.Kind())
+ 	}
+
+ 	if step.Name() != "Clone repository into workspace" {
+ 		t.Errorf("Expected 'Clone repository into workspace', got '%s'", step.Name())
+ 	}
+
+ 	commands := step.Commands()
+ 	if len(commands) != 5 {
+ 		t.Errorf("Expected 5 commands, got %d", len(commands))
+ 	}
+
+ 	// Verify commands contain expected git operations
+ 	allCmds := strings.Join(commands, " ")
+ 	if !strings.Contains(allCmds, "git init") {
+ 		t.Error("Commands should contain 'git init'")
+ 	}
+ 	if !strings.Contains(allCmds, "git remote add origin") {
+ 		t.Error("Commands should contain 'git remote add origin'")
+ 	}
+ 	if !strings.Contains(allCmds, "git fetch") {
+ 		t.Error("Commands should contain 'git fetch'")
+ 	}
+ 	if !strings.Contains(allCmds, "abc123") {
+ 		t.Error("Commands should contain commit SHA")
+ 	}
+ 	if !strings.Contains(allCmds, "git checkout FETCH_HEAD") {
+ 		t.Error("Commands should contain 'git checkout FETCH_HEAD'")
+ 	}
+ 	if !strings.Contains(allCmds, "https://example.com/did:plc:user123/my-repo") {
+ 		t.Error("Commands should contain expected repo URL")
+ 	}
+ }
+
+ func TestBuildCloneStep_PullRequestTrigger(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPullRequest),
+ 		PullRequest: &tangled.Pipeline_PullRequestTriggerData{
+ 			SourceSha:    "pr-sha-789",
+ 			SourceBranch: "feature-branch",
+ 			TargetBranch: "main",
+ 			Action:       "opened",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "pr-sha-789") {
+ 		t.Error("Commands should contain PR commit SHA")
+ 	}
+ }
+
+ func TestBuildCloneStep_ManualTrigger(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindManual),
+ 		Manual: &tangled.Pipeline_ManualTriggerData{
+ 			Inputs: nil,
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	// Manual triggers don't have a SHA yet (TODO), so git fetch won't include a SHA
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	// Should still have basic git commands
+ 	if !strings.Contains(allCmds, "git init") {
+ 		t.Error("Commands should contain 'git init'")
+ 	}
+ 	if !strings.Contains(allCmds, "git fetch") {
+ 		t.Error("Commands should contain 'git fetch'")
+ 	}
+ }
+
+ func TestBuildCloneStep_SkipFlag(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Skip: true,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	// Empty step when skip is true
+ 	if step.Name() != "" {
+ 		t.Error("Expected empty step name when Skip is true")
+ 	}
+ 	if len(step.Commands()) != 0 {
+ 		t.Errorf("Expected no commands when Skip is true, got %d commands", len(step.Commands()))
+ 	}
+ }
+
+ func TestBuildCloneStep_DevMode(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "localhost:3000",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", true)
+
+ 	// In dev mode, should use http:// and replace localhost with host.docker.internal
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	expectedURL := "http://host.docker.internal:3000/did:plc:user123/my-repo"
+ 	if !strings.Contains(allCmds, expectedURL) {
+ 		t.Errorf("Expected dev mode URL '%s' in commands", expectedURL)
+ 	}
+ }
+
+ func TestBuildCloneStep_DepthAndSubmodules(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth:      10,
+ 			Submodules: true,
+ 			Skip:       false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "--depth=10") {
+ 		t.Error("Commands should contain '--depth=10'")
+ 	}
+
+ 	if !strings.Contains(allCmds, "--recurse-submodules=yes") {
+ 		t.Error("Commands should contain '--recurse-submodules=yes'")
+ 	}
+ }
+
+ func TestBuildCloneStep_DefaultDepth(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 0, // Default should be 1
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "--depth=1") {
+ 		t.Error("Commands should default to '--depth=1'")
+ 	}
+ }
+
+ func TestBuildCloneStep_NilPushData(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: nil, // Nil push data should create error step
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	// Should return an error step
+ 	if !strings.Contains(step.Name(), "error") {
+ 		t.Error("Expected error in step name when push data is nil")
+ 	}
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "Failed to get clone info") {
+ 		t.Error("Commands should contain error message")
+ 	}
+ 	if !strings.Contains(allCmds, "exit 1") {
+ 		t.Error("Commands should exit with error")
+ 	}
+ }
+
+ func TestBuildCloneStep_NilPRData(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind:        string(workflow.TriggerKindPullRequest),
+ 		PullRequest: nil, // Nil PR data should create error step
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/tangled/workspace", false)
+
+ 	// Should return an error step
+ 	if !strings.Contains(step.Name(), "error") {
+ 		t.Error("Expected error in step name when pull request data is nil")
+ 	}
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "Failed to get clone info") {
+ 		t.Error("Commands should contain error message")
+ 	}
+ }
+
+ func TestBuildCloneStep_CustomWorkspace(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "/custom/path", false)
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "/custom/path") {
+ 		t.Error("Commands should use custom workspace directory")
+ 	}
+ }
+
+ func TestBuildCloneStep_DefaultWorkspace(t *testing.T) {
+ 	twf := tangled.Pipeline_Workflow{
+ 		Clone: &tangled.Pipeline_CloneOpts{
+ 			Depth: 1,
+ 			Skip:  false,
+ 		},
+ 	}
+ 	tr := tangled.Pipeline_TriggerMetadata{
+ 		Kind: string(workflow.TriggerKindPush),
+ 		Push: &tangled.Pipeline_PushTriggerData{
+ 			NewSha: "abc123",
+ 		},
+ 		Repo: &tangled.Pipeline_TriggerRepo{
+ 			Knot: "example.com",
+ 			Did:  "did:plc:user123",
+ 			Repo: "my-repo",
+ 		},
+ 	}
+
+ 	step := BuildCloneStep(twf, tr, "", false) // Empty should default to /tangled/workspace
+
+ 	allCmds := strings.Join(step.Commands(), " ")
+ 	if !strings.Contains(allCmds, "/tangled/workspace") {
+ 		t.Error("Commands should default to /tangled/workspace")
+ 	}
+ }

Submissions

evan.jarrett.net submitted #3
1 commit
spindle: implement shared clone step outside of nixery engine.
anirudh.fi

Gave it a once-over; looks good! And thanks for the tests.

pull request successfully merged
evan.jarrett.net submitted #2
2 commits
spindle: move the clone step out of nixery into a shared function for all spindle engines
Create a BuildCloneStep function that returns a step struct that inherits from models.Step
evan.jarrett.net

Not entirely sure why this says there are merge conflicts when I rebased master. This is an attempt to go off of your design of returning a whole step struct. I think it turns out essentially the same code... so maybe I'm missing something obvious.

that is a bit strange, i don't see anything off with the logs on the knotserver itself. that aside, some comments on the code itself:

  • don't think we need to cd into workspaceDir, we already have this upon container creation (the WorkingDir is set, as shown below; see the sketch after this list):
	resp, err := e.docker.ContainerCreate(ctx, &container.Config{
		Image:      addl.image,
		Cmd:        []string{"cat"},
		OpenStdin:  true, // so cat stays alive :3
		Tty:        false,
		Hostname:   "spindle",
		WorkingDir: workspaceDir,
  • the rest of the clone code itself looks good to me!
  • could you squash the two commits into one? the commit message could be spindle: introduce common clone step or similar
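
A minimal sketch of what the first bullet above suggests, for illustration only: if the container is created with WorkingDir already pointing at the workspace, the clone step can drop the explicit cd and let git init target the current directory. buildCloneCommands is a hypothetical helper, not part of the merged diff; repoURL and fetchArgs are assumed to come from buildRepoURL and buildFetchArgs shown in the diff above.

	package models

	import (
		"fmt"
		"strings"
	)

	// Sketch only: clone commands without the cd step, assuming the engine
	// creates the container with WorkingDir set to the workspace directory.
	func buildCloneCommands(repoURL string, fetchArgs []string) []string {
		return []string{
			"git init .", // the workspace is already the working directory
			fmt.Sprintf("git remote add origin %s", repoURL),
			fmt.Sprintf("git fetch %s", strings.Join(fetchArgs, " ")),
			"git checkout FETCH_HEAD",
		}
	}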
evan.jarrett.net submitted #1
2 commits
spindle: move the clone step out of nixery into a shared function for all spindle engines
Create a BuildCloneStep function that returns a step struct that inherits from models.Step
evan.jarrett.net submitted #0
1 commit
spindle: move the clone step out of nixery into a shared function for all spindle engines

thanks for the contribution! couple of nits:

  • we already have a top-level workflow package that also defines CloneOpts (albeit to unmarshal the workflow yaml file); it is a bit confusing to introduce another workflow package inside spindle with a similar CloneOptions struct (it also shares some of the same fields)
  • IMO a simpler route here would be to have a generic CloneStep method that is computed from tangled.Pipeline_Workflow and returns a struct that satisfies the models.Step interface (roughly sketched below)
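
For context, the models.Step interface itself does not appear in this PR's diff; below is a minimal sketch of what the suggestion assumes it looks like, inferred from the four methods CloneStep implements above. The exact definition in spindle/models (and the underlying type of StepKind) may differ.

	package models

	// Sketch only: Step as implied by CloneStep's methods in the diff above.
	// StepKind is assumed to be a small enum; StepKindSystem marks steps
	// generated by the engine rather than by the user's workflow.
	type StepKind int

	const StepKindSystem StepKind = iota

	type Step interface {
		Name() string       // human-readable step name shown in pipeline output
		Commands() []string // individual shell commands to run
		Command() string    // the same commands joined into a single script
		Kind() StepKind     // e.g. StepKindSystem for engine-generated steps
	}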
evan.jarrett.net

@oppi.li What package would you want this generic cloneStep in?

I think your solution is better; there was no precedent for a custom models.Step outside of nixery, so I was originally trying to avoid that.

I would still eventually want RepoURL and CommitSHA exposed for my use case, but I might have to just submit a different PR for system-level env vars.