cli + tui to publish to leaflet (wip) & manage tasks, notes & watch/read lists 🍃
charm leaflet readability golang

build: add generic github repo fetching utility

* add leaflet lexicons

* add prod build flag

+422 -13
+50
README.md
··· 30 30 ### First Steps 31 31 32 32 For a comprehensive walkthrough including task management, time tracking, notes, and media tracking, see the [Quickstart Guide](website/docs/Quickstart.md). 33 + 34 + ## Development 35 + 36 + Noteleaf uses [Task](https://taskfile.dev) for build automation. Development builds include additional tooling commands not available in production builds. 37 + 38 + ### Building 39 + 40 + ```sh 41 + # Production build 42 + task build 43 + 44 + # Development build (with version info and dev tools) 45 + task build:dev 46 + 47 + # Run tests 48 + task test 49 + task cov # ...with coverage 50 + ``` 51 + 52 + ### Development Tools 53 + 54 + Dev builds (`task build:dev`) include a `tools` subcommand with maintenance utilities: 55 + 56 + **Documentation Generation:** 57 + 58 + ```sh 59 + # Generate Docusaurus documentation 60 + noteleaf tools docgen --format docusaurus --out website/docs/manual 61 + 62 + # Generate man pages 63 + noteleaf tools docgen --format man --out docs/manual 64 + ``` 65 + 66 + **Data Synchronization:** 67 + 68 + ```sh 69 + # Fetch Leaflet lexicons from GitHub 70 + noteleaf tools fetch lexicons 71 + 72 + # Fetch from a specific commit 73 + noteleaf tools fetch lexicons --sha abc123def 74 + 75 + # Generic GitHub repository archive fetcher 76 + noteleaf tools fetch gh-repo \ 77 + --repo owner/repo \ 78 + --path schemas/ \ 79 + --output local/schemas/ 80 + ``` 81 + 82 + Production builds (`task build:rc`, `task build:prod`) use the `-tags prod` flag to exclude dev tools.
+5 -5
Taskfile.yml
··· 51 51 - echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}}" 52 52 53 53 build:dev: 54 - desc: Build binary with dev version (includes git commit hash) 54 + desc: Build binary with dev version (includes git commit hash and dev tools) 55 55 vars: 56 56 VERSION: "{{.GIT_DESCRIBE}}" 57 57 LDFLAGS: "-X {{.VERSION_PKG}}.Version={{.VERSION}} -X {{.VERSION_PKG}}.Commit={{.GIT_COMMIT}} -X {{.VERSION_PKG}}.BuildDate={{.BUILD_DATE}}" ··· 61 61 - 'echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}} (version: {{.VERSION}})"' 62 62 63 63 build:rc: 64 - desc: Build release candidate binary (requires git tag with -rc suffix) 64 + desc: Build release candidate binary (requires git tag with -rc suffix, excludes dev tools) 65 65 vars: 66 66 VERSION: "{{.GIT_TAG}}" 67 67 LDFLAGS: "-X {{.VERSION_PKG}}.Version={{.VERSION}} -X {{.VERSION_PKG}}.Commit={{.GIT_COMMIT}} -X {{.VERSION_PKG}}.BuildDate={{.BUILD_DATE}}" ··· 72 72 msg: "Git tag must contain '-rc' for release candidate builds (e.g., v1.0.0-rc1)" 73 73 cmds: 74 74 - mkdir -p {{.BUILD_DIR}} 75 - - go build -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}} 75 + - go build -tags prod -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}} 76 76 - 'echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}} (version: {{.VERSION}})"' 77 77 78 78 build:prod: 79 - desc: Build production binary (requires clean semver git tag) 79 + desc: Build production binary (requires clean semver git tag, excludes dev tools) 80 80 vars: 81 81 VERSION: "{{.GIT_TAG}}" 82 82 LDFLAGS: "-X {{.VERSION_PKG}}.Version={{.VERSION}} -X {{.VERSION_PKG}}.Commit={{.GIT_COMMIT}} -X {{.VERSION_PKG}}.BuildDate={{.BUILD_DATE}}" ··· 89 89 msg: "Working directory must be clean (no uncommitted changes) for production builds" 90 90 cmds: 91 91 - mkdir -p {{.BUILD_DIR}} 92 - - go build -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}} 92 + - go build -tags prod -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}} 93 93 - 
'echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}} (version: {{.VERSION}})"' 94 94 95 95 clean:
+1 -2
cmd/main.go
··· 14 14 "github.com/stormlightlabs/noteleaf/internal/ui" 15 15 "github.com/stormlightlabs/noteleaf/internal/utils" 16 16 "github.com/stormlightlabs/noteleaf/internal/version" 17 - "github.com/stormlightlabs/noteleaf/tools" 18 17 ) 19 18 20 19 var ( ··· 251 250 root.AddCommand(cmd) 252 251 } 253 252 254 - root.AddCommand(tools.NewDocGenCommand(root)) 253 + registerTools(root) 255 254 256 255 opts := []fang.Option{ 257 256 fang.WithVersion(version.String()),
+13
cmd/tools_dev.go
··· 1 + //go:build !prod 2 + 3 + package main 4 + 5 + import ( 6 + "github.com/spf13/cobra" 7 + "github.com/stormlightlabs/noteleaf/tools" 8 + ) 9 + 10 + // registerTools adds development tools to the root command 11 + func registerTools(root *cobra.Command) { 12 + root.AddCommand(tools.NewToolsCommand(root)) 13 + }
+8
cmd/tools_prod.go
··· 1 + //go:build prod 2 + 3 + package main 4 + 5 + import "github.com/spf13/cobra" 6 + 7 + // registerTools is a no-op in production builds 8 + func registerTools(*cobra.Command) {}
+3 -6
tools/docgen.go
··· 1 + //go:build !prod 2 + 1 3 package tools 2 4 3 5 import ( ··· 324 326 } 325 327 326 328 for _, cmd := range root.Commands() { 327 - if cmd.Name() == path[0] || contains(cmd.Aliases, path[0]) { 329 + if cmd.Name() == path[0] || slices.Contains(cmd.Aliases, path[0]) { 328 330 if len(path) == 1 { 329 331 return cmd 330 332 } ··· 334 336 335 337 return nil 336 338 } 337 - 338 - // contains checks if a string is in a slice 339 - func contains(slice []string, str string) bool { 340 - return slices.Contains(slice, str) 341 - }
+21
tools/fetch.go
··· 1 + //go:build !prod 2 + 3 + package tools 4 + 5 + import "github.com/spf13/cobra" 6 + 7 + // NewFetchCommand creates a parent command for fetching remote resources 8 + func NewFetchCommand() *cobra.Command { 9 + cmd := &cobra.Command{ 10 + Use: "fetch", 11 + Short: "Fetch remote resources", 12 + Long: `Fetch and synchronize remote resources from GitHub repositories. 13 + 14 + Includes commands for fetching lexicons, schemas, and other data files.`, 15 + } 16 + 17 + cmd.AddCommand(NewGHRepoCommand()) 18 + cmd.AddCommand(NewLexiconsCommand()) 19 + 20 + return cmd 21 + }
+51
tools/lexicon_fetch.go
··· 1 + //go:build !prod 2 + 3 + package tools 4 + 5 + import ( 6 + "context" 7 + 8 + "github.com/spf13/cobra" 9 + ) 10 + 11 + // NewLexiconsCommand creates a command for fetching Leaflet lexicons 12 + func NewLexiconsCommand() *cobra.Command { 13 + var sha string 14 + var output string 15 + 16 + cmd := &cobra.Command{ 17 + Use: "lexicons", 18 + Short: "Fetch Leaflet lexicons from GitHub", 19 + Long: `Fetches Leaflet lexicons from the hyperlink-academy/leaflet repository. 20 + 21 + This is a convenience wrapper around gh-repo with pre-configured defaults 22 + for the Leaflet lexicon repository.`, 23 + Example: ` # Fetch latest lexicons 24 + noteleaf tools fetch lexicons 25 + 26 + # Fetch from a specific commit 27 + noteleaf tools fetch lexicons --sha abc123def 28 + 29 + # Fetch to a custom directory 30 + noteleaf tools fetch lexicons --output ./tmp/lexicons`, 31 + RunE: func(cmd *cobra.Command, args []string) error { 32 + config := ArchiveConfig{ 33 + Repo: "hyperlink-academy/leaflet", 34 + Path: "lexicons/pub/leaflet/", 35 + Output: output, 36 + SHA: sha, 37 + FormatJSON: true, 38 + } 39 + 40 + ctx := cmd.Context() 41 + if ctx == nil { 42 + ctx = context.Background() 43 + } 44 + 45 + return fetchAndExtractArchive(ctx, config, cmd.OutOrStdout()) 46 + }, 47 + } 48 + cmd.Flags().StringVar(&sha, "sha", "", "Specific commit SHA (default: latest)") 49 + cmd.Flags().StringVar(&output, "output", "lexdocs/leaflet/", "Output directory for lexicons") 50 + return cmd 51 + }
+19
tools/registry.go
··· 1 + //go:build !prod 2 + 3 + package tools 4 + 5 + import "github.com/spf13/cobra" 6 + 7 + // NewToolsCommand creates a parent command for all development tools 8 + func NewToolsCommand(root *cobra.Command) *cobra.Command { 9 + cmd := &cobra.Command{ 10 + Use: "tools", 11 + Short: "Development and maintenance tools", 12 + Long: `Development tools for documentation generation, data synchronization, 13 + and maintenance tasks. These commands are only available in dev builds.`, 14 + } 15 + cmd.AddCommand(NewDocGenCommand(root)) 16 + cmd.AddCommand(NewFetchCommand()) 17 + 18 + return cmd 19 + }
+251
tools/repo_archive.go
··· 1 + //go:build !prod 2 + 3 + package tools 4 + 5 + import ( 6 + "archive/tar" 7 + "compress/gzip" 8 + "context" 9 + "encoding/json" 10 + "fmt" 11 + "io" 12 + "net/http" 13 + "os" 14 + "path/filepath" 15 + "strings" 16 + "time" 17 + 18 + "github.com/spf13/cobra" 19 + ) 20 + 21 + // GitHubCommit represents a GitHub API commit response 22 + type GitHubCommit struct { 23 + SHA string `json:"sha"` 24 + Commit struct { 25 + Message string `json:"message"` 26 + } `json:"commit"` 27 + } 28 + 29 + // ArchiveConfig contains configuration for fetching and extracting archives 30 + type ArchiveConfig struct { 31 + Repo string 32 + Path string 33 + Output string 34 + SHA string 35 + FormatJSON bool 36 + } 37 + 38 + // NewGHRepoCommand creates a command for fetching GitHub repository archives 39 + func NewGHRepoCommand() *cobra.Command { 40 + var config ArchiveConfig 41 + 42 + cmd := &cobra.Command{ 43 + Use: "gh-repo", 44 + Short: "Fetch and extract files from a GitHub repository archive", 45 + Long: `Fetches a GitHub repository archive (tarball), extracts specific paths, 46 + and optionally formats JSON files using Go's standard library. 
47 + 48 + This is useful for syncing lexicons, schemas, or other data files from GitHub repositories.`, 49 + Example: ` # Fetch lexicons from a specific path 50 + noteleaf tools fetch gh-repo \ 51 + --repo hyperlink-academy/leaflet \ 52 + --path lexicons/pub/leaflet/ \ 53 + --output lexdocs/leaflet/ 54 + 55 + # Fetch from a specific commit 56 + noteleaf tools fetch gh-repo \ 57 + --repo owner/repo \ 58 + --path schemas/ \ 59 + --output local/schemas/ \ 60 + --sha abc123def`, 61 + RunE: func(cmd *cobra.Command, args []string) error { 62 + if config.Repo == "" { 63 + return fmt.Errorf("--repo is required") 64 + } 65 + if config.Path == "" { 66 + return fmt.Errorf("--path is required") 67 + } 68 + if config.Output == "" { 69 + return fmt.Errorf("--output is required") 70 + } 71 + 72 + ctx := cmd.Context() 73 + if ctx == nil { 74 + ctx = context.Background() 75 + } 76 + 77 + return fetchAndExtractArchive(ctx, config, cmd.OutOrStdout()) 78 + }, 79 + } 80 + 81 + cmd.Flags().StringVar(&config.Repo, "repo", "", "GitHub repository (owner/name)") 82 + cmd.Flags().StringVar(&config.Path, "path", "", "Path within repository to extract") 83 + cmd.Flags().StringVar(&config.Output, "output", "", "Output directory for extracted files") 84 + cmd.Flags().StringVar(&config.SHA, "sha", "", "Specific commit SHA (default: latest)") 85 + cmd.Flags().BoolVar(&config.FormatJSON, "format-json", true, "Format JSON files with indentation") 86 + return cmd 87 + } 88 + 89 + // fetchAndExtractArchive fetches a GitHub archive and extracts specific paths 90 + func fetchAndExtractArchive(ctx context.Context, config ArchiveConfig, out io.Writer) error { 91 + sha := config.SHA 92 + if sha == "" { 93 + var err error 94 + sha, err = getLatestCommit(ctx, config.Repo, config.Path) 95 + if err != nil { 96 + return fmt.Errorf("failed to get latest commit: %w", err) 97 + } 98 + fmt.Fprintf(out, "Latest commit: %s\n", sha) 99 + } 100 + 101 + tmpDir, err := os.MkdirTemp("", "repo-archive-*") 102 + if err != 
nil { 103 + return fmt.Errorf("failed to create temp directory: %w", err) 104 + } 105 + defer os.RemoveAll(tmpDir) 106 + 107 + fmt.Fprintf(out, "Fetching archive for %s@%s\n", config.Repo, sha[:7]) 108 + if err := downloadAndExtract(ctx, config.Repo, sha, config.Path, tmpDir, config.FormatJSON, out); err != nil { 109 + return fmt.Errorf("failed to download and extract: %w", err) 110 + } 111 + 112 + fmt.Fprintf(out, "Writing README with source information\n") 113 + readme := fmt.Sprintf("Source: https://github.com/%s/tree/%s/%s\n", config.Repo, sha, config.Path) 114 + if err := os.WriteFile(filepath.Join(tmpDir, "README.md"), []byte(readme), 0o644); err != nil { 115 + return fmt.Errorf("failed to write README: %w", err) 116 + } 117 + 118 + fmt.Fprintf(out, "Moving extracted files to %s\n", config.Output) 119 + if err := os.RemoveAll(config.Output); err != nil { 120 + return fmt.Errorf("failed to remove existing output directory: %w", err) 121 + } 122 + if err := os.Rename(tmpDir, config.Output); err != nil { 123 + return fmt.Errorf("failed to move files to output directory: %w", err) 124 + } 125 + 126 + fmt.Fprintf(out, "Successfully extracted archive to %s\n", config.Output) 127 + return nil 128 + } 129 + 130 + // getLatestCommit fetches the latest commit SHA for a given repository and path 131 + func getLatestCommit(ctx context.Context, repo, path string) (string, error) { 132 + url := fmt.Sprintf("https://api.github.com/repos/%s/commits?path=%s&per_page=1", repo, path) 133 + 134 + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) 135 + if err != nil { 136 + return "", err 137 + } 138 + 139 + client := &http.Client{Timeout: 30 * time.Second} 140 + resp, err := client.Do(req) 141 + if err != nil { 142 + return "", err 143 + } 144 + defer resp.Body.Close() 145 + 146 + if resp.StatusCode != http.StatusOK { 147 + return "", fmt.Errorf("GitHub API returned status %d", resp.StatusCode) 148 + } 149 + 150 + var commits []GitHubCommit 151 + if err := 
json.NewDecoder(resp.Body).Decode(&commits); err != nil { 152 + return "", fmt.Errorf("failed to decode response: %w", err) 153 + } 154 + 155 + if len(commits) == 0 { 156 + return "", fmt.Errorf("no commits found for path %s", path) 157 + } 158 + 159 + return commits[0].SHA, nil 160 + } 161 + 162 + // downloadAndExtract downloads a GitHub archive and extracts files from a specific path 163 + func downloadAndExtract(ctx context.Context, repo, sha, extractPath, outputDir string, formatJSON bool, out io.Writer) error { 164 + url := fmt.Sprintf("https://github.com/%s/archive/%s.tar.gz", repo, sha) 165 + 166 + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) 167 + if err != nil { 168 + return err 169 + } 170 + 171 + client := &http.Client{Timeout: 5 * time.Minute} 172 + resp, err := client.Do(req) 173 + if err != nil { 174 + return err 175 + } 176 + defer resp.Body.Close() 177 + 178 + if resp.StatusCode != http.StatusOK { 179 + return fmt.Errorf("failed to download archive: status %d", resp.StatusCode) 180 + } 181 + 182 + gzr, err := gzip.NewReader(resp.Body) 183 + if err != nil { 184 + return fmt.Errorf("failed to create gzip reader: %w", err) 185 + } 186 + defer gzr.Close() 187 + 188 + tr := tar.NewReader(gzr) 189 + 190 + repoName := strings.Split(repo, "/")[1] 191 + prefix := fmt.Sprintf("%s-%s/%s", repoName, sha, extractPath) 192 + 193 + fmt.Fprintf(out, "Extracting files from %s\n", prefix) 194 + 195 + fileCount := 0 196 + for { 197 + header, err := tr.Next() 198 + if err == io.EOF { 199 + break 200 + } 201 + if err != nil { 202 + return fmt.Errorf("failed to read tar header: %w", err) 203 + } 204 + 205 + if header.Typeflag != tar.TypeReg { 206 + continue 207 + } 208 + 209 + if !strings.HasPrefix(header.Name, prefix) { 210 + continue 211 + } 212 + 213 + if !strings.HasSuffix(header.Name, ".json") { 214 + continue 215 + } 216 + 217 + relativePath := strings.TrimPrefix(header.Name, prefix) 218 + outputPath := filepath.Join(outputDir, 
relativePath) 219 + 220 + if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil { 221 + return fmt.Errorf("failed to create directory for %s: %w", outputPath, err) 222 + } 223 + 224 + data, err := io.ReadAll(tr) 225 + if err != nil { 226 + return fmt.Errorf("failed to read file %s: %w", header.Name, err) 227 + } 228 + 229 + if formatJSON { 230 + var jsonData any 231 + if err := json.Unmarshal(data, &jsonData); err != nil { 232 + return fmt.Errorf("failed to parse JSON in %s: %w", header.Name, err) 233 + } 234 + 235 + formattedData, err := json.MarshalIndent(jsonData, "", " ") 236 + if err != nil { 237 + return fmt.Errorf("failed to format JSON in %s: %w", header.Name, err) 238 + } 239 + data = append(formattedData, '\n') 240 + } 241 + 242 + if err := os.WriteFile(outputPath, data, 0o644); err != nil { 243 + return fmt.Errorf("failed to write file %s: %w", outputPath, err) 244 + } 245 + 246 + fileCount++ 247 + } 248 + 249 + fmt.Fprintf(out, "Extracted %d files\n", fileCount) 250 + return nil 251 + }