### First Steps

For a comprehensive walkthrough including task management, time tracking, notes, and media tracking, see the [Quickstart Guide](website/docs/Quickstart.md).

## Development

Noteleaf uses [Task](https://taskfile.dev) for build automation. Development builds include additional tooling commands not available in production builds.

### Building

```sh
# Production build
task build

# Development build (with version info and dev tools)
task build:dev

# Run tests
task test
task cov # ...with coverage
```

### Development Tools

Dev builds (`task build:dev`) include a `tools` subcommand with maintenance utilities:

**Documentation Generation:**

```sh
# Generate Docusaurus documentation
noteleaf tools docgen --format docusaurus --out website/docs/manual

# Generate man pages
noteleaf tools docgen --format man --out docs/manual
```

**Data Synchronization:**

```sh
# Fetch Leaflet lexicons from GitHub
noteleaf tools fetch lexicons

# Fetch from a specific commit
noteleaf tools fetch lexicons --sha abc123def

# Generic GitHub repository archive fetcher
noteleaf tools fetch gh-repo \
  --repo owner/repo \
  --path schemas/ \
  --output local/schemas/
```

Production builds (`task build:rc`, `task build:prod`) use the `-tags prod` flag to exclude dev tools.
Taskfile.yml (+5 -5)

       - echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}}"

   build:dev:
-    desc: Build binary with dev version (includes git commit hash)
+    desc: Build binary with dev version (includes git commit hash and dev tools)
     vars:
       VERSION: "{{.GIT_DESCRIBE}}"
       LDFLAGS: "-X {{.VERSION_PKG}}.Version={{.VERSION}} -X {{.VERSION_PKG}}.Commit={{.GIT_COMMIT}} -X {{.VERSION_PKG}}.BuildDate={{.BUILD_DATE}}"
...
       - 'echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}} (version: {{.VERSION}})"'

   build:rc:
-    desc: Build release candidate binary (requires git tag with -rc suffix)
+    desc: Build release candidate binary (requires git tag with -rc suffix, excludes dev tools)
     vars:
       VERSION: "{{.GIT_TAG}}"
       LDFLAGS: "-X {{.VERSION_PKG}}.Version={{.VERSION}} -X {{.VERSION_PKG}}.Commit={{.GIT_COMMIT}} -X {{.VERSION_PKG}}.BuildDate={{.BUILD_DATE}}"
...
           msg: "Git tag must contain '-rc' for release candidate builds (e.g., v1.0.0-rc1)"
     cmds:
       - mkdir -p {{.BUILD_DIR}}
-      - go build -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}}
+      - go build -tags prod -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}}
       - 'echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}} (version: {{.VERSION}})"'

   build:prod:
-    desc: Build production binary (requires clean semver git tag)
+    desc: Build production binary (requires clean semver git tag, excludes dev tools)
     vars:
       VERSION: "{{.GIT_TAG}}"
       LDFLAGS: "-X {{.VERSION_PKG}}.Version={{.VERSION}} -X {{.VERSION_PKG}}.Commit={{.GIT_COMMIT}} -X {{.VERSION_PKG}}.BuildDate={{.BUILD_DATE}}"
...
           msg: "Working directory must be clean (no uncommitted changes) for production builds"
     cmds:
       - mkdir -p {{.BUILD_DIR}}
-      - go build -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}}
+      - go build -tags prod -ldflags "{{.LDFLAGS}}" -o {{.BUILD_DIR}}/{{.BINARY_NAME}} {{.CMD_DIR}}
       - 'echo "Built {{.BUILD_DIR}}/{{.BINARY_NAME}} (version: {{.VERSION}})"'

   clean:
···11+//go:build !prod
22+33+package main
44+55+import (
66+ "github.com/spf13/cobra"
77+ "github.com/stormlightlabs/noteleaf/tools"
88+)
99+1010+// registerTools adds development tools to the root command
1111+func registerTools(root *cobra.Command) {
1212+ root.AddCommand(tools.NewToolsCommand(root))
1313+}
+8
cmd/tools_prod.go
···11+//go:build prod
22+33+package main
44+55+import "github.com/spf13/cobra"
66+77+// registerTools is a no-op in production builds
88+func registerTools(*cobra.Command) {}
+3-6
tools/docgen.go
···11+//go:build !prod
22+13package tools
2435import (
···324326 }
325327326328 for _, cmd := range root.Commands() {
327327- if cmd.Name() == path[0] || contains(cmd.Aliases, path[0]) {
329329+ if cmd.Name() == path[0] || slices.Contains(cmd.Aliases, path[0]) {
328330 if len(path) == 1 {
329331 return cmd
330332 }
···334336335337 return nil
336338}
337337-338338-// contains checks if a string is in a slice
339339-func contains(slice []string, str string) bool {
340340- return slices.Contains(slice, str)
341341-}
+21
tools/fetch.go
···11+//go:build !prod
22+33+package tools
44+55+import "github.com/spf13/cobra"
66+77+// NewFetchCommand creates a parent command for fetching remote resources
88+func NewFetchCommand() *cobra.Command {
99+ cmd := &cobra.Command{
1010+ Use: "fetch",
1111+ Short: "Fetch remote resources",
1212+ Long: `Fetch and synchronize remote resources from GitHub repositories.
1313+1414+Includes commands for fetching lexicons, schemas, and other data files.`,
1515+ }
1616+1717+ cmd.AddCommand(NewGHRepoCommand())
1818+ cmd.AddCommand(NewLexiconsCommand())
1919+2020+ return cmd
2121+}
+51
tools/lexicon_fetch.go
···11+//go:build !prod
22+33+package tools
44+55+import (
66+ "context"
77+88+ "github.com/spf13/cobra"
99+)
1010+1111+// NewLexiconsCommand creates a command for fetching Leaflet lexicons
1212+func NewLexiconsCommand() *cobra.Command {
1313+ var sha string
1414+ var output string
1515+1616+ cmd := &cobra.Command{
1717+ Use: "lexicons",
1818+ Short: "Fetch Leaflet lexicons from GitHub",
1919+ Long: `Fetches Leaflet lexicons from the hyperlink-academy/leaflet repository.
2020+2121+This is a convenience wrapper around gh-repo with pre-configured defaults
2222+for the Leaflet lexicon repository.`,
2323+ Example: ` # Fetch latest lexicons
2424+ noteleaf tools fetch lexicons
2525+2626+ # Fetch from a specific commit
2727+ noteleaf tools fetch lexicons --sha abc123def
2828+2929+ # Fetch to a custom directory
3030+ noteleaf tools fetch lexicons --output ./tmp/lexicons`,
3131+ RunE: func(cmd *cobra.Command, args []string) error {
3232+ config := ArchiveConfig{
3333+ Repo: "hyperlink-academy/leaflet",
3434+ Path: "lexicons/pub/leaflet/",
3535+ Output: output,
3636+ SHA: sha,
3737+ FormatJSON: true,
3838+ }
3939+4040+ ctx := cmd.Context()
4141+ if ctx == nil {
4242+ ctx = context.Background()
4343+ }
4444+4545+ return fetchAndExtractArchive(ctx, config, cmd.OutOrStdout())
4646+ },
4747+ }
4848+ cmd.Flags().StringVar(&sha, "sha", "", "Specific commit SHA (default: latest)")
4949+ cmd.Flags().StringVar(&output, "output", "lexdocs/leaflet/", "Output directory for lexicons")
5050+ return cmd
5151+}
+19
tools/registry.go
···11+//go:build !prod
22+33+package tools
44+55+import "github.com/spf13/cobra"
66+77+// NewToolsCommand creates a parent command for all development tools
88+func NewToolsCommand(root *cobra.Command) *cobra.Command {
99+ cmd := &cobra.Command{
1010+ Use: "tools",
1111+ Short: "Development and maintenance tools",
1212+ Long: `Development tools for documentation generation, data synchronization,
1313+and maintenance tasks. These commands are only available in dev builds.`,
1414+ }
1515+ cmd.AddCommand(NewDocGenCommand(root))
1616+ cmd.AddCommand(NewFetchCommand())
1717+1818+ return cmd
1919+}
+251
tools/repo_archive.go
···11+//go:build !prod
22+33+package tools
import (
	"archive/tar"
	"compress/gzip"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/spf13/cobra"
)
// GitHubCommit models the subset of a GitHub "list commits" API response
// that this package consumes: the commit SHA plus its message. All other
// response fields are ignored by the JSON decoder.
type GitHubCommit struct {
	SHA    string `json:"sha"`
	Commit struct {
		Message string `json:"message"`
	} `json:"commit"`
}
// ArchiveConfig contains configuration for fetching and extracting archives.
type ArchiveConfig struct {
	Repo       string // GitHub repository in "owner/name" form
	Path       string // path prefix inside the repository to extract
	Output     string // local directory the extracted files are written to
	SHA        string // commit to fetch; empty means "latest commit touching Path"
	FormatJSON bool   // when true, re-indent extracted .json files
}
3737+3838+// NewGHRepoCommand creates a command for fetching GitHub repository archives
3939+func NewGHRepoCommand() *cobra.Command {
4040+ var config ArchiveConfig
4141+4242+ cmd := &cobra.Command{
4343+ Use: "gh-repo",
4444+ Short: "Fetch and extract files from a GitHub repository archive",
4545+ Long: `Fetches a GitHub repository archive (tarball), extracts specific paths,
4646+and optionally formats JSON files using Go's standard library.
4747+4848+This is useful for syncing lexicons, schemas, or other data files from GitHub repositories.`,
4949+ Example: ` # Fetch lexicons from a specific path
5050+ noteleaf tools fetch gh-repo \
5151+ --repo hyperlink-academy/leaflet \
5252+ --path lexicons/pub/leaflet/ \
5353+ --output lexdocs/leaflet/
5454+5555+ # Fetch from a specific commit
5656+ noteleaf tools fetch gh-repo \
5757+ --repo owner/repo \
5858+ --path schemas/ \
5959+ --output local/schemas/ \
6060+ --sha abc123def`,
6161+ RunE: func(cmd *cobra.Command, args []string) error {
6262+ if config.Repo == "" {
6363+ return fmt.Errorf("--repo is required")
6464+ }
6565+ if config.Path == "" {
6666+ return fmt.Errorf("--path is required")
6767+ }
6868+ if config.Output == "" {
6969+ return fmt.Errorf("--output is required")
7070+ }
7171+7272+ ctx := cmd.Context()
7373+ if ctx == nil {
7474+ ctx = context.Background()
7575+ }
7676+7777+ return fetchAndExtractArchive(ctx, config, cmd.OutOrStdout())
7878+ },
7979+ }
8080+8181+ cmd.Flags().StringVar(&config.Repo, "repo", "", "GitHub repository (owner/name)")
8282+ cmd.Flags().StringVar(&config.Path, "path", "", "Path within repository to extract")
8383+ cmd.Flags().StringVar(&config.Output, "output", "", "Output directory for extracted files")
8484+ cmd.Flags().StringVar(&config.SHA, "sha", "", "Specific commit SHA (default: latest)")
8585+ cmd.Flags().BoolVar(&config.FormatJSON, "format-json", true, "Format JSON files with indentation")
8686+ return cmd
8787+}
8888+8989+// fetchAndExtractArchive fetches a GitHub archive and extracts specific paths
9090+func fetchAndExtractArchive(ctx context.Context, config ArchiveConfig, out io.Writer) error {
9191+ sha := config.SHA
9292+ if sha == "" {
9393+ var err error
9494+ sha, err = getLatestCommit(ctx, config.Repo, config.Path)
9595+ if err != nil {
9696+ return fmt.Errorf("failed to get latest commit: %w", err)
9797+ }
9898+ fmt.Fprintf(out, "Latest commit: %s\n", sha)
9999+ }
100100+101101+ tmpDir, err := os.MkdirTemp("", "repo-archive-*")
102102+ if err != nil {
103103+ return fmt.Errorf("failed to create temp directory: %w", err)
104104+ }
105105+ defer os.RemoveAll(tmpDir)
106106+107107+ fmt.Fprintf(out, "Fetching archive for %s@%s\n", config.Repo, sha[:7])
108108+ if err := downloadAndExtract(ctx, config.Repo, sha, config.Path, tmpDir, config.FormatJSON, out); err != nil {
109109+ return fmt.Errorf("failed to download and extract: %w", err)
110110+ }
111111+112112+ fmt.Fprintf(out, "Writing README with source information\n")
113113+ readme := fmt.Sprintf("Source: https://github.com/%s/tree/%s/%s\n", config.Repo, sha, config.Path)
114114+ if err := os.WriteFile(filepath.Join(tmpDir, "README.md"), []byte(readme), 0o644); err != nil {
115115+ return fmt.Errorf("failed to write README: %w", err)
116116+ }
117117+118118+ fmt.Fprintf(out, "Moving extracted files to %s\n", config.Output)
119119+ if err := os.RemoveAll(config.Output); err != nil {
120120+ return fmt.Errorf("failed to remove existing output directory: %w", err)
121121+ }
122122+ if err := os.Rename(tmpDir, config.Output); err != nil {
123123+ return fmt.Errorf("failed to move files to output directory: %w", err)
124124+ }
125125+126126+ fmt.Fprintf(out, "Successfully extracted archive to %s\n", config.Output)
127127+ return nil
128128+}
129129+130130+// getLatestCommit fetches the latest commit SHA for a given repository and path
131131+func getLatestCommit(ctx context.Context, repo, path string) (string, error) {
132132+ url := fmt.Sprintf("https://api.github.com/repos/%s/commits?path=%s&per_page=1", repo, path)
133133+134134+ req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
135135+ if err != nil {
136136+ return "", err
137137+ }
138138+139139+ client := &http.Client{Timeout: 30 * time.Second}
140140+ resp, err := client.Do(req)
141141+ if err != nil {
142142+ return "", err
143143+ }
144144+ defer resp.Body.Close()
145145+146146+ if resp.StatusCode != http.StatusOK {
147147+ return "", fmt.Errorf("GitHub API returned status %d", resp.StatusCode)
148148+ }
149149+150150+ var commits []GitHubCommit
151151+ if err := json.NewDecoder(resp.Body).Decode(&commits); err != nil {
152152+ return "", fmt.Errorf("failed to decode response: %w", err)
153153+ }
154154+155155+ if len(commits) == 0 {
156156+ return "", fmt.Errorf("no commits found for path %s", path)
157157+ }
158158+159159+ return commits[0].SHA, nil
160160+}
// downloadAndExtract downloads a GitHub tar.gz archive for repo@sha and
// writes every regular .json file found under extractPath into outputDir,
// optionally re-indenting the JSON.
//
// Non-regular entries, entries outside extractPath, and non-JSON files are
// skipped. Entry names are validated so a crafted archive containing ".."
// components cannot write outside outputDir (tar path traversal).
func downloadAndExtract(ctx context.Context, repo, sha, extractPath, outputDir string, formatJSON bool, out io.Writer) error {
	// Guard the owner/name split: the original blind Split(...)[1] panics
	// when repo has no "/" (reachable via the gh-repo --repo flag).
	owner, name, ok := strings.Cut(repo, "/")
	if !ok || owner == "" || name == "" {
		return fmt.Errorf("invalid repository %q: expected owner/name", repo)
	}

	archiveURL := fmt.Sprintf("https://github.com/%s/archive/%s.tar.gz", repo, sha)

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, archiveURL, nil)
	if err != nil {
		return err
	}

	client := &http.Client{Timeout: 5 * time.Minute}
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("failed to download archive: status %d", resp.StatusCode)
	}

	gzr, err := gzip.NewReader(resp.Body)
	if err != nil {
		return fmt.Errorf("failed to create gzip reader: %w", err)
	}
	defer gzr.Close()

	tr := tar.NewReader(gzr)

	// GitHub archives place everything under a "<name>-<sha>/" root dir.
	prefix := fmt.Sprintf("%s-%s/%s", name, sha, extractPath)

	fmt.Fprintf(out, "Extracting files from %s\n", prefix)

	fileCount := 0
	for {
		header, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return fmt.Errorf("failed to read tar header: %w", err)
		}

		// Only regular .json files below the requested path are extracted.
		if header.Typeflag != tar.TypeReg ||
			!strings.HasPrefix(header.Name, prefix) ||
			!strings.HasSuffix(header.Name, ".json") {
			continue
		}

		relativePath := strings.TrimPrefix(header.Name, prefix)
		// Reject entry names that would escape outputDir (e.g. "../x" or
		// an absolute path) before joining them onto the output tree.
		if !filepath.IsLocal(relativePath) {
			return fmt.Errorf("unsafe path %q in archive", header.Name)
		}
		outputPath := filepath.Join(outputDir, relativePath)

		if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
			return fmt.Errorf("failed to create directory for %s: %w", outputPath, err)
		}

		data, err := io.ReadAll(tr)
		if err != nil {
			return fmt.Errorf("failed to read file %s: %w", header.Name, err)
		}

		if formatJSON {
			var jsonData any
			if err := json.Unmarshal(data, &jsonData); err != nil {
				return fmt.Errorf("failed to parse JSON in %s: %w", header.Name, err)
			}

			formattedData, err := json.MarshalIndent(jsonData, "", " ")
			if err != nil {
				return fmt.Errorf("failed to format JSON in %s: %w", header.Name, err)
			}
			data = append(formattedData, '\n')
		}

		if err := os.WriteFile(outputPath, data, 0o644); err != nil {
			return fmt.Errorf("failed to write file %s: %w", outputPath, err)
		}

		fileCount++
	}

	fmt.Fprintf(out, "Extracted %d files\n", fileCount)
	return nil
}