Cargo.lock (+111)
···
 checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"

 [[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
 name = "bumpalo"
 version = "3.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 version = "0.8.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
+dependencies = [
+ "generic-array",
+ "typenum",
+]

 [[package]]
 name = "darling"
···
 ]

 [[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
 name = "find-msvc-tools"
 version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"

 [[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
 name = "hashbrown"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 dependencies = [
  "atrium-lex",
  "heck",
+ "hex",
  "pyo3",
  "serde",
  "serde_json",
+ "sha2",
  "thiserror",
+ "walkdir",
 ]

 [[package]]
···
 checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"

 [[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
 name = "serde"
 version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 ]

 [[package]]
+name = "sha2"
+version = "0.10.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
 name = "shlex"
 version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 ]

 [[package]]
+name = "typenum"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
+
+[[package]]
 name = "unicode-ident"
 version = "1.0.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"

 [[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
 name = "wasm-bindgen"
 version = "0.2.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 ]

 [[package]]
+name = "winapi-util"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
 name = "windows-core"
 version = "0.62.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
···
 dependencies = [
  "windows-link",
 ]
+
+[[package]]
+name = "windows-sys"
+version = "0.61.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
+dependencies = [
+ "windows-link",
+]
Cargo.toml (+3)
README.md (+105 -2)
···
 # filter by namespace
 uvx pmgfal -p fm.plyr
+
+# force regeneration (skip cache)
+uvx pmgfal --no-cache
 ```

+## caching
+
+pmgfal caches generated models based on a hash of your lexicon files. on subsequent runs with unchanged lexicons, it copies from cache instead of regenerating.
+
+cache location:
+- macos: `~/Library/Caches/pmgfal/`
+- linux: `~/.cache/pmgfal/` (or `$XDG_CACHE_HOME/pmgfal/` if set)
+- windows: `%LOCALAPPDATA%/pmgfal/`
+
+the cache key includes:
+- pmgfal version (cache invalidates on upgrade)
+- namespace prefix filter
+- content of all lexicon json files
+
 ## output

 ```python
···
     duration_ms: int | None = Field(default=None, alias="durationMs")
 ```

+## adoption guide
+
+### 1. add lexicons to your project
+
+```
+your-project/
+├── lexicons/
+│   └── fm/
+│       └── plyr/
+│           ├── track.json
+│           ├── like.json
+│           └── comment.json
+├── src/
+│   └── models/
+│       └── .gitkeep
+└── pyproject.toml
+```
+
+### 2. generate models
+
+```bash
+uvx pmgfal ./lexicons -o ./src/models -p fm.plyr
+```
+
+### 3. use in your code
+
+```python
+from your_project.models import FmPlyrTrack, FmPlyrLike
+
+track = FmPlyrTrack(
+    uri="at://did:plc:xyz/fm.plyr.track/123",
+    title="my song",
+    artist="me",
+)
+```
+
+### 4. regenerate when lexicons change
+
+**option a: pre-commit hook**
+
+```yaml
+# .pre-commit-config.yaml
+repos:
+  - repo: local
+    hooks:
+      - id: pmgfal
+        name: generate atproto models
+        entry: uvx pmgfal ./lexicons -o ./src/models -p fm.plyr
+        language: system
+        files: ^lexicons/.*\.json$
+        pass_filenames: false
+```
+
+**option b: justfile**
+
+```just
+# justfile
+generate:
+    uvx pmgfal ./lexicons -o ./src/models -p fm.plyr
+```
+
+**option c: github actions**
+
+```yaml
+# .github/workflows/ci.yml
+- name: generate models
+  run: uvx pmgfal ./lexicons -o ./src/models -p fm.plyr
+```
+
+thanks to caching, repeat runs are fast (~0.3s for 300 lexicons) when the lexicons haven't changed.
+
+## external refs
+
+pmgfal bundles all `com.atproto.*` lexicons and automatically resolves external refs. for example, if your lexicon references `com.atproto.repo.strongRef`, pmgfal generates:
+
+```python
+class ComAtprotoRepoStrongRef(BaseModel):
+    uri: str
+    cid: str
+
+class FmPlyrLike(BaseModel):
+    subject: ComAtprotoRepoStrongRef  # properly typed!
+    created_at: str = Field(alias="createdAt")
+```
+
 ## how it works

 1. parses lexicon json using [atrium-lex](https://github.com/atrium-rs/atrium) (rust)
-2. generates pydantic v2 models
-3. outputs standalone python - no atproto sdk dependency
+2. resolves internal (`#localDef`) and external (`com.atproto.*`) refs
+3. generates pydantic v2 models with field aliases
+4. outputs standalone python - no atproto sdk dependency
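the caching scheme the README describes is simple enough to reproduce by hand. below is a minimal python sketch of the same key derivation, using stdlib `hashlib` in place of the `sha2` crate that `hash_lexicons` in `src/lib.rs` (further down) uses; the helper itself is illustrative and not part of pmgfal, and it assumes python's path sort order matches rust's (true for plain ascii paths):

```python
# illustrative re-derivation of pmgfal's cache key; not part of the package.
import hashlib
from pathlib import Path


def cache_key(lexicon_dir: str, version: str, prefix: str | None = None) -> str:
    h = hashlib.sha256()
    h.update(version.encode())  # pmgfal version: cache invalidates on upgrade
    if prefix:
        h.update(prefix.encode())  # namespace prefix filter is part of the key
    for p in sorted(Path(lexicon_dir).rglob("*.json")):  # deterministic order
        h.update(p.name.encode())  # file name, then raw content
        h.update(p.read_bytes())
    return h.hexdigest()[:16]  # first 8 bytes of the digest, hex-encoded
```

models generated for a given key are then stored under the platform cache dir, e.g. `~/.cache/pmgfal/<key>/models.py` on linux.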
justfile (+24 -11)
···
-# build rust extension
-build:
-    uv run maturin develop
+# build rust extension in dev mode
+dev:
+    uvx maturin develop

-# run tests (requires build first)
-test: build
-    uv run pytest
+# run tests
+test: dev
+    uv run pytest -v

-# lint python
+# build release wheels
+build:
+    uvx maturin build --release
+
+# lint
 lint:
-    uv run ruff check
-    uv run ruff format --check
+    uv run ruff check .
+    uv run ruff format --check .

-# format python
+# format
 fmt:
-    uv run ruff format
+    uv run ruff check --fix .
+    uv run ruff format .
+
+# benchmark on atproto lexicons
+bench: dev
+    ./scripts/bench.py
+
+# clean build artifacts
+clean:
+    rm -rf target dist *.egg-info
python/pmgfal/__init__.py (+85 -17)
···
 from __future__ import annotations

 import argparse
+import os
+import shutil
+import subprocess
 import sys
+import tempfile
 from pathlib import Path

-from pmgfal._pmgfal import __version__, generate
+from pmgfal._pmgfal import __version__, generate, hash_lexicons

-__all__ = ["__version__", "generate", "main"]
+__all__ = ["__version__", "generate", "get_cache_dir", "hash_lexicons", "main"]
+
+
+def get_cache_dir() -> Path:
+    """get the user cache directory for pmgfal."""
+    if sys.platform == "darwin":
+        base = Path.home() / "Library" / "Caches"
+    elif sys.platform == "win32":
+        base = Path(os.environ.get("LOCALAPPDATA", Path.home() / "AppData" / "Local"))
+    else:
+        base = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache"))
+    return base / "pmgfal"
+
+
+def is_git_url(path: str) -> bool:
+    """check if path looks like a git url."""
+    return path.startswith(("https://", "git@", "ssh://", "git://"))


 def main(args: list[str] | None = None) -> int:
···
         description="pydantic model generator for atproto lexicons",
     )
     parser.add_argument(
-        "lexicon_dir",
+        "lexicon_source",
         nargs="?",
-        type=Path,
-        help="directory containing lexicon json files (default: ./lexicons or .)",
+        help="directory or git url containing lexicon json files (default: ./lexicons or .)",
     )
     parser.add_argument(
         "-o",
···
38
57
help="namespace prefix filter (e.g. 'fm.plyr')",
39
58
)
40
59
parser.add_argument(
60
+
"--no-cache",
61
+
action="store_true",
62
+
help="force regeneration, ignoring cache",
63
+
)
64
+
parser.add_argument(
41
65
"-V",
42
66
"--version",
43
67
action="version",
···
46
70
47
71
parsed = parser.parse_args(args)
48
72
49
-
# auto-detect lexicon directory
50
-
if parsed.lexicon_dir is None:
51
-
if Path("./lexicons").is_dir():
52
-
lexicon_dir = Path("./lexicons")
73
+
temp_dir = None
74
+
try:
75
+
# handle git urls by cloning to temp dir
76
+
if parsed.lexicon_source and is_git_url(parsed.lexicon_source):
77
+
temp_dir = tempfile.mkdtemp(prefix="pmgfal-")
78
+
print(f"cloning {parsed.lexicon_source}...")
79
+
result = subprocess.run(
80
+
["git", "clone", "--depth=1", parsed.lexicon_source, temp_dir],
81
+
capture_output=True,
82
+
text=True,
83
+
)
84
+
if result.returncode != 0:
85
+
print(f"error: git clone failed: {result.stderr}", file=sys.stderr)
86
+
return 1
87
+
# look for lexicons subdir in cloned repo
88
+
if (Path(temp_dir) / "lexicons").is_dir():
89
+
lexicon_dir = Path(temp_dir) / "lexicons"
90
+
else:
91
+
lexicon_dir = Path(temp_dir)
92
+
# auto-detect lexicon directory
93
+
elif parsed.lexicon_source is None:
94
+
if Path("./lexicons").is_dir():
95
+
lexicon_dir = Path("./lexicons")
96
+
else:
97
+
lexicon_dir = Path(".")
53
98
else:
54
-
lexicon_dir = Path(".")
55
-
else:
56
-
lexicon_dir = parsed.lexicon_dir
99
+
lexicon_dir = Path(parsed.lexicon_source)
100
+
101
+
if not lexicon_dir.is_dir():
102
+
print(f"error: not a directory: {lexicon_dir}", file=sys.stderr)
103
+
return 1
104
+
# compute hash of lexicons (in rust)
105
+
lexicon_hash = hash_lexicons(str(lexicon_dir), parsed.prefix)
106
+
cache_dir = get_cache_dir() / lexicon_hash
57
107
58
-
if not lexicon_dir.is_dir():
59
-
print(f"error: not a directory: {lexicon_dir}", file=sys.stderr)
60
-
return 1
108
+
# check cache
109
+
if not parsed.no_cache and cache_dir.exists():
110
+
# cache hit - copy cached files to output
111
+
parsed.output.mkdir(parents=True, exist_ok=True)
112
+
cached_files = list(cache_dir.glob("*.py"))
113
+
for cached in cached_files:
114
+
dest = parsed.output / cached.name
115
+
shutil.copy2(cached, dest)
116
+
print(f"cache hit ({lexicon_hash}) - copied {len(cached_files)} file(s):")
117
+
for f in cached_files:
118
+
print(f" {parsed.output / f.name}")
119
+
return 0
61
120
62
-
try:
121
+
# cache miss - generate
63
122
files = generate(
64
123
str(lexicon_dir),
65
124
str(parsed.output),
66
125
parsed.prefix,
67
126
)
68
-
print(f"generated {len(files)} file(s):")
127
+
128
+
# store in cache
129
+
cache_dir.mkdir(parents=True, exist_ok=True)
130
+
for f in files:
131
+
shutil.copy2(f, cache_dir / Path(f).name)
132
+
133
+
print(f"generated {len(files)} file(s) (cached as {lexicon_hash}):")
69
134
for f in files:
70
135
print(f" {f}")
71
136
return 0
72
137
except Exception as e:
73
138
print(f"error: {e}", file=sys.stderr)
74
139
return 1
140
+
finally:
141
+
if temp_dir and Path(temp_dir).exists():
142
+
shutil.rmtree(temp_dir)
75
143
76
144
77
145
if __name__ == "__main__":
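since `main` takes an argv-style list and `get_cache_dir` / `hash_lexicons` are exported in `__all__`, the changes above can also be exercised programmatically. a short usage sketch; the output paths are illustrative, and the git url is the atproto repo that `scripts/bench.py` below also clones:

```python
from pmgfal import get_cache_dir, hash_lexicons, main

# local directory: generates on the first run, copies from cache afterwards
main(["./lexicons", "-o", "./src/models", "-p", "fm.plyr"])

# where that run was cached: <cache dir>/<16-hex key>/
key = hash_lexicons("./lexicons", "fm.plyr")
print(get_cache_dir() / key)

# git url: shallow-cloned to a temp dir, removed again in the finally block
main(["https://github.com/bluesky-social/atproto.git", "-o", "./models"])
```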
scripts/bench.py (+55)
···
+#!/usr/bin/env -S uv run python
+"""benchmark pmgfal on real lexicons."""
+
+import subprocess
+import tempfile
+import time
+from pathlib import Path
+
+
+def bench_atproto():
+    """benchmark against full atproto lexicons."""
+    with tempfile.TemporaryDirectory() as tmp:
+        # clone atproto
+        print("cloning atproto lexicons...")
+        subprocess.run(
+            ["git", "clone", "--depth=1", "https://github.com/bluesky-social/atproto.git", tmp],
+            capture_output=True,
+            check=True,
+        )
+
+        lexicon_dir = Path(tmp) / "lexicons"
+        output_dir = Path(tmp) / "output"
+        json_files = list(lexicon_dir.rglob("*.json"))
+
+        print(f"found {len(json_files)} lexicon files")
+
+        # benchmark generation (cold)
+        start = time.perf_counter()
+        subprocess.run(
+            ["uv", "run", "pmgfal", str(lexicon_dir), "-o", str(output_dir), "--no-cache"],
+            check=True,
+        )
+        cold_time = time.perf_counter() - start
+
+        # count output
+        models_file = output_dir / "models.py"
+        lines = len(models_file.read_text().splitlines()) if models_file.exists() else 0
+
+        # benchmark cache hit
+        start = time.perf_counter()
+        subprocess.run(
+            ["uv", "run", "pmgfal", str(lexicon_dir), "-o", str(output_dir)],
+            check=True,
+        )
+        cache_time = time.perf_counter() - start
+
+        print("\nresults:")
+        print(f"  lexicons: {len(json_files)}")
+        print(f"  output: {lines} lines")
+        print(f"  cold generation: {cold_time:.3f}s")
+        print(f"  cache hit: {cache_time:.3f}s")
+
+
+if __name__ == "__main__":
+    bench_atproto()
src/lib.rs (+42)
···
 mod parser;
 mod types;

+use std::fs;
 use std::path::Path;

 use pyo3::prelude::*;
+use sha2::{Digest, Sha256};
+
+/// compute a hash of all lexicon files in a directory
+#[pyfunction]
+#[pyo3(signature = (lexicon_dir, namespace_prefix=None))]
+fn hash_lexicons(lexicon_dir: &str, namespace_prefix: Option<&str>) -> PyResult<String> {
+    let lexicon_path = Path::new(lexicon_dir);
+
+    let mut hasher = Sha256::new();
+
+    // include version in hash so cache invalidates on upgrades
+    hasher.update(env!("CARGO_PKG_VERSION").as_bytes());
+
+    // include prefix in hash
+    if let Some(prefix) = namespace_prefix {
+        hasher.update(prefix.as_bytes());
+    }
+
+    // collect and sort json files for deterministic hashing
+    let mut json_files: Vec<_> = walkdir::WalkDir::new(lexicon_path)
+        .into_iter()
+        .filter_map(|e| e.ok())
+        .filter(|e| e.path().extension().is_some_and(|ext| ext == "json"))
+        .collect();
+
+    json_files.sort_by(|a, b| a.path().cmp(b.path()));
+
+    for entry in json_files {
+        let path = entry.path();
+        if let Some(name) = path.file_name() {
+            hasher.update(name.as_encoded_bytes());
+        }
+        if let Ok(content) = fs::read(path) {
+            hasher.update(&content);
+        }
+    }
+
+    let result = hasher.finalize();
+    Ok(hex::encode(&result[..8])) // 16 hex chars
+}

 /// generate pydantic models from lexicon files
 #[pyfunction]
···
 #[pymodule]
 fn _pmgfal(m: &Bound<'_, PyModule>) -> PyResult<()> {
     m.add_function(wrap_pyfunction!(generate, m)?)?;
+    m.add_function(wrap_pyfunction!(hash_lexicons, m)?)?;
     m.add("__version__", env!("CARGO_PKG_VERSION"))?;
     Ok(())
 }
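`hash_lexicons` truncates the sha-256 digest to its first 8 bytes, which keeps cache directory names short while leaving collisions in a local cache vanishingly unlikely. a quick sanity check of the binding from python, with an illustrative lexicon directory:

```python
from pmgfal import hash_lexicons

k1 = hash_lexicons("./lexicons", None)       # prefix is optional
k2 = hash_lexicons("./lexicons", "fm.plyr")  # prefix feeds the hash
assert len(k1) == 16                         # 8-byte digest, hex-encoded
assert k1 != k2                              # expected: distinct key per prefix
```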
tests/test_generate.py (+75)
···
         assert "class ComAtprotoRepoStrongRef(BaseModel):" in content
         assert "uri: str" in content
         assert "cid: str" in content
+
+
+class TestCaching:
+    """test caching behavior."""
+
+    def test_cache_hit(self):
+        """second run should hit cache."""
+        from pmgfal import main
+
+        lexicon = {
+            "lexicon": 1,
+            "id": "test.cache",
+            "defs": {
+                "main": {
+                    "type": "record",
+                    "record": {
+                        "type": "object",
+                        "properties": {"x": {"type": "string"}},
+                    },
+                }
+            },
+        }
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            lexicon_dir = Path(tmpdir) / "lexicons"
+            lexicon_dir.mkdir()
+            (lexicon_dir / "cache.json").write_text(json.dumps(lexicon))
+
+            output_dir = Path(tmpdir) / "generated"
+
+            # first run - cache miss
+            result = main([str(lexicon_dir), "-o", str(output_dir)])
+            assert result == 0
+            assert (output_dir / "models.py").exists()
+
+            # delete output to prove cache works
+            (output_dir / "models.py").unlink()
+
+            # second run - cache hit
+            result = main([str(lexicon_dir), "-o", str(output_dir)])
+            assert result == 0
+            assert (output_dir / "models.py").exists()
+
+    def test_no_cache_flag(self):
+        """--no-cache should force regeneration."""
+        from pmgfal import main
+
+        lexicon = {
+            "lexicon": 1,
+            "id": "test.nocache",
+            "defs": {
+                "main": {
+                    "type": "record",
+                    "record": {
+                        "type": "object",
+                        "properties": {"y": {"type": "string"}},
+                    },
+                }
+            },
+        }
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            lexicon_dir = Path(tmpdir) / "lexicons"
+            lexicon_dir.mkdir()
+            (lexicon_dir / "nocache.json").write_text(json.dumps(lexicon))
+
+            output_dir = Path(tmpdir) / "generated"
+
+            # first run
+            result = main([str(lexicon_dir), "-o", str(output_dir)])
+            assert result == 0
+
+            # second run with --no-cache
+            result = main([str(lexicon_dir), "-o", str(output_dir), "--no-cache"])
+            assert result == 0