danabra.mov/typelex
An experimental TypeSpec syntax for Lexicon
Commit: bump
Author: danabra.mov (4 months ago)
Hashes: 6ab4395c, 098f982c
+150 -337, 9 changed files:
.github/workflows/test.yml
.husky/pre-commit
package.json
typelex-emitter/package.json
typelex-emitter/test/basic.test.ts
typelex-emitter/test/compile-helper.ts
typelex-emitter/test/emitter.test.ts
typelex-emitter/test/smoke.test.ts
typelex-emitter/test/transform.test.ts
.github/workflows/test.yml  (new file, +40)

name: Test

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [18.x, 20.x]

    steps:
      - uses: actions/checkout@v3

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install dependencies
        run: npm ci

      - name: Build emitter
        run: npm run build -w @typelex/emitter

      - name: Run tests
        run: npm test -w @typelex/emitter

      - name: Test example project
        run: |
          cd typelex-example
          npm link @typelex/emitter
          npm run build
          # Verify output exists
          test -f lexicons/app/example/post.json
.husky/pre-commit  (new file, +5)

#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Run tests before commit
npm run test -w @typelex/emitter
package.json  (+3, -1)

   ],
   "scripts": {
     "build": "npm run build -w @typelex/emitter",
+    "test": "npm run test:ci -w @typelex/emitter",
+    "test:watch": "npm test -w @typelex/emitter",
     "example": "npm run build -w typelex-example",
-    "test": "npm test -w @typelex/emitter"
+    "validate": "npm run build && npm run test && npm run example"
   },
   "repository": {
     "type": "git",
typelex-emitter/package.json  (+2, -2)

   "type": "module",
   "scripts": {
     "build": "tsc",
-    "test": "vitest",
-    "test:ci": "vitest run",
+    "test": "npm run build && vitest",
+    "test:ci": "npm run build && vitest run",
     "clean": "rm -rf dist",
     "watch": "tsc --watch"
   },
typelex-emitter/test/basic.test.ts  (deleted, -107)

import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { readFile, rm, mkdir } from "fs/promises";
import { join } from "path";
import { fileURLToPath } from "url";
import { dirname } from "path";
import { compile, NodeHost, CompilerHost, createSourceFile } from "@typespec/compiler";
import { TypeLexEmitter } from "../src/emitter.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

describe("TypeLex Basic Tests", () => {
  const outputDir = join(__dirname, "outputs");

  beforeEach(async () => {
    await rm(outputDir, { recursive: true, force: true });
    await mkdir(outputDir, { recursive: true });
  });

  afterEach(async () => {
    await rm(outputDir, { recursive: true, force: true });
  });

  async function emitFromCode(code: string) {
    const host = NodeHost;
    const mainFile = createSourceFile(code, "main.tsp");

    const program = await compile(host, mainFile, {
      noEmit: true,
    });

    const emitter = new TypeLexEmitter(program, { outputDir });
    await emitter.emit();

    return program;
  }

  async function readLexicon(path: string): Promise<any> {
    const content = await readFile(join(outputDir, path), "utf-8");
    return JSON.parse(content);
  }

  it("should emit a simple model", async () => {
    const code = `
      namespace xyz.example;

      model Status {
        text: string;
        createdAt: utcDateTime;
      }
    `;

    await emitFromCode(code);

    const lexicon = await readLexicon("xyz/example/status.json");

    expect(lexicon.lexicon).toBe(1);
    expect(lexicon.id).toBe("xyz.example.status");
    expect(lexicon.defs.main).toBeDefined();
    expect(lexicon.defs.main.type).toBe("record");
    expect(lexicon.defs.main.record.type).toBe("object");
    expect(lexicon.defs.main.record.required).toContain("text");
    expect(lexicon.defs.main.record.required).toContain("createdAt");
  });

  it("should handle optional properties", async () => {
    const code = `
      namespace com.example;

      model Profile {
        name: string;
        bio?: string;
      }
    `;

    await emitFromCode(code);

    const lexicon = await readLexicon("com/example/profile.json");

    expect(lexicon.defs.main.record.required).toEqual(["name"]);
    expect(lexicon.defs.main.record.properties).toHaveProperty("bio");
  });

  it("should map scalar types correctly", async () => {
    const code = `
      namespace test.types;

      model AllTypes {
        str: string;
        bool: boolean;
        int: int32;
        date: utcDateTime;
      }
    `;

    await emitFromCode(code);

    const lexicon = await readLexicon("test/types/allTypes.json");
    const props = lexicon.defs.main.record.properties;

    expect(props.str.type).toBe("string");
    expect(props.bool.type).toBe("boolean");
    expect(props.int.type).toBe("integer");
    expect(props.date.type).toBe("string");
    expect(props.date.format).toBe("datetime");
  });
});
typelex-emitter/test/compile-helper.ts  (deleted, -22)

import { compile, resolvePath } from "@typespec/compiler";
import { fileURLToPath } from "url";
import { dirname, join } from "path";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

export async function compileTypeSpec(code: string, outputDir: string) {
  const emitterPath = join(__dirname, "..", "dist", "index.js");

  const program = await compile("@typespec/compiler", code, {
    outputDir,
    emit: ["@typelex/emitter"],
    options: {
      "@typelex/emitter": {
        "output-dir": outputDir,
      },
    },
  });

  return program;
}
typelex-emitter/test/emitter.test.ts  (deleted, -205)

import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { createTestHost } from "@typespec/compiler/testing";
import { readFile, rm, access } from "fs/promises";
import { join } from "path";
import { fileURLToPath } from "url";
import { dirname } from "path";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

describe("TypeLex Emitter", () => {
  const outputDir = join(__dirname, "outputs");

  beforeEach(async () => {
    await rm(outputDir, { recursive: true, force: true });
  });

  afterEach(async () => {
    await rm(outputDir, { recursive: true, force: true });
  });

  async function compileAndEmit(code: string) {
    const host = await createTestHost();

    // Add the emitter to the host
    await host.addTypeSpecFile("main.tsp", code);

    const diagnostics = await host.diagnose("main.tsp", {
      emitters: {
        "@typelex/emitter": {
          "output-dir": outputDir,
        },
      },
    });

    await host.compile("main.tsp", {
      emitters: {
        "@typelex/emitter": {
          "output-dir": outputDir,
        },
      },
    });

    return diagnostics;
  }

  async function readLexicon(path: string): Promise<any> {
    const content = await readFile(join(outputDir, path), "utf-8");
    return JSON.parse(content);
  }

  it("should emit a simple model as a record", async () => {
    const code = `
      namespace xyz.example;

      @doc("A simple status model")
      model Status {
        @doc("The status text")
        text: string;

        @doc("When it was created")
        createdAt: utcDateTime;
      }
    `;

    await compileAndEmit(code);

    const lexicon = await readLexicon("xyz/example/status.json");

    expect(lexicon).toEqual({
      lexicon: 1,
      id: "xyz.example.status",
      description: "A simple status model",
      defs: {
        main: {
          type: "record",
          key: "tid",
          record: {
            type: "object",
            required: ["text", "createdAt"],
            properties: {
              text: {
                type: "string",
                description: "The status text",
              },
              createdAt: {
                type: "string",
                format: "datetime",
                description: "When it was created",
              },
            },
          },
        },
      },
    });
  });

  it("should handle optional properties", async () => {
    const code = `
      namespace com.example;

      model Profile {
        name: string;
        bio?: string;
        avatar?: string;
      }
    `;

    await compileAndEmit(code);

    const lexicon = await readLexicon("com/example/profile.json");

    expect(lexicon.defs.main.record.required).toEqual(["name"]);
    expect(lexicon.defs.main.record.properties).toHaveProperty("bio");
    expect(lexicon.defs.main.record.properties).toHaveProperty("avatar");
  });

  it("should handle array types", async () => {
    const code = `
      namespace app.example;

      model Post {
        text: string;
        tags: string[];
        likes: int32[];
      }
    `;

    await compileAndEmit(code);

    const lexicon = await readLexicon("app/example/post.json");

    expect(lexicon.defs.main.record.properties.tags).toEqual({
      type: "array",
      items: {
        type: "string",
      },
    });

    expect(lexicon.defs.main.record.properties.likes).toEqual({
      type: "array",
      items: {
        type: "integer",
      },
    });
  });

  it("should handle different scalar types", async () => {
    const code = `
      namespace test.types;

      model AllTypes {
        str: string;
        bool: boolean;
        int: int32;
        bigInt: int64;
        float: float32;
        double: float64;
        date: utcDateTime;
      }
    `;

    await compileAndEmit(code);

    const lexicon = await readLexicon("test/types/allTypes.json");

    expect(lexicon.defs.main.record.properties.str.type).toBe("string");
    expect(lexicon.defs.main.record.properties.bool.type).toBe("boolean");
    expect(lexicon.defs.main.record.properties.int.type).toBe("integer");
    expect(lexicon.defs.main.record.properties.bigInt.type).toBe("integer");
    expect(lexicon.defs.main.record.properties.float.type).toBe("number");
    expect(lexicon.defs.main.record.properties.double.type).toBe("number");
    expect(lexicon.defs.main.record.properties.date.type).toBe("string");
    expect(lexicon.defs.main.record.properties.date.format).toBe("datetime");
  });

  it("should handle nested objects", async () => {
    const code = `
      namespace social.example;

      model Author {
        did: string;
        handle: string;
      }

      model Post {
        text: string;
        author: Author;
      }
    `;

    await compileAndEmit(code);

    const postLexicon = await readLexicon("social/example/post.json");

    expect(postLexicon.defs.main.record.properties.author).toEqual({
      type: "object",
      required: ["did", "handle"],
      properties: {
        did: { type: "string" },
        handle: { type: "string" },
      },
    });
  });
});
typelex-emitter/test/smoke.test.ts  (new file, +28)

import { describe, it, expect } from "vitest";
import { existsSync } from "fs";
import { join } from "path";
import { fileURLToPath } from "url";
import { dirname } from "path";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

describe("Smoke Tests", () => {
  it("should have compiled output", () => {
    const distDir = join(__dirname, "..", "dist");
    expect(existsSync(distDir)).toBe(true);

    const indexPath = join(distDir, "index.js");
    expect(existsSync(indexPath)).toBe(true);

    const emitterPath = join(distDir, "emitter.js");
    expect(existsSync(emitterPath)).toBe(true);
  });

  it("should export $onEmit function", async () => {
    // This verifies our main export works
    const indexModule = await import("../dist/index.js");
    expect(indexModule.$onEmit).toBeDefined();
    expect(typeof indexModule.$onEmit).toBe("function");
  });
});
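Note (not part of the commit): the second smoke test only checks that the built package exposes an $onEmit export, which is the entry point TypeSpec expects from an emitter library. Below is a minimal sketch of what such an entry point can look like, assuming the usual @typespec/compiler API; transformToLexicons() is a hypothetical stand-in for @typelex/emitter's real transform, not its actual implementation.

import { getDirectoryPath, resolvePath } from "@typespec/compiler";
import type { EmitContext, Program } from "@typespec/compiler";

// Hypothetical transform step: walk the program and build one Lexicon document per model.
declare function transformToLexicons(program: Program): { id: string }[];

// Sketch of the emitter entry point shape that smoke.test.ts asserts exists.
export async function $onEmit(context: EmitContext): Promise<void> {
  if (context.program.compilerOptions.noEmit) return;

  for (const lexicon of transformToLexicons(context.program)) {
    // e.g. "app.example.post" -> "<emitter-output-dir>/app/example/post.json"
    const outPath =
      resolvePath(context.emitterOutputDir, ...lexicon.id.split(".")) + ".json";
    await context.program.host.mkdirp(getDirectoryPath(outPath));
    await context.program.host.writeFile(outPath, JSON.stringify(lexicon, null, 2));
  }
}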
typelex-emitter/test/transform.test.ts  (new file, +72)

import { describe, it, expect } from "vitest";
import type { LexiconDocument, LexiconPrimitive } from "../src/types.js";

describe("Lexicon Transformations", () => {
  it("should create valid lexicon structure", () => {
    const lexicon: LexiconDocument = {
      lexicon: 1,
      id: "app.example.test",
      defs: {
        main: {
          type: "record",
          key: "tid",
          record: {
            type: "object",
            required: ["text"],
            properties: {
              text: { type: "string" },
              createdAt: { type: "string", format: "datetime" },
              count: { type: "integer" },
              active: { type: "boolean" },
            },
          },
        },
      },
    };

    expect(lexicon.lexicon).toBe(1);
    expect(lexicon.id).toBe("app.example.test");
    expect(lexicon.defs.main.type).toBe("record");
  });

  it("should map TypeSpec types to lexicon types correctly", () => {
    const mappings: Record<string, LexiconPrimitive["type"]> = {
      string: "string",
      boolean: "boolean",
      int32: "integer",
      int64: "integer",
      float32: "number",
      float64: "number",
    };

    for (const [typespec, lexicon] of Object.entries(mappings)) {
      // This is what our emitter should do
      const result = typespec.includes("int") ? "integer" :
                     typespec.includes("float") ? "number" :
                     typespec === "boolean" ? "boolean" : "string";

      expect(result).toBe(lexicon);
    }
  });

  it("should handle array types", () => {
    const arrayDef = {
      type: "array" as const,
      items: { type: "string" as const },
    };

    expect(arrayDef.type).toBe("array");
    expect(arrayDef.items.type).toBe("string");
  });

  it("should handle optional fields by excluding from required", () => {
    const required = ["id", "name"];
    const optional = ["bio", "avatar"];

    const allFields = [...required, ...optional];
    const recordRequired = allFields.filter(f => required.includes(f));

    expect(recordRequired).toEqual(["id", "name"]);
    expect(recordRequired).not.toContain("bio");
  });
});
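Note (not part of the commit): the mapping test above inlines the scalar conversion it expects from the emitter. Pulled out as a standalone reference, that same mapping looks roughly like the sketch below; the helper name mapScalarToLexicon and the inline result type are illustrative only, not the emitter's actual API.

// Hypothetical helper mirroring the TypeSpec-scalar-to-Lexicon-primitive mapping
// asserted in transform.test.ts; the name and shape are illustrative only.
type LexiconFieldSketch = { type: "string" | "boolean" | "integer" | "number"; format?: string };

function mapScalarToLexicon(typespecScalar: string): LexiconFieldSketch {
  switch (typespecScalar) {
    case "boolean":
      return { type: "boolean" };
    case "int32":
    case "int64":
      return { type: "integer" };
    case "float32":
    case "float64":
      return { type: "number" };
    case "utcDateTime":
      // The tests expect timestamps as strings with a "datetime" format.
      return { type: "string", format: "datetime" };
    default:
      // string and any unrecognized scalar fall back to a plain string field.
      return { type: "string" };
  }
}

// Example: mapScalarToLexicon("int64") -> { type: "integer" }
// Example: mapScalarToLexicon("utcDateTime") -> { type: "string", format: "datetime" }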