+32
-31
README.md
···
8
8
9
9
## ✨ Features
10
10
11
-
- **Schema-first:** Define and validate collections using [Zod](https://zod.dev).
11
+
- **Schema-first:** Define and validate collections using
12
+
[Zod](https://zod.dev).
12
13
- **Type-safe operations:** Auto-complete and strict typings for `insert`,
13
14
`find`, `update`, and `delete`.
14
15
- **Minimal & modular:** No decorators or magic. Just clean, composable APIs.
···
51
52
52
53
```ts
53
54
// src/index.ts
54
-
import {
55
-
connect,
56
-
disconnect,
57
-
InferModel,
58
-
Input,
59
-
Model,
60
-
} from "@nozzle/nozzle";
55
+
import { connect, disconnect, InferModel, Input, Model } from "@nozzle/nozzle";
61
56
import { userSchema } from "./schemas/user";
62
57
import { ObjectId } from "mongodb"; // v6+ driver recommended
63
58
···
67
62
async function main() {
68
63
// Basic connection
69
64
await connect("mongodb://localhost:27017", "your_database_name");
70
-
65
+
71
66
// Or with connection pooling options
72
67
await connect("mongodb://localhost:27017", "your_database_name", {
73
-
maxPoolSize: 10, // Maximum connections in pool
74
-
minPoolSize: 2, // Minimum connections in pool
75
-
maxIdleTimeMS: 30000, // Close idle connections after 30s
68
+
maxPoolSize: 10, // Maximum connections in pool
69
+
minPoolSize: 2, // Minimum connections in pool
70
+
maxIdleTimeMS: 30000, // Close idle connections after 30s
76
71
connectTimeoutMS: 10000, // Connection timeout
77
-
socketTimeoutMS: 45000, // Socket timeout
72
+
socketTimeoutMS: 45000, // Socket timeout
78
73
});
79
-
74
+
80
75
// Production-ready connection with retry logic and resilience
81
76
await connect("mongodb://localhost:27017", "your_database_name", {
82
77
// Connection pooling
83
78
maxPoolSize: 10,
84
79
minPoolSize: 2,
85
-
80
+
86
81
// Automatic retry logic (enabled by default)
87
-
retryReads: true, // Retry failed read operations
88
-
retryWrites: true, // Retry failed write operations
89
-
82
+
retryReads: true, // Retry failed read operations
83
+
retryWrites: true, // Retry failed write operations
84
+
90
85
// Timeouts
91
-
connectTimeoutMS: 10000, // Initial connection timeout
92
-
socketTimeoutMS: 45000, // Socket operation timeout
86
+
connectTimeoutMS: 10000, // Initial connection timeout
87
+
socketTimeoutMS: 45000, // Socket operation timeout
93
88
serverSelectionTimeoutMS: 10000, // Server selection timeout
94
-
89
+
95
90
// Connection resilience
96
-
maxIdleTimeMS: 30000, // Close idle connections
91
+
maxIdleTimeMS: 30000, // Close idle connections
97
92
heartbeatFrequencyMS: 10000, // Server health check interval
98
93
});
99
-
94
+
100
95
const UserModel = new Model("users", userSchema);
101
96
102
97
// Your operations go here
···
210
205
// All operations in this callback are part of the same transaction
211
206
const user = await UserModel.insertOne(
212
207
{ name: "Alice", email: "alice@example.com" },
213
-
{ session } // Pass session to each operation
208
+
{ session }, // Pass session to each operation
214
209
);
215
-
210
+
216
211
const order = await OrderModel.insertOne(
217
212
{ userId: user.insertedId, total: 100 },
218
-
{ session }
213
+
{ session },
219
214
);
220
-
215
+
221
216
// If any operation fails, the entire transaction is automatically aborted
222
217
// If callback succeeds, transaction is automatically committed
223
218
return { user, order };
224
219
});
225
220
226
221
// Manual session management (for advanced use cases)
227
-
import { startSession, endSession } from "@nozzle/nozzle";
222
+
import { endSession, startSession } from "@nozzle/nozzle";
228
223
229
224
const session = startSession();
230
225
try {
231
226
await session.withTransaction(async () => {
232
-
await UserModel.insertOne({ name: "Bob", email: "bob@example.com" }, { session });
227
+
await UserModel.insertOne({ name: "Bob", email: "bob@example.com" }, {
228
+
session,
229
+
});
233
230
await UserModel.updateOne({ name: "Alice" }, { balance: 50 }, { session });
234
231
});
235
232
} finally {
···
237
234
}
238
235
239
236
// Error Handling
240
-
import { ValidationError, ConnectionError } from "@nozzle/nozzle";
237
+
import { ConnectionError, ValidationError } from "@nozzle/nozzle";
241
238
242
239
try {
243
240
await UserModel.insertOne({ name: "", email: "invalid" });
···
261
258
## 🗺️ Roadmap
262
259
263
260
### 🔴 Critical (Must Have)
261
+
264
262
- [x] Transactions support
265
263
- [x] Connection retry logic
266
264
- [x] Improved error handling
···
268
266
- [x] Connection pooling configuration
269
267
270
268
### 🟡 Important (Should Have)
269
+
271
270
- [x] Index management
272
271
- [ ] Middleware/hooks system
273
272
- [ ] Relationship/population support
···
275
274
- [ ] Comprehensive edge case testing
276
275
277
276
### 🟢 Nice to Have
277
+
278
278
- [x] Pagination support
279
279
- [ ] Plugin system
280
280
- [ ] Query builder API
281
281
- [ ] Virtual fields
282
282
- [ ] Document/static methods
283
283
284
-
For detailed production readiness assessment, see [PRODUCTION_READINESS_ASSESSMENT.md](./PRODUCTION_READINESS_ASSESSMENT.md).
284
+
For a detailed production readiness assessment, see
285
+
[PRODUCTION_READINESS_ASSESSMENT.md](./PRODUCTION_READINESS_ASSESSMENT.md).
285
286
286
287
---
287
288
+124
bench/ops_bench.ts
···
1
+
import { z } from "@zod/zod";
2
+
import { MongoMemoryServer } from "mongodb-memory-server-core";
3
+
import mongoose from "mongoose";
4
+
import { connect, disconnect, Model } from "../mod.ts";
5
+
6
+
/**
7
+
* Benchmark basic CRUD operations for Nozzle vs Mongoose.
8
+
*
9
+
* Run with:
10
+
*   deno bench -A bench/ops_bench.ts
11
+
*/
12
+
13
+
const userSchema = z.object({
14
+
name: z.string(),
15
+
email: z.string().email(),
16
+
age: z.number().int().positive().optional(),
17
+
createdAt: z.date().default(() => new Date()),
18
+
});
19
+
20
+
const mongoServer = await MongoMemoryServer.create();
21
+
const uri = mongoServer.getUri();
22
+
23
+
// Use separate DBs to avoid any cross-driver interference
24
+
const nozzleDbName = "bench_nozzle";
25
+
const mongooseDbName = "bench_mongoose";
26
+
27
+
await connect(uri, nozzleDbName);
28
+
const NozzleUser = new Model("bench_users_nozzle", userSchema);
29
+
30
+
const mongooseConn = await mongoose.connect(uri, { dbName: mongooseDbName });
31
+
const mongooseUserSchema = new mongoose.Schema(
32
+
{
33
+
name: String,
34
+
email: String,
35
+
age: Number,
36
+
createdAt: { type: Date, default: Date.now },
37
+
},
38
+
{ collection: "bench_users_mongoose" },
39
+
);
40
+
const MongooseUser = mongooseConn.models.BenchUser ||
41
+
mongooseConn.model("BenchUser", mongooseUserSchema);
42
+
43
+
// Start from a clean state
44
+
await NozzleUser.delete({});
45
+
await MongooseUser.deleteMany({});
46
+
47
+
// Seed base documents for read/update benches
48
+
const nozzleSeed = await NozzleUser.insertOne({
49
+
name: "Seed Nozzle",
50
+
email: "seed-nozzle@example.com",
51
+
age: 30,
52
+
});
53
+
const mongooseSeed = await MongooseUser.create({
54
+
name: "Seed Mongoose",
55
+
email: "seed-mongoose@example.com",
56
+
age: 30,
57
+
});
58
+
59
+
const nozzleSeedId = nozzleSeed.insertedId;
60
+
const mongooseSeedId = mongooseSeed._id;
61
+
62
+
let counter = 0;
63
+
const nextEmail = (prefix: string) => `${prefix}-${counter++}@bench.dev`;
64
+
65
+
Deno.bench("mongoose insertOne", { group: "insertOne" }, async () => {
66
+
await MongooseUser.insertOne({
67
+
name: "Mongoose User",
68
+
email: nextEmail("mongoose"),
69
+
age: 25,
70
+
});
71
+
});
72
+
73
+
Deno.bench(
74
+
"nozzle insertOne",
75
+
{ group: "insertOne", baseline: true },
76
+
async () => {
77
+
await NozzleUser.insertOne({
78
+
name: "Nozzle User",
79
+
email: nextEmail("nozzle"),
80
+
age: 25,
81
+
});
82
+
},
83
+
);
84
+
85
+
Deno.bench("mongoose findById", { group: "findById" }, async () => {
86
+
await MongooseUser.findById(mongooseSeedId);
87
+
});
88
+
89
+
Deno.bench(
90
+
"nozzle findById",
91
+
{ group: "findById", baseline: true },
92
+
async () => {
93
+
await NozzleUser.findById(nozzleSeedId);
94
+
},
95
+
);
96
+
97
+
Deno.bench("mongoose updateOne", { group: "updateOne" }, async () => {
98
+
await MongooseUser.updateOne(
99
+
{ _id: mongooseSeedId },
100
+
{ $set: { age: 31 } },
101
+
);
102
+
});
103
+
104
+
Deno.bench(
105
+
"nozzle updateOne",
106
+
{ group: "updateOne", baseline: true },
107
+
async () => {
108
+
await NozzleUser.updateOne(
109
+
{ _id: nozzleSeedId },
110
+
{ age: 31 },
111
+
);
112
+
},
113
+
);
114
+
115
+
// Attempt graceful shutdown when the process exits
116
+
async function cleanup() {
117
+
await disconnect();
118
+
await mongooseConn.disconnect();
119
+
await mongoServer.stop();
120
+
}
121
+
122
+
globalThis.addEventListener("unload", () => {
123
+
void cleanup();
124
+
});
+139
bench/results.json
···
1
+
{
2
+
"version": 1,
3
+
"runtime": "Deno/2.5.6 aarch64-apple-darwin",
4
+
"cpu": "Apple M2 Pro",
5
+
"benches": [
6
+
{
7
+
"origin": "file:///Users/knotbin/Developer/nozzle/bench/ops_bench.ts",
8
+
"group": "insertOne",
9
+
"name": "mongoose insertOne",
10
+
"baseline": false,
11
+
"results": [
12
+
{
13
+
"ok": {
14
+
"n": 3733,
15
+
"min": 85750.0,
16
+
"max": 495459.0,
17
+
"avg": 134257.0,
18
+
"p75": 128917.0,
19
+
"p99": 313291.0,
20
+
"p995": 344708.0,
21
+
"p999": 446833.0,
22
+
"highPrecision": true,
23
+
"usedExplicitTimers": false
24
+
}
25
+
}
26
+
]
27
+
},
28
+
{
29
+
"origin": "file:///Users/knotbin/Developer/nozzle/bench/ops_bench.ts",
30
+
"group": "insertOne",
31
+
"name": "nozzle insertOne",
32
+
"baseline": true,
33
+
"results": [
34
+
{
35
+
"ok": {
36
+
"n": 6354,
37
+
"min": 52667.0,
38
+
"max": 453875.0,
39
+
"avg": 78809.0,
40
+
"p75": 81417.0,
41
+
"p99": 149417.0,
42
+
"p995": 201459.0,
43
+
"p999": 274750.0,
44
+
"highPrecision": true,
45
+
"usedExplicitTimers": false
46
+
}
47
+
}
48
+
]
49
+
},
50
+
{
51
+
"origin": "file:///Users/knotbin/Developer/nozzle/bench/ops_bench.ts",
52
+
"group": "findById",
53
+
"name": "mongoose findById",
54
+
"baseline": false,
55
+
"results": [
56
+
{
57
+
"ok": {
58
+
"n": 3707,
59
+
"min": 113875.0,
60
+
"max": 510125.0,
61
+
"avg": 135223.0,
62
+
"p75": 137167.0,
63
+
"p99": 263958.0,
64
+
"p995": 347458.0,
65
+
"p999": 428500.0,
66
+
"highPrecision": true,
67
+
"usedExplicitTimers": false
68
+
}
69
+
}
70
+
]
71
+
},
72
+
{
73
+
"origin": "file:///Users/knotbin/Developer/nozzle/bench/ops_bench.ts",
74
+
"group": "findById",
75
+
"name": "nozzle findById",
76
+
"baseline": true,
77
+
"results": [
78
+
{
79
+
"ok": {
80
+
"n": 6045,
81
+
"min": 70750.0,
82
+
"max": 1008792.0,
83
+
"avg": 82859.0,
84
+
"p75": 83750.0,
85
+
"p99": 132250.0,
86
+
"p995": 183500.0,
87
+
"p999": 311833.0,
88
+
"highPrecision": true,
89
+
"usedExplicitTimers": false
90
+
}
91
+
}
92
+
]
93
+
},
94
+
{
95
+
"origin": "file:///Users/knotbin/Developer/nozzle/bench/ops_bench.ts",
96
+
"group": "updateOne",
97
+
"name": "mongoose updateOne",
98
+
"baseline": false,
99
+
"results": [
100
+
{
101
+
"ok": {
102
+
"n": 4123,
103
+
"min": 98500.0,
104
+
"max": 717334.0,
105
+
"avg": 121572.0,
106
+
"p75": 123292.0,
107
+
"p99": 179375.0,
108
+
"p995": 281417.0,
109
+
"p999": 342625.0,
110
+
"highPrecision": true,
111
+
"usedExplicitTimers": false
112
+
}
113
+
}
114
+
]
115
+
},
116
+
{
117
+
"origin": "file:///Users/knotbin/Developer/nozzle/bench/ops_bench.ts",
118
+
"group": "updateOne",
119
+
"name": "nozzle updateOne",
120
+
"baseline": true,
121
+
"results": [
122
+
{
123
+
"ok": {
124
+
"n": 6550,
125
+
"min": 53833.0,
126
+
"max": 401667.0,
127
+
"avg": 76456.0,
128
+
"p75": 76834.0,
129
+
"p99": 118292.0,
130
+
"p995": 181500.0,
131
+
"p999": 299958.0,
132
+
"highPrecision": true,
133
+
"usedExplicitTimers": false
134
+
}
135
+
}
136
+
]
137
+
}
138
+
]
139
+
}
+16
-14
client/connection.ts
···
1
-
import { type Db, type MongoClientOptions, MongoClient } from "mongodb";
1
+
import { type Db, MongoClient, type MongoClientOptions } from "mongodb";
2
2
import { ConnectionError } from "../errors.ts";
3
3
4
4
/**
5
5
* Connection management module
6
-
*
6
+
*
7
7
* Handles MongoDB connection lifecycle including connect, disconnect,
8
8
* and connection state management.
9
9
*/
···
20
20
21
21
/**
22
22
* Connect to MongoDB with connection pooling, retry logic, and resilience options
23
-
*
23
+
*
24
24
* The MongoDB driver handles connection pooling and automatic retries.
25
25
* Retry logic is enabled by default for both reads and writes in MongoDB 4.2+.
26
-
*
26
+
*
27
27
* @param uri - MongoDB connection string
28
28
* @param dbName - Name of the database to connect to
29
29
* @param options - Connection options (pooling, retries, timeouts, etc.)
30
30
* @returns Connection object with client and db
31
-
*
31
+
*
32
32
* @example
33
33
* Basic connection with pooling:
34
34
* ```ts
···
40
40
* socketTimeoutMS: 45000,
41
41
* });
42
42
* ```
43
-
*
43
+
*
44
44
* @example
45
45
* Production-ready connection with retry logic and resilience:
46
46
* ```ts
···
48
48
* // Connection pooling
49
49
* maxPoolSize: 10,
50
50
* minPoolSize: 2,
51
-
*
51
+
*
52
52
* // Automatic retry logic (enabled by default)
53
53
* retryReads: true, // Retry failed read operations
54
54
* retryWrites: true, // Retry failed write operations
55
-
*
55
+
*
56
56
* // Timeouts
57
57
* connectTimeoutMS: 10000, // Initial connection timeout
58
58
* socketTimeoutMS: 45000, // Socket operation timeout
59
59
* serverSelectionTimeoutMS: 10000, // Server selection timeout
60
-
*
60
+
*
61
61
* // Connection resilience
62
62
* maxIdleTimeMS: 30000, // Close idle connections
63
63
* heartbeatFrequencyMS: 10000, // Server health check interval
64
-
*
64
+
*
65
65
* // Optional: Compression for reduced bandwidth
66
66
* compressors: ['snappy', 'zlib'],
67
67
* });
···
85
85
return connection;
86
86
} catch (error) {
87
87
throw new ConnectionError(
88
-
`Failed to connect to MongoDB: ${error instanceof Error ? error.message : String(error)}`,
89
-
uri
88
+
`Failed to connect to MongoDB: ${
89
+
error instanceof Error ? error.message : String(error)
90
+
}`,
91
+
uri,
90
92
);
91
93
}
92
94
}
···
103
105
104
106
/**
105
107
* Get the current database connection
106
-
*
108
+
*
107
109
* @returns MongoDB Db instance
108
110
* @throws {ConnectionError} If not connected
109
111
* @internal
···
117
119
118
120
/**
119
121
* Get the current connection state
120
-
*
122
+
*
121
123
* @returns Connection object or null if not connected
122
124
* @internal
123
125
*/
+5
-5
client/health.ts
···
2
2
3
3
/**
4
4
* Health check module
5
-
*
5
+
*
6
6
* Provides functionality for monitoring MongoDB connection health
7
7
* including ping operations and response time measurement.
8
8
*/
9
9
10
10
/**
11
11
* Health check details of the MongoDB connection
12
-
*
12
+
*
13
13
* @property healthy - Overall health status of the connection
14
14
* @property connected - Whether a connection is established
15
15
* @property responseTimeMs - Response time in milliseconds (if connection is healthy)
···
26
26
27
27
/**
28
28
* Check the health of the MongoDB connection
29
-
*
29
+
*
30
30
* Performs a ping operation to verify the database is responsive
31
31
* and returns detailed health information including response time.
32
-
*
32
+
*
33
33
* @returns Health check result with status and metrics
34
-
*
34
+
*
35
35
* @example
36
36
* ```ts
37
37
* const health = await healthCheck();
+5
-12
client/index.ts
···
1
1
/**
2
2
* Client module - MongoDB connection and session management
3
-
*
3
+
*
4
4
* This module provides all client-level functionality including:
5
5
* - Connection management (connect, disconnect)
6
6
* - Health monitoring (healthCheck)
···
10
10
// Re-export connection management
11
11
export {
12
12
connect,
13
+
type Connection,
14
+
type ConnectOptions,
13
15
disconnect,
14
16
getDb,
15
-
type ConnectOptions,
16
-
type Connection,
17
17
} from "./connection.ts";
18
18
19
19
// Re-export health monitoring
20
-
export {
21
-
healthCheck,
22
-
type HealthCheckResult,
23
-
} from "./health.ts";
20
+
export { healthCheck, type HealthCheckResult } from "./health.ts";
24
21
25
22
// Re-export transaction management
26
-
export {
27
-
startSession,
28
-
endSession,
29
-
withTransaction,
30
-
} from "./transactions.ts";
23
+
export { endSession, startSession, withTransaction } from "./transactions.ts";
+12
-12
client/transactions.ts
···
4
4
5
5
/**
6
6
* Transaction management module
7
-
*
7
+
*
8
8
* Provides session and transaction management functionality including
9
9
* automatic transaction handling and manual session control.
10
10
*/
11
11
12
12
/**
13
13
* Start a new client session for transactions
14
-
*
14
+
*
15
15
* Sessions must be ended when done using `endSession()`
16
-
*
16
+
*
17
17
* @returns New MongoDB ClientSession
18
18
* @throws {ConnectionError} If not connected
19
-
*
19
+
*
20
20
* @example
21
21
* ```ts
22
22
* const session = startSession();
···
37
37
38
38
/**
39
39
* End a client session
40
-
*
40
+
*
41
41
* @param session - The session to end
42
42
*/
43
43
export async function endSession(session: ClientSession): Promise<void> {
···
46
46
47
47
/**
48
48
* Execute a function within a transaction
49
-
*
49
+
*
50
50
* Automatically handles session creation, transaction start/commit/abort, and cleanup.
51
51
* If the callback throws an error, the transaction is automatically aborted.
52
-
*
52
+
*
53
53
* @param callback - Async function to execute within the transaction. Receives the session as parameter.
54
54
* @param options - Optional transaction options (read/write concern, etc.)
55
55
* @returns The result from the callback function
56
-
*
56
+
*
57
57
* @example
58
58
* ```ts
59
59
* const result = await withTransaction(async (session) => {
···
65
65
*/
66
66
export async function withTransaction<T>(
67
67
callback: (session: ClientSession) => Promise<T>,
68
-
options?: TransactionOptions
68
+
options?: TransactionOptions,
69
69
): Promise<T> {
70
70
const session = startSession();
71
-
71
+
72
72
try {
73
73
let result: T;
74
-
74
+
75
75
await session.withTransaction(async () => {
76
76
result = await callback(session);
77
77
}, options);
78
-
78
+
79
79
return result!;
80
80
} finally {
81
81
await endSession(session);
+2
-1
deno.json
···
7
7
"@std/assert": "jsr:@std/assert@^1.0.16",
8
8
"@zod/zod": "jsr:@zod/zod@^4.1.13",
9
9
"mongodb": "npm:mongodb@^6.18.0",
10
-
"mongodb-memory-server-core": "npm:mongodb-memory-server-core@^10.3.0"
10
+
"mongodb-memory-server-core": "npm:mongodb-memory-server-core@^10.3.0",
11
+
"mongoose": "npm:mongoose@^8.5.2"
11
12
}
12
13
}
+40
-3
deno.lock
···
12
12
"jsr:@zod/zod@^4.1.13": "4.1.13",
13
13
"npm:@types/node@*": "22.15.15",
14
14
"npm:mongodb-memory-server-core@^10.3.0": "10.3.0",
15
-
"npm:mongodb@^6.18.0": "6.18.0"
15
+
"npm:mongodb@^6.18.0": "6.20.0",
16
+
"npm:mongoose@^8.5.2": "8.20.1"
16
17
},
17
18
"jsr": {
18
19
"@std/assert@1.0.13": {
···
133
134
"debug"
134
135
]
135
136
},
137
+
"kareem@2.6.3": {
138
+
"integrity": "sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q=="
139
+
},
136
140
"locate-path@5.0.0": {
137
141
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
138
142
"dependencies": [
···
164
168
"find-cache-dir",
165
169
"follow-redirects",
166
170
"https-proxy-agent",
167
-
"mongodb",
171
+
"mongodb@6.18.0",
168
172
"new-find-package-json",
169
173
"semver@7.7.3",
170
174
"tar-stream",
···
180
184
"mongodb-connection-string-url"
181
185
]
182
186
},
187
+
"mongodb@6.20.0": {
188
+
"integrity": "sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ==",
189
+
"dependencies": [
190
+
"@mongodb-js/saslprep",
191
+
"bson",
192
+
"mongodb-connection-string-url"
193
+
]
194
+
},
195
+
"mongoose@8.20.1": {
196
+
"integrity": "sha512-G+n3maddlqkQrP1nXxsI0q20144OSo+pe+HzRRGqaC4yK3FLYKqejqB9cbIi+SX7eoRsnG23LHGYNp8n7mWL2Q==",
197
+
"dependencies": [
198
+
"bson",
199
+
"kareem",
200
+
"mongodb@6.20.0",
201
+
"mpath",
202
+
"mquery",
203
+
"ms",
204
+
"sift"
205
+
]
206
+
},
207
+
"mpath@0.9.0": {
208
+
"integrity": "sha512-ikJRQTk8hw5DEoFVxHG1Gn9T/xcjtdnOKIU1JTmGjZZlg9LST2mBLmcX3/ICIbgJydT2GOc15RnNy5mHmzfSew=="
209
+
},
210
+
"mquery@5.0.0": {
211
+
"integrity": "sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==",
212
+
"dependencies": [
213
+
"debug"
214
+
]
215
+
},
183
216
"ms@2.1.3": {
184
217
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
185
218
},
···
226
259
"semver@7.7.3": {
227
260
"integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
228
261
"bin": true
262
+
},
263
+
"sift@17.1.3": {
264
+
"integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ=="
229
265
},
230
266
"sparse-bitfield@3.0.3": {
231
267
"integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==",
···
290
326
"jsr:@std/assert@^1.0.16",
291
327
"jsr:@zod/zod@^4.1.13",
292
328
"npm:mongodb-memory-server-core@^10.3.0",
293
-
"npm:mongodb@^6.18.0"
329
+
"npm:mongodb@^6.18.0",
330
+
"npm:mongoose@^8.5.2"
294
331
]
295
332
}
296
333
}
+21
-13
errors.ts
···
24
24
export class ValidationError extends NozzleError {
25
25
public readonly issues: ValidationIssue[];
26
26
public readonly operation: "insert" | "update" | "replace";
27
-
28
-
constructor(issues: ValidationIssue[], operation: "insert" | "update" | "replace") {
27
+
28
+
constructor(
29
+
issues: ValidationIssue[],
30
+
operation: "insert" | "update" | "replace",
31
+
) {
29
32
const message = ValidationError.formatIssues(issues);
30
33
super(`Validation failed on ${operation}: ${message}`);
31
34
this.issues = issues;
···
33
36
}
34
37
35
38
private static formatIssues(issues: ValidationIssue[]): string {
36
-
return issues.map(issue => {
37
-
const path = issue.path.join('.');
38
-
return `${path || 'root'}: ${issue.message}`;
39
-
}).join('; ');
39
+
return issues.map((issue) => {
40
+
const path = issue.path.join(".");
41
+
return `${path || "root"}: ${issue.message}`;
42
+
}).join("; ");
40
43
}
41
44
42
45
/**
···
45
48
public getFieldErrors(): Record<string, string[]> {
46
49
const fieldErrors: Record<string, string[]> = {};
47
50
for (const issue of this.issues) {
48
-
const field = issue.path.join('.') || 'root';
51
+
const field = issue.path.join(".") || "root";
49
52
if (!fieldErrors[field]) {
50
53
fieldErrors[field] = [];
51
54
}
···
61
64
*/
62
65
export class ConnectionError extends NozzleError {
63
66
public readonly uri?: string;
64
-
67
+
65
68
constructor(message: string, uri?: string) {
66
69
super(message);
67
70
this.uri = uri;
···
74
77
*/
75
78
export class ConfigurationError extends NozzleError {
76
79
public readonly option?: string;
77
-
80
+
78
81
constructor(message: string, option?: string) {
79
82
super(message);
80
83
this.option = option;
···
88
91
export class DocumentNotFoundError extends NozzleError {
89
92
public readonly query: unknown;
90
93
public readonly collection: string;
91
-
94
+
92
95
constructor(collection: string, query: unknown) {
93
96
super(`Document not found in collection '${collection}'`);
94
97
this.collection = collection;
···
104
107
public readonly operation: string;
105
108
public readonly collection?: string;
106
109
public override readonly cause?: Error;
107
-
108
-
constructor(operation: string, message: string, collection?: string, cause?: Error) {
110
+
111
+
constructor(
112
+
operation: string,
113
+
message: string,
114
+
collection?: string,
115
+
cause?: Error,
116
+
) {
109
117
super(`${operation} operation failed: ${message}`);
110
118
this.operation = operation;
111
119
this.collection = collection;
···
121
129
constructor() {
122
130
super(
123
131
"Async validation is not currently supported. " +
124
-
"Please use synchronous validation schemas."
132
+
"Please use synchronous validation schemas.",
125
133
);
126
134
}
127
135
}
+12
-12
mod.ts
···
1
-
export type { Schema, Infer, Input } from "./types.ts";
2
-
export {
3
-
connect,
4
-
disconnect,
5
-
healthCheck,
6
-
startSession,
1
+
export type { Infer, Input, Schema } from "./types.ts";
2
+
export {
3
+
connect,
4
+
type ConnectOptions,
5
+
disconnect,
7
6
endSession,
7
+
healthCheck,
8
+
type HealthCheckResult,
9
+
startSession,
8
10
withTransaction,
9
-
type ConnectOptions,
10
-
type HealthCheckResult
11
11
} from "./client/index.ts";
12
12
export { Model } from "./model/index.ts";
13
13
export {
14
-
NozzleError,
15
-
ValidationError,
14
+
AsyncValidationError,
15
+
ConfigurationError,
16
16
ConnectionError,
17
-
ConfigurationError,
18
17
DocumentNotFoundError,
18
+
NozzleError,
19
19
OperationError,
20
-
AsyncValidationError,
20
+
ValidationError,
21
21
} from "./errors.ts";
22
22
23
23
// Re-export MongoDB types that users might need
+81
-62
model/core.ts
···
1
1
import type { z } from "@zod/zod";
2
2
import type {
3
+
AggregateOptions,
4
+
BulkWriteOptions,
3
5
Collection,
6
+
CountDocumentsOptions,
7
+
DeleteOptions,
4
8
DeleteResult,
5
9
Document,
6
10
Filter,
11
+
FindOneAndReplaceOptions,
12
+
FindOneAndUpdateOptions,
13
+
FindOptions,
7
14
InsertManyResult,
8
-
InsertOneResult,
9
15
InsertOneOptions,
10
-
FindOptions,
11
-
UpdateOptions,
12
-
ReplaceOptions,
13
-
FindOneAndUpdateOptions,
14
-
FindOneAndReplaceOptions,
15
-
DeleteOptions,
16
-
CountDocumentsOptions,
17
-
AggregateOptions,
16
+
InsertOneResult,
17
+
ModifyResult,
18
18
OptionalUnlessRequiredId,
19
+
ReplaceOptions,
20
+
UpdateFilter,
21
+
UpdateOptions,
19
22
UpdateResult,
20
23
WithId,
21
-
BulkWriteOptions,
22
-
UpdateFilter,
23
-
ModifyResult,
24
24
} from "mongodb";
25
25
import { ObjectId } from "mongodb";
26
-
import type { Schema, Infer, Input } from "../types.ts";
27
-
import { parse, parsePartial, parseReplace, applyDefaultsForUpsert } from "./validation.ts";
26
+
import type { Infer, Input, Schema } from "../types.ts";
27
+
import {
28
+
applyDefaultsForUpsert,
29
+
parse,
30
+
parsePartial,
31
+
parseReplace,
32
+
} from "./validation.ts";
28
33
29
34
/**
30
35
* Core CRUD operations for the Model class
31
-
*
36
+
*
32
37
* This module contains all basic create, read, update, and delete operations
33
38
* with automatic Zod validation and transaction support.
34
39
*/
35
40
36
41
/**
37
42
* Insert a single document into the collection
38
-
*
43
+
*
39
44
* @param collection - MongoDB collection
40
45
* @param schema - Zod schema for validation
41
46
* @param data - Document data to insert
···
46
51
collection: Collection<Infer<T>>,
47
52
schema: T,
48
53
data: Input<T>,
49
-
options?: InsertOneOptions
54
+
options?: InsertOneOptions,
50
55
): Promise<InsertOneResult<Infer<T>>> {
51
56
const validatedData = parse(schema, data);
52
57
return await collection.insertOne(
53
58
validatedData as OptionalUnlessRequiredId<Infer<T>>,
54
-
options
59
+
options,
55
60
);
56
61
}
57
62
58
63
/**
59
64
* Insert multiple documents into the collection
60
-
*
65
+
*
61
66
* @param collection - MongoDB collection
62
67
* @param schema - Zod schema for validation
63
68
* @param data - Array of document data to insert
···
68
73
collection: Collection<Infer<T>>,
69
74
schema: T,
70
75
data: Input<T>[],
71
-
options?: BulkWriteOptions
76
+
options?: BulkWriteOptions,
72
77
): Promise<InsertManyResult<Infer<T>>> {
73
78
const validatedData = data.map((item) => parse(schema, item));
74
79
return await collection.insertMany(
75
80
validatedData as OptionalUnlessRequiredId<Infer<T>>[],
76
-
options
81
+
options,
77
82
);
78
83
}
79
84
80
85
/**
81
86
* Find multiple documents matching the query
82
-
*
87
+
*
83
88
* @param collection - MongoDB collection
84
89
* @param query - MongoDB query filter
85
90
* @param options - Find options (including session for transactions)
···
88
93
export async function find<T extends Schema>(
89
94
collection: Collection<Infer<T>>,
90
95
query: Filter<Infer<T>>,
91
-
options?: FindOptions
96
+
options?: FindOptions,
92
97
): Promise<(WithId<Infer<T>>)[]> {
93
98
return await collection.find(query, options).toArray();
94
99
}
95
100
96
101
/**
97
102
* Find a single document matching the query
98
-
*
103
+
*
99
104
* @param collection - MongoDB collection
100
105
* @param query - MongoDB query filter
101
106
* @param options - Find options (including session for transactions)
···
104
109
export async function findOne<T extends Schema>(
105
110
collection: Collection<Infer<T>>,
106
111
query: Filter<Infer<T>>,
107
-
options?: FindOptions
112
+
options?: FindOptions,
108
113
): Promise<WithId<Infer<T>> | null> {
109
114
return await collection.findOne(query, options);
110
115
}
111
116
112
117
/**
113
118
* Find a document by its MongoDB ObjectId
114
-
*
119
+
*
115
120
* @param collection - MongoDB collection
116
121
* @param id - Document ID (string or ObjectId)
117
122
* @param options - Find options (including session for transactions)
···
120
125
export async function findById<T extends Schema>(
121
126
collection: Collection<Infer<T>>,
122
127
id: string | ObjectId,
123
-
options?: FindOptions
128
+
options?: FindOptions,
124
129
): Promise<WithId<Infer<T>> | null> {
125
130
const objectId = typeof id === "string" ? new ObjectId(id) : id;
126
-
return await findOne(collection, { _id: objectId } as Filter<Infer<T>>, options);
131
+
return await findOne(
132
+
collection,
133
+
{ _id: objectId } as Filter<Infer<T>>,
134
+
options,
135
+
);
127
136
}
128
137
129
138
/**
130
139
* Update multiple documents matching the query
131
-
*
140
+
*
132
141
* Case handling:
133
142
* - If upsert: false (or undefined) → Normal update, no defaults applied
134
143
* - If upsert: true → Defaults added to $setOnInsert for new document creation
135
-
*
144
+
*
136
145
* @param collection - MongoDB collection
137
146
* @param schema - Zod schema for validation
138
147
* @param query - MongoDB query filter
···
145
154
schema: T,
146
155
query: Filter<Infer<T>>,
147
156
data: Partial<z.infer<T>>,
148
-
options?: UpdateOptions
157
+
options?: UpdateOptions,
149
158
): Promise<UpdateResult<Infer<T>>> {
150
159
const validatedData = parsePartial(schema, data);
151
-
let updateDoc: UpdateFilter<Infer<T>> = { $set: validatedData as Partial<Infer<T>> };
152
-
160
+
let updateDoc: UpdateFilter<Infer<T>> = {
161
+
$set: validatedData as Partial<Infer<T>>,
162
+
};
163
+
153
164
// If this is an upsert, apply defaults using $setOnInsert
154
165
if (options?.upsert) {
155
166
updateDoc = applyDefaultsForUpsert(schema, query, updateDoc);
156
167
}
157
-
168
+
158
169
return await collection.updateMany(query, updateDoc, options);
159
170
}
160
171
161
172
/**
162
173
* Update a single document matching the query
163
-
*
174
+
*
164
175
* Case handling:
165
176
* - If upsert: false (or undefined) → Normal update, no defaults applied
166
177
* - If upsert: true → Defaults added to $setOnInsert for new document creation
167
-
*
178
+
*
168
179
* @param collection - MongoDB collection
169
180
* @param schema - Zod schema for validation
170
181
* @param query - MongoDB query filter
···
177
188
schema: T,
178
189
query: Filter<Infer<T>>,
179
190
data: Partial<z.infer<T>>,
180
-
options?: UpdateOptions
191
+
options?: UpdateOptions,
181
192
): Promise<UpdateResult<Infer<T>>> {
182
193
const validatedData = parsePartial(schema, data);
183
-
let updateDoc: UpdateFilter<Infer<T>> = { $set: validatedData as Partial<Infer<T>> };
184
-
194
+
let updateDoc: UpdateFilter<Infer<T>> = {
195
+
$set: validatedData as Partial<Infer<T>>,
196
+
};
197
+
185
198
// If this is an upsert, apply defaults using $setOnInsert
186
199
if (options?.upsert) {
187
200
updateDoc = applyDefaultsForUpsert(schema, query, updateDoc);
188
201
}
189
-
202
+
190
203
return await collection.updateOne(query, updateDoc, options);
191
204
}
192
205
193
206
/**
194
207
* Replace a single document matching the query
195
-
*
208
+
*
196
209
* Case handling:
197
210
* - If upsert: false (or undefined) → Normal replace on existing doc, no additional defaults
198
211
* - If upsert: true → Defaults applied via parse() since we're passing a full document
199
-
*
212
+
*
200
213
* Note: For replace operations, defaults are automatically applied by the schema's
201
214
* parse() function which treats missing fields as candidates for defaults. This works
202
215
* for both regular replaces and upsert-creates since we're providing a full document.
203
-
*
216
+
*
204
217
* @param collection - MongoDB collection
205
218
* @param schema - Zod schema for validation
206
219
* @param query - MongoDB query filter
···
213
226
schema: T,
214
227
query: Filter<Infer<T>>,
215
228
data: Input<T>,
216
-
options?: ReplaceOptions
229
+
options?: ReplaceOptions,
217
230
): Promise<UpdateResult<Infer<T>>> {
218
231
// parseReplace will apply all schema defaults to missing fields
219
232
// This works correctly for both regular replaces and upsert-created documents
220
233
const validatedData = parseReplace(schema, data);
221
-
234
+
222
235
// Remove _id from validatedData for replaceOne (it will use the query's _id)
223
236
const { _id, ...withoutId } = validatedData as Infer<T> & { _id?: unknown };
224
237
return await collection.replaceOne(
225
238
query,
226
239
withoutId as Infer<T>,
227
-
options
240
+
options,
228
241
);
229
242
}
230
243
231
244
/**
232
245
* Find a single document and update it
233
-
*
246
+
*
234
247
* Case handling:
235
248
* - If upsert: false (or undefined) → Normal update
236
249
* - If upsert: true → Defaults added to $setOnInsert for new document creation
···
240
253
schema: T,
241
254
query: Filter<Infer<T>>,
242
255
data: Partial<z.infer<T>>,
243
-
options?: FindOneAndUpdateOptions
256
+
options?: FindOneAndUpdateOptions,
244
257
): Promise<ModifyResult<Infer<T>>> {
245
258
const validatedData = parsePartial(schema, data);
246
-
let updateDoc: UpdateFilter<Infer<T>> = { $set: validatedData as Partial<Infer<T>> };
259
+
let updateDoc: UpdateFilter<Infer<T>> = {
260
+
$set: validatedData as Partial<Infer<T>>,
261
+
};
247
262
248
263
if (options?.upsert) {
249
264
updateDoc = applyDefaultsForUpsert(schema, query, updateDoc);
250
265
}
251
266
252
-
const resolvedOptions: FindOneAndUpdateOptions & { includeResultMetadata: true } = {
267
+
const resolvedOptions: FindOneAndUpdateOptions & {
268
+
includeResultMetadata: true;
269
+
} = {
253
270
...(options ?? {}),
254
271
includeResultMetadata: true as const,
255
272
};
···
259
276
260
277
/**
261
278
* Find a single document and replace it
262
-
*
279
+
*
263
280
* Defaults are applied via parseReplace(), which fills in missing fields
264
281
* for both normal replacements and upsert-created documents.
265
282
*/
···
268
285
schema: T,
269
286
query: Filter<Infer<T>>,
270
287
data: Input<T>,
271
-
options?: FindOneAndReplaceOptions
288
+
options?: FindOneAndReplaceOptions,
272
289
): Promise<ModifyResult<Infer<T>>> {
273
290
const validatedData = parseReplace(schema, data);
274
291
const { _id, ...withoutId } = validatedData as Infer<T> & { _id?: unknown };
275
292
276
-
const resolvedOptions: FindOneAndReplaceOptions & { includeResultMetadata: true } = {
293
+
const resolvedOptions: FindOneAndReplaceOptions & {
294
+
includeResultMetadata: true;
295
+
} = {
277
296
...(options ?? {}),
278
297
includeResultMetadata: true as const,
279
298
};
···
281
300
return await collection.findOneAndReplace(
282
301
query,
283
302
withoutId as Infer<T>,
284
-
resolvedOptions
303
+
resolvedOptions,
285
304
);
286
305
}
287
306
288
307
/**
289
308
* Delete multiple documents matching the query
290
-
*
309
+
*
291
310
* @param collection - MongoDB collection
292
311
* @param query - MongoDB query filter
293
312
* @param options - Delete options (including session for transactions)
···
296
315
export async function deleteMany<T extends Schema>(
297
316
collection: Collection<Infer<T>>,
298
317
query: Filter<Infer<T>>,
299
-
options?: DeleteOptions
318
+
options?: DeleteOptions,
300
319
): Promise<DeleteResult> {
301
320
return await collection.deleteMany(query, options);
302
321
}
303
322
304
323
/**
305
324
* Delete a single document matching the query
306
-
*
325
+
*
307
326
* @param collection - MongoDB collection
308
327
* @param query - MongoDB query filter
309
328
* @param options - Delete options (including session for transactions)
···
312
331
export async function deleteOne<T extends Schema>(
313
332
collection: Collection<Infer<T>>,
314
333
query: Filter<Infer<T>>,
315
-
options?: DeleteOptions
334
+
options?: DeleteOptions,
316
335
): Promise<DeleteResult> {
317
336
return await collection.deleteOne(query, options);
318
337
}
319
338
320
339
/**
321
340
* Count documents matching the query
322
-
*
341
+
*
323
342
* @param collection - MongoDB collection
324
343
* @param query - MongoDB query filter
325
344
* @param options - Count options (including session for transactions)
···
328
347
export async function count<T extends Schema>(
329
348
collection: Collection<Infer<T>>,
330
349
query: Filter<Infer<T>>,
331
-
options?: CountDocumentsOptions
350
+
options?: CountDocumentsOptions,
332
351
): Promise<number> {
333
352
return await collection.countDocuments(query, options);
334
353
}
335
354
336
355
/**
337
356
* Execute an aggregation pipeline
338
-
*
357
+
*
339
358
* @param collection - MongoDB collection
340
359
* @param pipeline - MongoDB aggregation pipeline
341
360
* @param options - Aggregate options (including session for transactions)
···
344
363
export async function aggregate<T extends Schema>(
345
364
collection: Collection<Infer<T>>,
346
365
pipeline: Document[],
347
-
options?: AggregateOptions
366
+
options?: AggregateOptions,
348
367
): Promise<Document[]> {
349
368
return await collection.aggregate(pipeline, options).toArray();
350
369
}
+90
-60
model/index.ts
···
1
1
import type { z } from "@zod/zod";
2
2
import type {
3
+
AggregateOptions,
4
+
BulkWriteOptions,
3
5
Collection,
6
+
CountDocumentsOptions,
4
7
CreateIndexesOptions,
8
+
DeleteOptions,
5
9
DeleteResult,
6
10
Document,
7
11
DropIndexesOptions,
8
12
Filter,
13
+
FindOneAndReplaceOptions,
14
+
FindOneAndUpdateOptions,
15
+
FindOptions,
9
16
IndexDescription,
10
17
IndexSpecification,
11
18
InsertManyResult,
19
+
InsertOneOptions,
12
20
InsertOneResult,
13
-
InsertOneOptions,
14
-
FindOptions,
15
-
UpdateOptions,
21
+
ListIndexesOptions,
22
+
ModifyResult,
16
23
ReplaceOptions,
17
-
FindOneAndUpdateOptions,
18
-
FindOneAndReplaceOptions,
19
-
DeleteOptions,
20
-
CountDocumentsOptions,
21
-
AggregateOptions,
22
-
ListIndexesOptions,
24
+
UpdateOptions,
23
25
UpdateResult,
24
26
WithId,
25
-
BulkWriteOptions,
26
-
ModifyResult,
27
27
} from "mongodb";
28
28
import type { ObjectId } from "mongodb";
29
29
import { getDb } from "../client/connection.ts";
30
-
import type { Schema, Infer, Input, Indexes, ModelDef } from "../types.ts";
30
+
import type { Indexes, Infer, Input, ModelDef, Schema } from "../types.ts";
31
31
import * as core from "./core.ts";
32
32
import * as indexes from "./indexes.ts";
33
33
import * as pagination from "./pagination.ts";
34
34
35
35
/**
36
36
* Model class for type-safe MongoDB operations
37
-
*
37
+
*
38
38
* Provides a clean API for CRUD operations, pagination, and index management
39
39
* with automatic Zod validation and TypeScript type safety.
40
-
*
40
+
*
41
41
* @example
42
42
* ```ts
43
43
* const userSchema = z.object({
44
44
* name: z.string(),
45
45
* email: z.string().email(),
46
46
* });
47
-
*
47
+
*
48
48
* const UserModel = new Model("users", userSchema);
49
49
* await UserModel.insertOne({ name: "Alice", email: "alice@example.com" });
50
50
* ```
···
62
62
this.schema = definition as T;
63
63
}
64
64
this.collection = getDb().collection<Infer<T>>(collectionName);
65
-
65
+
66
66
// Automatically create indexes if they were provided
67
67
if (this.indexes && this.indexes.length > 0) {
68
68
// Fire and forget - indexes will be created asynchronously
69
-
indexes.syncIndexes(this.collection, this.indexes)
69
+
indexes.syncIndexes(this.collection, this.indexes);
70
70
}
71
71
}
72
72
···
76
76
77
77
/**
78
78
* Insert a single document into the collection
79
-
*
79
+
*
80
80
* @param data - Document data to insert
81
81
* @param options - Insert options (including session for transactions)
82
82
* @returns Insert result with insertedId
83
83
*/
84
84
async insertOne(
85
85
data: Input<T>,
86
-
options?: InsertOneOptions
86
+
options?: InsertOneOptions,
87
87
): Promise<InsertOneResult<Infer<T>>> {
88
88
return await core.insertOne(this.collection, this.schema, data, options);
89
89
}
90
90
91
91
/**
92
92
* Insert multiple documents into the collection
93
-
*
93
+
*
94
94
* @param data - Array of document data to insert
95
95
* @param options - Insert options (including session for transactions)
96
96
* @returns Insert result with insertedIds
97
97
*/
98
98
async insertMany(
99
99
data: Input<T>[],
100
-
options?: BulkWriteOptions
100
+
options?: BulkWriteOptions,
101
101
): Promise<InsertManyResult<Infer<T>>> {
102
102
return await core.insertMany(this.collection, this.schema, data, options);
103
103
}
104
104
105
105
/**
106
106
* Find multiple documents matching the query
107
-
*
107
+
*
108
108
* @param query - MongoDB query filter
109
109
* @param options - Find options (including session for transactions)
110
110
* @returns Array of matching documents
111
111
*/
112
112
async find(
113
113
query: Filter<Infer<T>>,
114
-
options?: FindOptions
114
+
options?: FindOptions,
115
115
): Promise<(WithId<Infer<T>>)[]> {
116
116
return await core.find(this.collection, query, options);
117
117
}
118
118
119
119
/**
120
120
* Find a single document matching the query
121
-
*
121
+
*
122
122
* @param query - MongoDB query filter
123
123
* @param options - Find options (including session for transactions)
124
124
* @returns Matching document or null if not found
125
125
*/
126
126
async findOne(
127
127
query: Filter<Infer<T>>,
128
-
options?: FindOptions
128
+
options?: FindOptions,
129
129
): Promise<WithId<Infer<T>> | null> {
130
130
return await core.findOne(this.collection, query, options);
131
131
}
132
132
133
133
/**
134
134
* Find a document by its MongoDB ObjectId
135
-
*
135
+
*
136
136
* @param id - Document ID (string or ObjectId)
137
137
* @param options - Find options (including session for transactions)
138
138
* @returns Matching document or null if not found
139
139
*/
140
140
async findById(
141
141
id: string | ObjectId,
142
-
options?: FindOptions
142
+
options?: FindOptions,
143
143
): Promise<WithId<Infer<T>> | null> {
144
144
return await core.findById(this.collection, id, options);
145
145
}
146
146
147
147
/**
148
148
* Update multiple documents matching the query
149
-
*
149
+
*
150
150
* @param query - MongoDB query filter
151
151
* @param data - Partial data to update
152
152
* @param options - Update options (including session for transactions)
···
155
155
async update(
156
156
query: Filter<Infer<T>>,
157
157
data: Partial<z.infer<T>>,
158
-
options?: UpdateOptions
158
+
options?: UpdateOptions,
159
159
): Promise<UpdateResult<Infer<T>>> {
160
-
return await core.update(this.collection, this.schema, query, data, options);
160
+
return await core.update(
161
+
this.collection,
162
+
this.schema,
163
+
query,
164
+
data,
165
+
options,
166
+
);
161
167
}
162
168
163
169
/**
164
170
* Update a single document matching the query
165
-
*
171
+
*
166
172
* @param query - MongoDB query filter
167
173
* @param data - Partial data to update
168
174
* @param options - Update options (including session for transactions)
···
171
177
async updateOne(
172
178
query: Filter<Infer<T>>,
173
179
data: Partial<z.infer<T>>,
174
-
options?: UpdateOptions
180
+
options?: UpdateOptions,
175
181
): Promise<UpdateResult<Infer<T>>> {
176
-
return await core.updateOne(this.collection, this.schema, query, data, options);
182
+
return await core.updateOne(
183
+
this.collection,
184
+
this.schema,
185
+
query,
186
+
data,
187
+
options,
188
+
);
177
189
}
178
190
179
191
/**
180
192
* Find a single document and update it
181
-
*
193
+
*
182
194
* @param query - MongoDB query filter
183
195
* @param data - Partial data to update
184
196
* @param options - FindOneAndUpdate options (including upsert and returnDocument)
···
187
199
async findOneAndUpdate(
188
200
query: Filter<Infer<T>>,
189
201
data: Partial<z.infer<T>>,
190
-
options?: FindOneAndUpdateOptions
202
+
options?: FindOneAndUpdateOptions,
191
203
): Promise<ModifyResult<Infer<T>>> {
192
-
return await core.findOneAndUpdate(this.collection, this.schema, query, data, options);
204
+
return await core.findOneAndUpdate(
205
+
this.collection,
206
+
this.schema,
207
+
query,
208
+
data,
209
+
options,
210
+
);
193
211
}
194
212
195
213
/**
196
214
* Replace a single document matching the query
197
-
*
215
+
*
198
216
* @param query - MongoDB query filter
199
217
* @param data - Complete document data for replacement
200
218
* @param options - Replace options (including session for transactions)
···
203
221
async replaceOne(
204
222
query: Filter<Infer<T>>,
205
223
data: Input<T>,
206
-
options?: ReplaceOptions
224
+
options?: ReplaceOptions,
207
225
): Promise<UpdateResult<Infer<T>>> {
208
-
return await core.replaceOne(this.collection, this.schema, query, data, options);
226
+
return await core.replaceOne(
227
+
this.collection,
228
+
this.schema,
229
+
query,
230
+
data,
231
+
options,
232
+
);
209
233
}
210
234
211
235
/**
212
236
* Find a single document and replace it
213
-
*
237
+
*
214
238
* @param query - MongoDB query filter
215
239
* @param data - Complete document data for replacement
216
240
* @param options - FindOneAndReplace options (including upsert and returnDocument)
···
219
243
async findOneAndReplace(
220
244
query: Filter<Infer<T>>,
221
245
data: Input<T>,
222
-
options?: FindOneAndReplaceOptions
246
+
options?: FindOneAndReplaceOptions,
223
247
): Promise<ModifyResult<Infer<T>>> {
224
-
return await core.findOneAndReplace(this.collection, this.schema, query, data, options);
248
+
return await core.findOneAndReplace(
249
+
this.collection,
250
+
this.schema,
251
+
query,
252
+
data,
253
+
options,
254
+
);
225
255
}
226
256
227
257
/**
228
258
* Delete multiple documents matching the query
229
-
*
259
+
*
230
260
* @param query - MongoDB query filter
231
261
* @param options - Delete options (including session for transactions)
232
262
* @returns Delete result
233
263
*/
234
264
async delete(
235
265
query: Filter<Infer<T>>,
236
-
options?: DeleteOptions
266
+
options?: DeleteOptions,
237
267
): Promise<DeleteResult> {
238
268
return await core.deleteMany(this.collection, query, options);
239
269
}
240
270
241
271
/**
242
272
* Delete a single document matching the query
243
-
*
273
+
*
244
274
* @param query - MongoDB query filter
245
275
* @param options - Delete options (including session for transactions)
246
276
* @returns Delete result
247
277
*/
248
278
async deleteOne(
249
279
query: Filter<Infer<T>>,
250
-
options?: DeleteOptions
280
+
options?: DeleteOptions,
251
281
): Promise<DeleteResult> {
252
282
return await core.deleteOne(this.collection, query, options);
253
283
}
254
284
255
285
/**
256
286
* Count documents matching the query
257
-
*
287
+
*
258
288
* @param query - MongoDB query filter
259
289
* @param options - Count options (including session for transactions)
260
290
* @returns Number of matching documents
261
291
*/
262
292
async count(
263
293
query: Filter<Infer<T>>,
264
-
options?: CountDocumentsOptions
294
+
options?: CountDocumentsOptions,
265
295
): Promise<number> {
266
296
return await core.count(this.collection, query, options);
267
297
}
268
298
269
299
/**
270
300
* Execute an aggregation pipeline
271
-
*
301
+
*
272
302
* @param pipeline - MongoDB aggregation pipeline
273
303
* @param options - Aggregate options (including session for transactions)
274
304
* @returns Array of aggregation results
275
305
*/
276
306
async aggregate(
277
307
pipeline: Document[],
278
-
options?: AggregateOptions
308
+
options?: AggregateOptions,
279
309
): Promise<Document[]> {
280
310
return await core.aggregate(this.collection, pipeline, options);
281
311
}
···
286
316
287
317
/**
288
318
* Find documents with pagination support
289
-
*
319
+
*
290
320
* @param query - MongoDB query filter
291
321
* @param options - Pagination options (skip, limit, sort)
292
322
* @returns Array of matching documents
···
304
334
305
335
/**
306
336
* Create a single index on the collection
307
-
*
337
+
*
308
338
* @param keys - Index specification (e.g., { email: 1 } or { name: "text" })
309
339
* @param options - Index creation options (unique, sparse, expireAfterSeconds, etc.)
310
340
* @returns The name of the created index
···
318
348
319
349
/**
320
350
* Create multiple indexes on the collection
321
-
*
351
+
*
322
352
* @param indexes - Array of index descriptions
323
353
* @param options - Index creation options
324
354
* @returns Array of index names created
···
332
362
333
363
/**
334
364
* Drop a single index from the collection
335
-
*
365
+
*
336
366
* @param index - Index name or specification
337
367
* @param options - Drop index options
338
368
*/
···
345
375
346
376
/**
347
377
* Drop all indexes from the collection (except _id index)
348
-
*
378
+
*
349
379
* @param options - Drop index options
350
380
*/
351
381
async dropIndexes(options?: DropIndexesOptions): Promise<void> {
···
354
384
355
385
/**
356
386
* List all indexes on the collection
357
-
*
387
+
*
358
388
* @param options - List indexes options
359
389
* @returns Array of index information
360
390
*/
···
366
396
367
397
/**
368
398
* Get index information by name
369
-
*
399
+
*
370
400
* @param indexName - Name of the index
371
401
* @returns Index description or null if not found
372
402
*/
···
376
406
377
407
/**
378
408
* Check if an index exists
379
-
*
409
+
*
380
410
* @param indexName - Name of the index
381
411
* @returns True if index exists, false otherwise
382
412
*/
···
386
416
387
417
/**
388
418
* Synchronize indexes - create indexes if they don't exist, update if they differ
389
-
*
419
+
*
390
420
* This is useful for ensuring indexes match your schema definition
391
-
*
421
+
*
392
422
* @param indexes - Array of index descriptions to synchronize
393
423
* @param options - Options for index creation
394
424
* @returns Array of index names that were created
+15
-15
model/indexes.ts
···
6
6
IndexSpecification,
7
7
ListIndexesOptions,
8
8
} from "mongodb";
9
-
import type { Schema, Infer } from "../types.ts";
9
+
import type { Infer, Schema } from "../types.ts";
10
10
11
11
/**
12
12
* Index management operations for the Model class
13
-
*
13
+
*
14
14
* This module contains all index-related operations including creation,
15
15
* deletion, listing, and synchronization of indexes.
16
16
*/
17
17
18
18
/**
19
19
* Create a single index on the collection
20
-
*
20
+
*
21
21
* @param collection - MongoDB collection
22
22
* @param keys - Index specification (e.g., { email: 1 } or { name: "text" })
23
23
* @param options - Index creation options (unique, sparse, expireAfterSeconds, etc.)
···
33
33
34
34
/**
35
35
* Create multiple indexes on the collection
36
-
*
36
+
*
37
37
* @param collection - MongoDB collection
38
38
* @param indexes - Array of index descriptions
39
39
* @param options - Index creation options
···
49
49
50
50
/**
51
51
* Drop a single index from the collection
52
-
*
52
+
*
53
53
* @param collection - MongoDB collection
54
54
* @param index - Index name or specification
55
55
* @param options - Drop index options
···
64
64
65
65
/**
66
66
* Drop all indexes from the collection (except _id index)
67
-
*
67
+
*
68
68
* @param collection - MongoDB collection
69
69
* @param options - Drop index options
70
70
*/
71
71
export async function dropIndexes<T extends Schema>(
72
72
collection: Collection<Infer<T>>,
73
-
options?: DropIndexesOptions
73
+
options?: DropIndexesOptions,
74
74
): Promise<void> {
75
75
await collection.dropIndexes(options);
76
76
}
77
77
78
78
/**
79
79
* List all indexes on the collection
80
-
*
80
+
*
81
81
* @param collection - MongoDB collection
82
82
* @param options - List indexes options
83
83
* @returns Array of index information
···
92
92
93
93
/**
94
94
* Get index information by name
95
-
*
95
+
*
96
96
* @param collection - MongoDB collection
97
97
* @param indexName - Name of the index
98
98
* @returns Index description or null if not found
99
99
*/
100
100
export async function getIndex<T extends Schema>(
101
101
collection: Collection<Infer<T>>,
102
-
indexName: string
102
+
indexName: string,
103
103
): Promise<IndexDescription | null> {
104
104
const indexes = await listIndexes(collection);
105
105
return indexes.find((idx) => idx.name === indexName) || null;
···
107
107
108
108
/**
109
109
* Check if an index exists
110
-
*
110
+
*
111
111
* @param collection - MongoDB collection
112
112
* @param indexName - Name of the index
113
113
* @returns True if index exists, false otherwise
114
114
*/
115
115
export async function indexExists<T extends Schema>(
116
116
collection: Collection<Infer<T>>,
117
-
indexName: string
117
+
indexName: string,
118
118
): Promise<boolean> {
119
119
const index = await getIndex(collection, indexName);
120
120
return index !== null;
···
122
122
123
123
/**
124
124
* Synchronize indexes - create indexes if they don't exist, update if they differ
125
-
*
125
+
*
126
126
* This is useful for ensuring indexes match your schema definition
127
-
*
127
+
*
128
128
* @param collection - MongoDB collection
129
129
* @param indexes - Array of index descriptions to synchronize
130
130
* @param options - Options for index creation
···
167
167
168
168
/**
169
169
* Generate index name from key specification
170
-
*
170
+
*
171
171
* @param keys - Index specification
172
172
* @returns Generated index name
173
173
*/
+6
-11
model/pagination.ts
···
1
-
import type {
2
-
Collection,
3
-
Document,
4
-
Filter,
5
-
WithId,
6
-
} from "mongodb";
7
-
import type { Schema, Infer } from "../types.ts";
1
+
import type { Collection, Document, Filter, WithId } from "mongodb";
2
+
import type { Infer, Schema } from "../types.ts";
8
3
9
4
/**
10
5
* Pagination operations for the Model class
11
-
*
6
+
*
12
7
* This module contains pagination-related functionality for finding documents
13
8
* with skip, limit, and sort options.
14
9
*/
15
10
16
11
/**
17
12
* Find documents with pagination support
18
-
*
13
+
*
19
14
* @param collection - MongoDB collection
20
15
* @param query - MongoDB query filter
21
16
* @param options - Pagination options (skip, limit, sort)
22
17
* @returns Array of matching documents
23
-
*
18
+
*
24
19
* @example
25
20
* ```ts
26
-
* const users = await findPaginated(collection,
21
+
* const users = await findPaginated(collection,
27
22
* { age: { $gte: 18 } },
28
23
* { skip: 0, limit: 10, sort: { createdAt: -1 } }
29
24
* );
+86
-51
model/validation.ts
···
1
1
import type { z } from "@zod/zod";
2
-
import type { Schema, Infer, Input } from "../types.ts";
3
-
import { ValidationError, AsyncValidationError } from "../errors.ts";
4
-
import type { Document, UpdateFilter, Filter } from "mongodb";
2
+
import type { Infer, Input, Schema } from "../types.ts";
3
+
import { AsyncValidationError, ValidationError } from "../errors.ts";
4
+
import type { Document, Filter, UpdateFilter } from "mongodb";
5
+
6
+
// Cache frequently reused schema transformations to avoid repeated allocations
7
+
const partialSchemaCache = new WeakMap<Schema, z.ZodTypeAny>();
8
+
const defaultsCache = new WeakMap<Schema, Record<string, unknown>>();
9
+
const updateOperators = [
10
+
"$set",
11
+
"$unset",
12
+
"$inc",
13
+
"$mul",
14
+
"$rename",
15
+
"$min",
16
+
"$max",
17
+
"$currentDate",
18
+
"$push",
19
+
"$pull",
20
+
"$addToSet",
21
+
"$pop",
22
+
"$bit",
23
+
"$setOnInsert",
24
+
];
25
+
26
+
function getPartialSchema(schema: Schema): z.ZodTypeAny {
27
+
const cached = partialSchemaCache.get(schema);
28
+
if (cached) return cached;
29
+
const partial = schema.partial();
30
+
partialSchemaCache.set(schema, partial);
31
+
return partial;
32
+
}
5
33
6
34
/**
7
35
* Validate data for insert operations using Zod schema
8
-
*
36
+
*
9
37
* @param schema - Zod schema to validate against
10
38
* @param data - Data to validate
11
39
* @returns Validated and typed data
···
14
42
*/
15
43
export function parse<T extends Schema>(schema: T, data: Input<T>): Infer<T> {
16
44
const result = schema.safeParse(data);
17
-
45
+
18
46
// Check for async validation
19
47
if (result instanceof Promise) {
20
48
throw new AsyncValidationError();
21
49
}
22
-
50
+
23
51
if (!result.success) {
24
52
throw new ValidationError(result.error.issues, "insert");
25
53
}
···
28
56
29
57
/**
30
58
* Validate partial data for update operations using Zod schema
31
-
*
59
+
*
32
60
* Important: This function only validates the fields that are provided in the data object.
33
61
* Unlike parse(), this function does NOT apply defaults for missing fields because
34
62
* in an update context, missing fields should remain unchanged in the database.
35
-
*
63
+
*
36
64
* @param schema - Zod schema to validate against
37
65
* @param data - Partial data to validate
38
66
* @returns Validated and typed partial data (only fields present in input)
···
43
71
schema: T,
44
72
data: Partial<z.infer<T>>,
45
73
): Partial<z.infer<T>> {
74
+
if (!data || Object.keys(data).length === 0) {
75
+
return {};
76
+
}
77
+
46
78
// Get the list of fields actually provided in the input
47
79
const inputKeys = Object.keys(data);
48
-
49
-
const result = schema.partial().safeParse(data);
50
-
80
+
81
+
const result = getPartialSchema(schema).safeParse(data);
82
+
51
83
// Check for async validation
52
84
if (result instanceof Promise) {
53
85
throw new AsyncValidationError();
54
86
}
55
-
87
+
56
88
if (!result.success) {
57
89
throw new ValidationError(result.error.issues, "update");
58
90
}
59
-
91
+
60
92
// Filter the result to only include fields that were in the input
61
93
// This prevents defaults from being applied to fields that weren't provided
62
94
const filtered: Record<string, unknown> = {};
63
95
for (const key of inputKeys) {
64
-
if (key in result.data) {
96
+
if (key in (result.data as Record<string, unknown>)) {
65
97
filtered[key] = (result.data as Record<string, unknown>)[key];
66
98
}
67
99
}
68
-
100
+
69
101
return filtered as Partial<z.infer<T>>;
70
102
}
71
103
72
104
/**
73
105
* Validate data for replace operations using Zod schema
74
-
*
106
+
*
75
107
* @param schema - Zod schema to validate against
76
108
* @param data - Data to validate
77
109
* @returns Validated and typed data
78
110
* @throws {ValidationError} If validation fails
79
111
* @throws {AsyncValidationError} If async validation is detected
80
112
*/
81
-
export function parseReplace<T extends Schema>(schema: T, data: Input<T>): Infer<T> {
113
+
export function parseReplace<T extends Schema>(
114
+
schema: T,
115
+
data: Input<T>,
116
+
): Infer<T> {
82
117
const result = schema.safeParse(data);
83
-
118
+
84
119
// Check for async validation
85
120
if (result instanceof Promise) {
86
121
throw new AsyncValidationError();
87
122
}
88
-
123
+
89
124
if (!result.success) {
90
125
throw new ValidationError(result.error.issues, "replace");
91
126
}
···
95
130
/**
96
131
* Extract default values from a Zod schema
97
132
* This parses an empty object through the schema to get all defaults applied
98
-
*
133
+
*
99
134
* @param schema - Zod schema to extract defaults from
100
135
* @returns Object containing all default values from the schema
101
136
*/
102
-
export function extractDefaults<T extends Schema>(schema: T): Partial<Infer<T>> {
137
+
export function extractDefaults<T extends Schema>(
138
+
schema: T,
139
+
): Partial<Infer<T>> {
140
+
const cached = defaultsCache.get(schema);
141
+
if (cached) {
142
+
return cached as Partial<Infer<T>>;
143
+
}
144
+
103
145
try {
104
146
// Make all fields optional, then parse empty object to trigger defaults
105
147
// This allows us to see which fields get default values
106
-
const partialSchema = schema.partial();
148
+
const partialSchema = getPartialSchema(schema);
107
149
const result = partialSchema.safeParse({});
108
-
150
+
109
151
if (result instanceof Promise) {
110
152
// Cannot extract defaults from async schemas
111
153
return {};
112
154
}
113
-
155
+
114
156
// If successful, the result contains all fields that have defaults
115
157
// Only include fields that were actually added (have values)
116
158
if (!result.success) {
117
159
return {};
118
160
}
119
-
161
+
120
162
// Filter to only include fields that got values from defaults
121
163
// (not undefined, which indicates no default)
122
164
const defaults: Record<string, unknown> = {};
123
165
const data = result.data as Record<string, unknown>;
124
-
166
+
125
167
for (const [key, value] of Object.entries(data)) {
126
168
if (value !== undefined) {
127
169
defaults[key] = value;
128
170
}
129
171
}
130
-
172
+
defaultsCache.set(schema, defaults as Partial<Infer<Schema>>);
131
173
return defaults as Partial<Infer<T>>;
132
174
} catch {
133
175
return {};
···
137
179
/**
138
180
* Get all field paths mentioned in an update filter object
139
181
* This includes fields in $set, $unset, $inc, $push, etc.
140
-
*
182
+
*
141
183
* @param update - MongoDB update filter
142
184
* @returns Set of field paths that are being modified
143
185
*/
144
186
function getModifiedFields(update: UpdateFilter<Document>): Set<string> {
145
187
const fields = new Set<string>();
146
-
147
-
// Operators that modify fields
148
-
const operators = [
149
-
'$set', '$unset', '$inc', '$mul', '$rename', '$min', '$max',
150
-
'$currentDate', '$push', '$pull', '$addToSet', '$pop', '$bit',
151
-
'$setOnInsert',
152
-
];
153
-
154
-
for (const op of operators) {
155
-
if (update[op] && typeof update[op] === 'object') {
188
+
189
+
for (const op of updateOperators) {
190
+
if (update[op] && typeof update[op] === "object") {
156
191
// Add all field names from this operator
157
192
for (const field of Object.keys(update[op] as Document)) {
158
193
fields.add(field);
159
194
}
160
195
}
161
196
}
162
-
197
+
163
198
return fields;
164
199
}
165
200
···
209
244
210
245
/**
211
246
* Apply schema defaults to an update operation using $setOnInsert
212
-
*
247
+
*
213
248
* This is used for upsert operations to ensure defaults are applied when
214
249
* a new document is created, but not when updating an existing document.
215
-
*
250
+
*
216
251
* For each default field:
217
252
* - If the field is NOT mentioned in any update operator ($set, $inc, etc.)
218
253
* - If the field is NOT fixed by an equality clause in the query filter
219
254
* - Add it to $setOnInsert so it's only applied on insert
220
-
*
255
+
*
221
256
* @param schema - Zod schema with defaults
222
257
* @param query - MongoDB query filter
223
258
* @param update - MongoDB update filter
···
226
261
export function applyDefaultsForUpsert<T extends Schema>(
227
262
schema: T,
228
263
query: Filter<Infer<T>>,
229
-
update: UpdateFilter<Infer<T>>
264
+
update: UpdateFilter<Infer<T>>,
230
265
): UpdateFilter<Infer<T>> {
231
266
// Extract defaults from schema
232
267
const defaults = extractDefaults(schema);
233
-
268
+
234
269
// If no defaults, return update unchanged
235
270
if (Object.keys(defaults).length === 0) {
236
271
return update;
237
272
}
238
-
273
+
239
274
// Get fields that are already being modified
240
275
const modifiedFields = getModifiedFields(update as UpdateFilter<Document>);
241
276
const filterEqualityFields = getEqualityFields(query as Filter<Document>);
242
-
277
+
243
278
// Build $setOnInsert with defaults for unmodified fields
244
279
const setOnInsert: Partial<Infer<T>> = {};
245
-
280
+
246
281
for (const [field, value] of Object.entries(defaults)) {
247
282
// Only add default if field is not already being modified or fixed by filter equality
248
283
if (!modifiedFields.has(field) && !filterEqualityFields.has(field)) {
249
284
setOnInsert[field as keyof Infer<T>] = value as Infer<T>[keyof Infer<T>];
250
285
}
251
286
}
252
-
287
+
253
288
// If there are defaults to add, merge them into $setOnInsert
254
289
if (Object.keys(setOnInsert).length > 0) {
255
290
return {
256
291
...update,
257
292
$setOnInsert: {
258
293
...(update.$setOnInsert || {}),
259
-
...setOnInsert
260
-
} as Partial<Infer<T>>
294
+
...setOnInsert,
295
+
} as Partial<Infer<T>>,
261
296
};
262
297
}
263
-
298
+
264
299
return update;
265
300
}
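
To make the `parse` vs `parseUpdate` contract concrete, a hedged sketch with an illustrative schema: inserts get defaults filled in, while updates only echo back the fields that were actually provided. The import path mirrors this file, but calling these helpers directly is not something the diff itself demonstrates.

```ts
import { z } from "@zod/zod";
import { parse, parseUpdate } from "./model/validation.ts"; // path from this diff

const productSchema = z.object({
  name: z.string(),
  price: z.number(),
  category: z.string().default("general"),
});

// Insert path: missing fields with defaults are filled in.
const doc = parse(productSchema, { name: "Widget", price: 29.99 });
// -> { name: "Widget", price: 29.99, category: "general" }

// Update path: only the provided fields are validated and returned,
// so `category` stays untouched in the database.
const patch = parseUpdate(productSchema, { price: 24.99 });
// -> { price: 24.99 }
```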
+42
-37
tests/connection_test.ts
···
1
1
import { assert, assertEquals, assertExists } from "@std/assert";
2
-
import { connect, disconnect, healthCheck, type ConnectOptions } from "../mod.ts";
2
+
import {
3
+
connect,
4
+
type ConnectOptions,
5
+
disconnect,
6
+
healthCheck,
7
+
} from "../mod.ts";
3
8
import { MongoMemoryServer } from "mongodb-memory-server-core";
4
9
5
10
let mongoServer: MongoMemoryServer | null = null;
···
27
32
async fn() {
28
33
const uri = await setupTestServer();
29
34
const connection = await connect(uri, "test_db");
30
-
35
+
31
36
assert(connection);
32
37
assert(connection.client);
33
38
assert(connection.db);
···
47
52
maxIdleTimeMS: 30000,
48
53
connectTimeoutMS: 5000,
49
54
};
50
-
55
+
51
56
const connection = await connect(uri, "test_db", options);
52
-
57
+
53
58
assert(connection);
54
59
assert(connection.client);
55
60
assert(connection.db);
56
-
61
+
57
62
// Verify connection is working
58
63
const adminDb = connection.db.admin();
59
64
const serverStatus = await adminDb.serverStatus();
···
67
72
name: "Connection: Singleton - should reuse existing connection",
68
73
async fn() {
69
74
const uri = await setupTestServer();
70
-
75
+
71
76
const connection1 = await connect(uri, "test_db");
72
77
const connection2 = await connect(uri, "test_db");
73
-
78
+
74
79
// Should return the same connection instance
75
80
assertEquals(connection1, connection2);
76
81
assertEquals(connection1.client, connection2.client);
···
84
89
name: "Connection: Disconnect - should disconnect and allow reconnection",
85
90
async fn() {
86
91
const uri = await setupTestServer();
87
-
92
+
88
93
const connection1 = await connect(uri, "test_db");
89
94
assert(connection1);
90
-
95
+
91
96
await disconnect();
92
-
97
+
93
98
// Should be able to reconnect
94
99
const connection2 = await connect(uri, "test_db");
95
100
assert(connection2);
96
-
101
+
97
102
// Should be a new connection instance
98
103
assert(connection1 !== connection2);
99
104
},
···
108
113
const options: ConnectOptions = {
109
114
maxPoolSize: 5,
110
115
};
111
-
116
+
112
117
const connection = await connect(uri, "test_db", options);
113
-
118
+
114
119
// Verify connection works with custom pool size
115
120
const collections = await connection.db.listCollections().toArray();
116
121
assert(Array.isArray(collections));
···
120
125
});
121
126
122
127
Deno.test({
123
-
name: "Connection: Multiple Databases - should handle different database names",
128
+
name:
129
+
"Connection: Multiple Databases - should handle different database names",
124
130
async fn() {
125
131
const uri = await setupTestServer();
126
-
132
+
127
133
// Connect to first database
128
134
const connection1 = await connect(uri, "db1");
129
135
assertEquals(connection1.db.databaseName, "db1");
130
-
136
+
131
137
// Disconnect first
132
138
await disconnect();
133
-
139
+
134
140
// Connect to second database
135
141
const connection2 = await connect(uri, "db2");
136
142
assertEquals(connection2.db.databaseName, "db2");
···
143
149
name: "Health Check: should return unhealthy when not connected",
144
150
async fn() {
145
151
const result = await healthCheck();
146
-
152
+
147
153
assertEquals(result.healthy, false);
148
154
assertEquals(result.connected, false);
149
155
assertExists(result.error);
···
160
166
async fn() {
161
167
const uri = await setupTestServer();
162
168
await connect(uri, "test_db");
163
-
169
+
164
170
const result = await healthCheck();
165
-
171
+
166
172
assertEquals(result.healthy, true);
167
173
assertEquals(result.connected, true);
168
174
assertExists(result.responseTimeMs);
···
179
185
async fn() {
180
186
const uri = await setupTestServer();
181
187
await connect(uri, "test_db");
182
-
188
+
183
189
const result = await healthCheck();
184
-
190
+
185
191
assertEquals(result.healthy, true);
186
192
assertExists(result.responseTimeMs);
187
193
// Response time should be reasonable (less than 1 second for in-memory MongoDB)
···
196
202
async fn() {
197
203
const uri = await setupTestServer();
198
204
await connect(uri, "test_db");
199
-
205
+
200
206
// Run health check multiple times
201
207
const results = await Promise.all([
202
208
healthCheck(),
203
209
healthCheck(),
204
210
healthCheck(),
205
211
]);
206
-
212
+
207
213
// All should be healthy
208
214
for (const result of results) {
209
215
assertEquals(result.healthy, true);
···
220
226
async fn() {
221
227
const uri = await setupTestServer();
222
228
await connect(uri, "test_db");
223
-
229
+
224
230
// First check should be healthy
225
231
let result = await healthCheck();
226
232
assertEquals(result.healthy, true);
227
-
233
+
228
234
// Disconnect
229
235
await disconnect();
230
-
236
+
231
237
// Second check should be unhealthy
232
238
result = await healthCheck();
233
239
assertEquals(result.healthy, false);
···
247
253
serverSelectionTimeoutMS: 5000,
248
254
connectTimeoutMS: 5000,
249
255
};
250
-
256
+
251
257
const connection = await connect(uri, "test_db", options);
252
-
258
+
253
259
assert(connection);
254
260
assert(connection.client);
255
261
assert(connection.db);
256
-
262
+
257
263
// Verify connection works with retry options
258
264
const collections = await connection.db.listCollections().toArray();
259
265
assert(Array.isArray(collections));
···
270
276
// Pooling
271
277
maxPoolSize: 10,
272
278
minPoolSize: 2,
273
-
279
+
274
280
// Retry logic
275
281
retryReads: true,
276
282
retryWrites: true,
277
-
283
+
278
284
// Timeouts
279
285
connectTimeoutMS: 10000,
280
286
socketTimeoutMS: 45000,
281
287
serverSelectionTimeoutMS: 10000,
282
-
288
+
283
289
// Resilience
284
290
maxIdleTimeMS: 30000,
285
291
heartbeatFrequencyMS: 10000,
286
292
};
287
-
293
+
288
294
const connection = await connect(uri, "test_db", options);
289
-
295
+
290
296
assert(connection);
291
-
297
+
292
298
// Verify connection is working
293
299
const adminDb = connection.db.admin();
294
300
const serverStatus = await adminDb.serverStatus();
···
297
303
sanitizeResources: false,
298
304
sanitizeOps: false,
299
305
});
300
-
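
The health-check tests above pin down the result shape (`healthy`, `connected`, `responseTimeMs`, `error`), so here is a sketch of a readiness endpoint built on it; the HTTP wrapper is illustrative and not part of the library.

```ts
import { healthCheck } from "@nozzle/nozzle";

export async function readinessHandler(): Promise<Response> {
  const status = await healthCheck();

  if (!status.healthy) {
    // Not connected (or the ping failed): report the reason with a 503
    return new Response(JSON.stringify({ ok: false, error: status.error }), {
      status: 503,
      headers: { "content-type": "application/json" },
    });
  }

  return new Response(
    JSON.stringify({ ok: true, pingMs: status.responseTimeMs }),
    { headers: { "content-type": "application/json" } },
  );
}
```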
+1
-8
tests/crud_test.ts
···
14
14
15
15
Deno.test.beforeAll(async () => {
16
16
await setupTestDb();
17
-
UserModel = createUserModel();
17
+
UserModel = createUserModel("users_crud");
18
18
});
19
19
20
20
Deno.test.beforeEach(async () => {
···
28
28
Deno.test({
29
29
name: "CRUD: Insert - should insert a new user successfully",
30
30
async fn() {
31
-
32
31
const newUser: UserInsert = {
33
32
name: "Test User",
34
33
email: "test@example.com",
···
47
46
Deno.test({
48
47
name: "CRUD: Find - should find the inserted user",
49
48
async fn() {
50
-
51
49
// First insert a user for this test
52
50
const newUser: UserInsert = {
53
51
name: "Find Test User",
···
73
71
Deno.test({
74
72
name: "CRUD: Update - should update user data",
75
73
async fn() {
76
-
77
74
// Insert a user for this test
78
75
const newUser: UserInsert = {
79
76
name: "Update Test User",
···
106
103
Deno.test({
107
104
name: "CRUD: Delete - should delete user successfully",
108
105
async fn() {
109
-
110
106
// Insert a user for this test
111
107
const newUser: UserInsert = {
112
108
name: "Delete Test User",
···
137
133
Deno.test({
138
134
name: "CRUD: Find Multiple - should find multiple users",
139
135
async fn() {
140
-
141
136
// Insert multiple users
142
137
const users: UserInsert[] = [
143
138
{ name: "User 1", email: "user1@example.com", age: 20 },
···
157
152
sanitizeResources: false,
158
153
sanitizeOps: false,
159
154
});
160
-
161
-
+33
-31
tests/defaults_test.ts
···
1
1
import { assertEquals, assertExists } from "@std/assert";
2
2
import { z } from "@zod/zod";
3
-
import { connect, disconnect, Model } from "../mod.ts";
3
+
import { Model } from "../mod.ts";
4
4
import { applyDefaultsForUpsert } from "../model/validation.ts";
5
-
import { MongoMemoryServer } from "mongodb-memory-server-core";
5
+
import { setupTestDb, teardownTestDb } from "./utils.ts";
6
6
7
7
/**
8
8
* Test suite for default value handling in different operation types
9
-
*
9
+
*
10
10
* This tests the three main cases:
11
11
* 1. Plain inserts - defaults applied directly
12
12
* 2. Updates without upsert - defaults NOT applied
···
26
26
});
27
27
28
28
let ProductModel: Model<typeof productSchema>;
29
-
let mongoServer: MongoMemoryServer;
30
29
31
30
Deno.test.beforeAll(async () => {
32
-
mongoServer = await MongoMemoryServer.create();
33
-
const uri = mongoServer.getUri();
34
-
await connect(uri, "test_defaults_db");
31
+
await setupTestDb();
35
32
ProductModel = new Model("test_products_defaults", productSchema);
36
33
});
37
34
···
41
38
42
39
Deno.test.afterAll(async () => {
43
40
await ProductModel.delete({});
44
-
await disconnect();
45
-
await mongoServer.stop();
41
+
await teardownTestDb();
46
42
});
47
43
48
44
Deno.test({
···
60
56
// Verify defaults were applied
61
57
const product = await ProductModel.findById(result.insertedId);
62
58
assertExists(product);
63
-
59
+
64
60
assertEquals(product.name, "Widget");
65
61
assertEquals(product.price, 29.99);
66
62
assertEquals(product.category, "general"); // default
···
84
80
createdAt: new Date("2023-01-01"),
85
81
tags: ["test"],
86
82
});
87
-
83
+
88
84
assertExists(insertResult.insertedId);
89
85
90
86
// Now update it - defaults should NOT be applied
91
87
await ProductModel.updateOne(
92
88
{ _id: insertResult.insertedId },
93
-
{ price: 24.99 }
89
+
{ price: 24.99 },
94
90
// No upsert flag
95
91
);
96
92
97
93
const updated = await ProductModel.findById(insertResult.insertedId);
98
94
assertExists(updated);
99
-
95
+
100
96
assertEquals(updated.price, 24.99); // updated
101
97
assertEquals(updated.category, "electronics"); // unchanged
102
98
assertEquals(updated.inStock, false); // unchanged
···
107
103
});
108
104
109
105
Deno.test({
110
-
name: "Defaults: Case 3 - Upsert that creates applies defaults via $setOnInsert",
106
+
name:
107
+
"Defaults: Case 3 - Upsert that creates applies defaults via $setOnInsert",
111
108
async fn() {
112
109
// Upsert with a query that won't match - will create new document
113
110
const result = await ProductModel.updateOne(
114
111
{ name: "NonExistent" },
115
112
{ price: 39.99 },
116
-
{ upsert: true }
113
+
{ upsert: true },
117
114
);
118
115
119
116
assertEquals(result.upsertedCount, 1);
···
122
119
// Verify the created document has defaults applied
123
120
const product = await ProductModel.findOne({ name: "NonExistent" });
124
121
assertExists(product);
125
-
122
+
126
123
assertEquals(product.price, 39.99); // from $set
127
124
assertEquals(product.name, "NonExistent"); // from query
128
125
assertEquals(product.category, "general"); // default via $setOnInsert
···
153
150
const result = await ProductModel.updateOne(
154
151
{ name: "ExistingProduct" },
155
152
{ price: 44.99 },
156
-
{ upsert: true }
153
+
{ upsert: true },
157
154
);
158
155
159
156
assertEquals(result.matchedCount, 1);
···
163
160
// Verify defaults were NOT applied (existing values preserved)
164
161
const product = await ProductModel.findOne({ name: "ExistingProduct" });
165
162
assertExists(product);
166
-
163
+
167
164
assertEquals(product.price, 44.99); // updated via $set
168
165
assertEquals(product.category, "premium"); // preserved (not overwritten with default)
169
166
assertEquals(product.inStock, false); // preserved
···
195
192
name: "Replaced",
196
193
price: 15.0,
197
194
// category, inStock, createdAt, tags not provided - defaults should apply
198
-
}
195
+
},
199
196
);
200
197
201
198
const product = await ProductModel.findById(insertResult.insertedId);
202
199
assertExists(product);
203
-
200
+
204
201
assertEquals(product.name, "Replaced");
205
202
assertEquals(product.price, 15.0);
206
203
assertEquals(product.category, "general"); // default applied
···
223
220
price: 99.99,
224
221
// Missing optional fields - defaults should apply
225
222
},
226
-
{ upsert: true }
223
+
{ upsert: true },
227
224
);
228
225
229
226
assertEquals(result.upsertedCount, 1);
···
231
228
232
229
const product = await ProductModel.findOne({ name: "NewViaReplace" });
233
230
assertExists(product);
234
-
231
+
235
232
assertEquals(product.name, "NewViaReplace");
236
233
assertEquals(product.price, 99.99);
237
234
assertEquals(product.category, "general"); // default
···
254
251
category: "custom", // Explicitly setting a field that has a default
255
252
// inStock not set - should get default
256
253
},
257
-
{ upsert: true }
254
+
{ upsert: true },
258
255
);
259
256
260
257
assertEquals(result.upsertedCount, 1);
261
258
262
259
const product = await ProductModel.findOne({ name: "CustomDefaults" });
263
260
assertExists(product);
264
-
261
+
265
262
assertEquals(product.name, "CustomDefaults"); // from query
266
263
assertEquals(product.price, 25.0); // from $set
267
264
assertEquals(product.category, "custom"); // from $set (NOT default)
···
292
289
assertExists(product.createdAt);
293
290
assertEquals(product.inStock, true);
294
291
assertEquals(product.tags, []);
295
-
292
+
296
293
if (product.name === "Bulk2") {
297
294
assertEquals(product.category, "special");
298
295
} else {
···
305
302
});
306
303
307
304
Deno.test({
308
-
name: "Defaults: applyDefaultsForUpsert preserves existing $setOnInsert values",
305
+
name:
306
+
"Defaults: applyDefaultsForUpsert preserves existing $setOnInsert values",
309
307
fn() {
310
308
const schema = z.object({
311
309
name: z.string(),
···
328
326
});
329
327
330
328
Deno.test({
331
-
name: "Defaults: applyDefaultsForUpsert keeps query equality fields untouched",
329
+
name:
330
+
"Defaults: applyDefaultsForUpsert keeps query equality fields untouched",
332
331
fn() {
333
332
const schema = z.object({
334
333
status: z.string().default("pending"),
···
349
348
});
350
349
351
350
Deno.test({
352
-
name: "Defaults: findOneAndUpdate with upsert preserves query equality fields",
351
+
name:
352
+
"Defaults: findOneAndUpdate with upsert preserves query equality fields",
353
353
async fn() {
354
354
await ProductModel.findOneAndUpdate(
355
355
{ name: "FindOneUpsert", category: "special" },
356
356
{ price: 12.5 },
357
-
{ upsert: true }
357
+
{ upsert: true },
358
358
);
359
359
360
360
const product = await ProductModel.findOne({ name: "FindOneUpsert" });
···
379
379
name: "FindOneReplaceUpsert",
380
380
price: 77.0,
381
381
},
382
-
{ upsert: true }
382
+
{ upsert: true },
383
383
);
384
384
385
385
assertExists(result.lastErrorObject?.upserted);
386
386
387
-
const product = await ProductModel.findOne({ name: "FindOneReplaceUpsert" });
387
+
const product = await ProductModel.findOne({
388
+
name: "FindOneReplaceUpsert",
389
+
});
388
390
assertExists(product);
389
391
390
392
assertEquals(product.name, "FindOneReplaceUpsert");
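
The upsert cases in this suite ultimately come down to how `applyDefaultsForUpsert` rewrites the update document. A hedged sketch with an illustrative schema, calling the helper directly as the later tests in this file do:

```ts
import { z } from "@zod/zod";
import { applyDefaultsForUpsert } from "./model/validation.ts"; // path from this diff

const productSchema = z.object({
  name: z.string(),
  price: z.number(),
  category: z.string().default("general"),
  inStock: z.boolean().default(true),
});

// `price` is already being $set and `name` is pinned by the query filter,
// so only the remaining defaults land in $setOnInsert.
const update = applyDefaultsForUpsert(
  productSchema,
  { name: "NonExistent" },
  { $set: { price: 39.99 } },
);
// update is now roughly:
// { $set: { price: 39.99 },
//   $setOnInsert: { category: "general", inStock: true } }
```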
+53
-38
tests/errors_test.ts
···
1
1
import { assert, assertEquals, assertExists, assertRejects } from "@std/assert";
2
2
import {
3
3
connect,
4
+
ConnectionError,
4
5
disconnect,
5
6
Model,
6
7
ValidationError,
7
-
ConnectionError,
8
8
} from "../mod.ts";
9
9
import { z } from "@zod/zod";
10
10
import { MongoMemoryServer } from "mongodb-memory-server-core";
···
41
41
async fn() {
42
42
const uri = await setupTestServer();
43
43
await connect(uri, "test_db");
44
-
44
+
45
45
const UserModel = new Model("users", userSchema);
46
-
46
+
47
47
await assertRejects(
48
48
async () => {
49
49
await UserModel.insertOne({ name: "", email: "invalid" });
50
50
},
51
51
ValidationError,
52
-
"Validation failed on insert"
52
+
"Validation failed on insert",
53
53
);
54
54
},
55
55
sanitizeResources: false,
···
61
61
async fn() {
62
62
const uri = await setupTestServer();
63
63
await connect(uri, "test_db");
64
-
64
+
65
65
const UserModel = new Model("users", userSchema);
66
-
66
+
67
67
try {
68
68
await UserModel.insertOne({ name: "", email: "invalid" });
69
69
throw new Error("Should have thrown ValidationError");
···
72
72
assertEquals(error.operation, "insert");
73
73
assertExists(error.issues);
74
74
assert(error.issues.length > 0);
75
-
75
+
76
76
// Check field errors
77
77
const fieldErrors = error.getFieldErrors();
78
78
assertExists(fieldErrors.name);
···
88
88
async fn() {
89
89
const uri = await setupTestServer();
90
90
await connect(uri, "test_db");
91
-
91
+
92
92
const UserModel = new Model("users", userSchema);
93
-
93
+
94
94
await assertRejects(
95
95
async () => {
96
96
await UserModel.updateOne({ name: "test" }, { email: "invalid-email" });
97
97
},
98
98
ValidationError,
99
-
"Validation failed on update"
99
+
"Validation failed on update",
100
100
);
101
101
},
102
102
sanitizeResources: false,
···
108
108
async fn() {
109
109
const uri = await setupTestServer();
110
110
await connect(uri, "test_db");
111
-
111
+
112
112
const UserModel = new Model("users", userSchema);
113
-
113
+
114
114
// First insert a valid document
115
115
await UserModel.insertOne({ name: "Test", email: "test@example.com" });
116
-
116
+
117
117
await assertRejects(
118
118
async () => {
119
-
await UserModel.replaceOne({ name: "Test" }, { name: "", email: "invalid" });
119
+
await UserModel.replaceOne({ name: "Test" }, {
120
+
name: "",
121
+
email: "invalid",
122
+
});
120
123
},
121
124
ValidationError,
122
-
"Validation failed on replace"
125
+
"Validation failed on replace",
123
126
);
124
127
},
125
128
sanitizeResources: false,
···
131
134
async fn() {
132
135
const uri = await setupTestServer();
133
136
await connect(uri, "test_db");
134
-
137
+
135
138
const UserModel = new Model("users", userSchema);
136
-
139
+
137
140
try {
138
141
await UserModel.updateOne({ name: "test" }, { age: -5 });
139
142
throw new Error("Should have thrown ValidationError");
140
143
} catch (error) {
141
144
assert(error instanceof ValidationError);
142
145
assertEquals(error.operation, "update");
143
-
146
+
144
147
const fieldErrors = error.getFieldErrors();
145
148
assertExists(fieldErrors.age);
146
149
}
···
154
157
async fn() {
155
158
await assertRejects(
156
159
async () => {
157
-
await connect("mongodb://invalid-host-that-does-not-exist:27017", "test_db", {
158
-
serverSelectionTimeoutMS: 1000, // 1 second timeout
159
-
connectTimeoutMS: 1000,
160
-
});
160
+
await connect(
161
+
"mongodb://invalid-host-that-does-not-exist:27017",
162
+
"test_db",
163
+
{
164
+
serverSelectionTimeoutMS: 1000, // 1 second timeout
165
+
connectTimeoutMS: 1000,
166
+
},
167
+
);
161
168
},
162
169
ConnectionError,
163
-
"Failed to connect to MongoDB"
170
+
"Failed to connect to MongoDB",
164
171
);
165
172
},
166
173
sanitizeResources: false,
···
171
178
name: "Errors: ConnectionError - should include URI in error",
172
179
async fn() {
173
180
try {
174
-
await connect("mongodb://invalid-host-that-does-not-exist:27017", "test_db", {
175
-
serverSelectionTimeoutMS: 1000, // 1 second timeout
176
-
connectTimeoutMS: 1000,
177
-
});
181
+
await connect(
182
+
"mongodb://invalid-host-that-does-not-exist:27017",
183
+
"test_db",
184
+
{
185
+
serverSelectionTimeoutMS: 1000, // 1 second timeout
186
+
connectTimeoutMS: 1000,
187
+
},
188
+
);
178
189
throw new Error("Should have thrown ConnectionError");
179
190
} catch (error) {
180
191
assert(error instanceof ConnectionError);
181
-
assertEquals(error.uri, "mongodb://invalid-host-that-does-not-exist:27017");
192
+
assertEquals(
193
+
error.uri,
194
+
"mongodb://invalid-host-that-does-not-exist:27017",
195
+
);
182
196
}
183
197
},
184
198
sanitizeResources: false,
···
186
200
});
187
201
188
202
Deno.test({
189
-
name: "Errors: ConnectionError - should throw when getDb called without connection",
203
+
name:
204
+
"Errors: ConnectionError - should throw when getDb called without connection",
190
205
async fn() {
191
206
// Make sure not connected
192
207
await disconnect();
193
-
208
+
194
209
const { getDb } = await import("../client/connection.ts");
195
-
210
+
196
211
try {
197
212
getDb();
198
213
throw new Error("Should have thrown ConnectionError");
···
210
225
async fn() {
211
226
const uri = await setupTestServer();
212
227
await connect(uri, "test_db");
213
-
228
+
214
229
const UserModel = new Model("users", userSchema);
215
-
230
+
216
231
try {
217
232
await UserModel.insertOne({
218
233
name: "",
···
222
237
throw new Error("Should have thrown ValidationError");
223
238
} catch (error) {
224
239
assert(error instanceof ValidationError);
225
-
240
+
226
241
const fieldErrors = error.getFieldErrors();
227
-
242
+
228
243
// Each field should have its own errors
229
244
assert(Array.isArray(fieldErrors.name));
230
245
assert(Array.isArray(fieldErrors.email));
231
246
assert(Array.isArray(fieldErrors.age));
232
-
247
+
233
248
// Verify error messages are present
234
249
assert(fieldErrors.name.length > 0);
235
250
assert(fieldErrors.email.length > 0);
···
245
260
async fn() {
246
261
const uri = await setupTestServer();
247
262
await connect(uri, "test_db");
248
-
263
+
249
264
const UserModel = new Model("users", userSchema);
250
-
265
+
251
266
try {
252
267
await UserModel.insertOne({ name: "", email: "invalid" });
253
268
} catch (error) {
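
Since these tests assert on `error.operation` and `getFieldErrors()`, here is a sketch of how an application might surface those details; the schema and response shape are illustrative.

```ts
import { z } from "@zod/zod";
import { Model, ValidationError } from "@nozzle/nozzle";

const userSchema = z.object({
  name: z.string().min(1),
  email: z.string().email(),
});

// Turn a failed insert into a field-by-field error map for an API response.
export async function createUser(payload: z.input<typeof userSchema>) {
  const UserModel = new Model("users", userSchema); // assumes connect() already ran

  try {
    return { ok: true as const, result: await UserModel.insertOne(payload) };
  } catch (error) {
    if (error instanceof ValidationError) {
      // error.operation is "insert"; getFieldErrors() groups messages by field,
      // e.g. { name: [...], email: [...] }
      return {
        ok: false as const,
        operation: error.operation,
        errors: error.getFieldErrors(),
      };
    }
    throw error;
  }
}
```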
+1
-4
tests/features_test.ts
···
14
14
15
15
Deno.test.beforeAll(async () => {
16
16
await setupTestDb();
17
-
UserModel = createUserModel();
17
+
UserModel = createUserModel("users_features");
18
18
});
19
19
20
20
Deno.test.beforeEach(async () => {
···
28
28
Deno.test({
29
29
name: "Features: Default Values - should handle default createdAt",
30
30
async fn() {
31
-
32
31
const newUser: UserInsert = {
33
32
name: "Default Test User",
34
33
email: "default@example.com",
···
49
48
sanitizeResources: false,
50
49
sanitizeOps: false,
51
50
});
52
-
53
-
+1
-2
tests/index_test.ts
···
12
12
13
13
Deno.test.beforeAll(async () => {
14
14
await setupTestDb();
15
-
UserModel = createUserModel();
15
+
UserModel = createUserModel("users_index");
16
16
});
17
17
18
18
Deno.test.beforeEach(async () => {
···
162
162
sanitizeResources: false,
163
163
sanitizeOps: false,
164
164
});
165
-
+71
-63
tests/transactions_test.ts
···
2
2
import {
3
3
connect,
4
4
disconnect,
5
+
endSession,
5
6
Model,
6
-
withTransaction,
7
7
startSession,
8
-
endSession,
8
+
withTransaction,
9
9
} from "../mod.ts";
10
10
import { z } from "@zod/zod";
11
11
import { MongoMemoryReplSet } from "mongodb-memory-server-core";
···
15
15
async function setupTestReplSet() {
16
16
if (!replSet) {
17
17
replSet = await MongoMemoryReplSet.create({
18
-
replSet: {
19
-
count: 3,
20
-
storageEngine: 'wiredTiger' // Required for transactions
18
+
replSet: {
19
+
count: 1,
20
+
storageEngine: "wiredTiger", // Required for transactions
21
21
},
22
22
});
23
23
}
···
63
63
async fn() {
64
64
const uri = await setupTestReplSet();
65
65
await connect(uri, "test_db");
66
-
66
+
67
67
const UserModel = new Model("users", userSchema);
68
68
const OrderModel = new Model("orders", orderSchema);
69
-
69
+
70
70
const result = await withTransaction(async (session) => {
71
71
const user = await UserModel.insertOne(
72
72
{ name: "Alice", email: "alice@example.com", balance: 100 },
73
-
{ session }
73
+
{ session },
74
74
);
75
-
75
+
76
76
const order = await OrderModel.insertOne(
77
77
{ userId: user.insertedId.toString(), amount: 50 },
78
-
{ session }
78
+
{ session },
79
79
);
80
-
80
+
81
81
return { userId: user.insertedId, orderId: order.insertedId };
82
82
});
83
-
83
+
84
84
assertExists(result.userId);
85
85
assertExists(result.orderId);
86
-
86
+
87
87
// Verify data was committed
88
88
const users = await UserModel.find({});
89
89
const orders = await OrderModel.find({});
···
99
99
async fn() {
100
100
const uri = await setupTestReplSet();
101
101
await connect(uri, "test_db");
102
-
102
+
103
103
const UserModel = new Model("users", userSchema);
104
-
104
+
105
105
await assertRejects(
106
106
async () => {
107
107
await withTransaction(async (session) => {
108
108
await UserModel.insertOne(
109
109
{ name: "Bob", email: "bob@example.com" },
110
-
{ session }
110
+
{ session },
111
111
);
112
-
112
+
113
113
// This will fail and abort the transaction
114
114
throw new Error("Simulated error");
115
115
});
116
116
},
117
117
Error,
118
-
"Simulated error"
118
+
"Simulated error",
119
119
);
120
-
120
+
121
121
// Verify no data was committed
122
122
const users = await UserModel.find({});
123
123
assertEquals(users.length, 0);
···
131
131
async fn() {
132
132
const uri = await setupTestReplSet();
133
133
await connect(uri, "test_db");
134
-
134
+
135
135
const UserModel = new Model("users", userSchema);
136
-
136
+
137
137
const result = await withTransaction(async (session) => {
138
138
const users = [];
139
-
139
+
140
140
for (let i = 0; i < 5; i++) {
141
141
const user = await UserModel.insertOne(
142
142
{ name: `User${i}`, email: `user${i}@example.com` },
143
-
{ session }
143
+
{ session },
144
144
);
145
145
users.push(user.insertedId);
146
146
}
147
-
147
+
148
148
return users;
149
149
});
150
-
150
+
151
151
assertEquals(result.length, 5);
152
-
152
+
153
153
// Verify all users were created
154
154
const users = await UserModel.find({});
155
155
assertEquals(users.length, 5);
···
159
159
});
160
160
161
161
Deno.test({
162
-
name: "Transactions: withTransaction - should support read and write operations",
162
+
name:
163
+
"Transactions: withTransaction - should support read and write operations",
163
164
async fn() {
164
165
const uri = await setupTestReplSet();
165
166
await connect(uri, "test_db");
166
-
167
+
167
168
const UserModel = new Model("users", userSchema);
168
-
169
+
169
170
// Insert initial user
170
171
const initialUser = await UserModel.insertOne({
171
172
name: "Charlie",
172
173
email: "charlie@example.com",
173
174
balance: 100,
174
175
});
175
-
176
+
176
177
const result = await withTransaction(async (session) => {
177
178
// Read
178
-
const user = await UserModel.findById(initialUser.insertedId, { session });
179
+
const user = await UserModel.findById(initialUser.insertedId, {
180
+
session,
181
+
});
179
182
assertExists(user);
180
-
183
+
181
184
// Update
182
185
await UserModel.updateOne(
183
186
{ _id: initialUser.insertedId },
184
187
{ balance: 150 },
185
-
{ session }
188
+
{ session },
186
189
);
187
-
190
+
188
191
// Read again
189
-
const updatedUser = await UserModel.findById(initialUser.insertedId, { session });
190
-
192
+
const updatedUser = await UserModel.findById(initialUser.insertedId, {
193
+
session,
194
+
});
195
+
191
196
return updatedUser?.balance;
192
197
});
193
-
198
+
194
199
assertEquals(result, 150);
195
200
},
196
201
sanitizeResources: false,
···
202
207
async fn() {
203
208
const uri = await setupTestReplSet();
204
209
await connect(uri, "test_db");
205
-
210
+
206
211
const UserModel = new Model("users", userSchema);
207
-
212
+
208
213
await assertRejects(
209
214
async () => {
210
215
await withTransaction(async (session) => {
211
216
// Valid insert
212
217
await UserModel.insertOne(
213
218
{ name: "Valid", email: "valid@example.com" },
214
-
{ session }
219
+
{ session },
215
220
);
216
-
221
+
217
222
// Invalid insert (will throw ValidationError)
218
223
await UserModel.insertOne(
219
224
{ name: "", email: "invalid" },
220
-
{ session }
225
+
{ session },
221
226
);
222
227
});
223
228
},
224
-
Error // ValidationError
229
+
Error, // ValidationError
225
230
);
226
-
231
+
227
232
// Transaction should have been aborted, no data should exist
228
233
const users = await UserModel.find({});
229
234
assertEquals(users.length, 0);
···
233
238
});
234
239
235
240
Deno.test({
236
-
name: "Transactions: Manual session - should work with manual session management",
241
+
name:
242
+
"Transactions: Manual session - should work with manual session management",
237
243
async fn() {
238
244
const uri = await setupTestReplSet();
239
245
await connect(uri, "test_db");
240
-
246
+
241
247
const UserModel = new Model("users", userSchema);
242
-
248
+
243
249
const session = startSession();
244
-
250
+
245
251
try {
246
252
await session.withTransaction(async () => {
247
253
await UserModel.insertOne(
248
254
{ name: "Dave", email: "dave@example.com" },
249
-
{ session }
255
+
{ session },
250
256
);
251
257
await UserModel.insertOne(
252
258
{ name: "Eve", email: "eve@example.com" },
253
-
{ session }
259
+
{ session },
254
260
);
255
261
});
256
262
} finally {
257
263
await endSession(session);
258
264
}
259
-
265
+
260
266
// Verify both users were created
261
267
const users = await UserModel.find({});
262
268
assertEquals(users.length, 2);
···
270
276
async fn() {
271
277
const uri = await setupTestReplSet();
272
278
await connect(uri, "test_db");
273
-
279
+
274
280
const UserModel = new Model("users", userSchema);
275
-
281
+
276
282
// Insert initial users
277
283
await UserModel.insertMany([
278
284
{ name: "User1", email: "user1@example.com" },
279
285
{ name: "User2", email: "user2@example.com" },
280
286
{ name: "User3", email: "user3@example.com" },
281
287
]);
282
-
288
+
283
289
await withTransaction(async (session) => {
284
290
// Delete one user
285
291
await UserModel.deleteOne({ name: "User1" }, { session });
286
-
292
+
287
293
// Delete multiple users
288
-
await UserModel.delete({ name: { $in: ["User2", "User3"] } }, { session });
294
+
await UserModel.delete({ name: { $in: ["User2", "User3"] } }, {
295
+
session,
296
+
});
289
297
});
290
-
298
+
291
299
// Verify all were deleted
292
300
const users = await UserModel.find({});
293
301
assertEquals(users.length, 0);
···
301
309
async fn() {
302
310
const uri = await setupTestReplSet();
303
311
await connect(uri, "test_db");
304
-
312
+
305
313
const UserModel = new Model("users", userSchema);
306
-
314
+
307
315
const result = await withTransaction(
308
316
async (session) => {
309
317
await UserModel.insertOne(
310
318
{ name: "Frank", email: "frank@example.com" },
311
-
{ session }
319
+
{ session },
312
320
);
313
321
return "success";
314
322
},
···
316
324
readPreference: "primary",
317
325
readConcern: { level: "snapshot" },
318
326
writeConcern: { w: "majority" },
319
-
}
327
+
},
320
328
);
321
-
329
+
322
330
assertEquals(result, "success");
323
-
331
+
324
332
const users = await UserModel.find({});
325
333
assertEquals(users.length, 1);
326
334
},
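
The last test above passes driver transaction options straight through `withTransaction`; here is a sketch of the same pattern in application code, assuming `connect()` has already been pointed at a replica set (transactions require one; a single-node set is enough). The schema and collection name are illustrative.

```ts
import { z } from "@zod/zod";
import { Model, withTransaction } from "@nozzle/nozzle";

const auditSchema = z.object({
  action: z.string(),
  at: z.date().default(() => new Date()),
});

export async function recordOrderEvents() {
  const AuditModel = new Model("audit_log", auditSchema); // assumes connect() already ran

  // Both writes commit together, with explicit read/write concerns.
  return await withTransaction(
    async (session) => {
      await AuditModel.insertOne({ action: "order.created" }, { session });
      await AuditModel.insertOne({ action: "invoice.created" }, { session });
      return "committed";
    },
    {
      // Driver transaction options, passed straight through (see the last test above)
      readPreference: "primary",
      readConcern: { level: "snapshot" },
      writeConcern: { w: "majority" },
    },
  );
}
```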
+35
-10
tests/utils.ts
···
13
13
14
14
let mongoServer: MongoMemoryServer | null = null;
15
15
let isSetup = false;
16
+
let setupRefCount = 0;
17
+
let activeDbName: string | null = null;
16
18
17
-
export async function setupTestDb() {
18
-
if (!isSetup) {
19
-
// Start MongoDB Memory Server
19
+
export async function setupTestDb(dbName = "test_db") {
20
+
setupRefCount++;
21
+
22
+
// If we're already connected, just share the same database
23
+
if (isSetup) {
24
+
if (activeDbName !== dbName) {
25
+
throw new Error(
26
+
`Test DB already initialized for ${activeDbName}, requested ${dbName}`,
27
+
);
28
+
}
29
+
return;
30
+
}
31
+
32
+
try {
20
33
mongoServer = await MongoMemoryServer.create();
21
34
const uri = mongoServer.getUri();
22
-
23
-
// Connect to the in-memory database
24
-
await connect(uri, "test_db");
35
+
36
+
await connect(uri, dbName);
37
+
activeDbName = dbName;
25
38
isSetup = true;
39
+
} catch (error) {
40
+
// Roll back refcount if setup failed so future attempts can retry
41
+
setupRefCount = Math.max(0, setupRefCount - 1);
42
+
throw error;
26
43
}
27
44
}
28
45
29
46
export async function teardownTestDb() {
30
-
if (isSetup) {
47
+
if (setupRefCount === 0) {
48
+
return;
49
+
}
50
+
51
+
setupRefCount = Math.max(0, setupRefCount - 1);
52
+
53
+
if (isSetup && setupRefCount === 0) {
31
54
await disconnect();
32
55
if (mongoServer) {
33
56
await mongoServer.stop();
34
57
mongoServer = null;
35
58
}
59
+
activeDbName = null;
36
60
isSetup = false;
37
61
}
38
62
}
39
63
40
-
export function createUserModel(): Model<typeof userSchema> {
41
-
return new Model("users", userSchema);
64
+
export function createUserModel(
65
+
collectionName = "users",
66
+
): Model<typeof userSchema> {
67
+
return new Model(collectionName, userSchema);
42
68
}
43
69
44
70
export async function cleanupCollection(model: Model<typeof userSchema>) {
45
71
await model.delete({});
46
72
}
47
-
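
A sketch of a test file written against these helpers, showing the intended pattern: a shared in-memory server via the refcounted `setupTestDb`/`teardownTestDb`, plus a per-suite collection name so suites do not interfere. The test name and assertion are illustrative.

```ts
import { assertEquals } from "@std/assert";
import {
  cleanupCollection,
  createUserModel,
  setupTestDb,
  teardownTestDb,
} from "./utils.ts";

let UserModel: ReturnType<typeof createUserModel>;

Deno.test.beforeAll(async () => {
  await setupTestDb();
  // A suite-specific collection name keeps parallel suites from clobbering each other.
  UserModel = createUserModel("users_example");
});

Deno.test.beforeEach(async () => {
  await cleanupCollection(UserModel);
});

Deno.test.afterAll(async () => {
  // Only the last teardown actually stops the in-memory server.
  await teardownTestDb();
});

Deno.test({
  name: "Example: collection starts empty",
  async fn() {
    assertEquals((await UserModel.find({})).length, 0);
  },
  sanitizeResources: false,
  sanitizeOps: false,
});
```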
+1
-8
tests/validation_test.ts
···
14
14
15
15
Deno.test.beforeAll(async () => {
16
16
await setupTestDb();
17
-
UserModel = createUserModel();
17
+
UserModel = createUserModel("users_validation");
18
18
});
19
19
20
20
Deno.test.beforeEach(async () => {
···
28
28
Deno.test({
29
29
name: "Validation: Schema - should validate user data on insert",
30
30
async fn() {
31
-
32
31
const invalidUser = {
33
32
name: "Invalid User",
34
33
email: "not-an-email", // Invalid email
···
50
49
Deno.test({
51
50
name: "Validation: Update - should reject invalid email in update",
52
51
async fn() {
53
-
54
52
// Insert a user for this test
55
53
const newUser: UserInsert = {
56
54
name: "Validation Test User",
···
79
77
Deno.test({
80
78
name: "Validation: Update - should reject negative age in update",
81
79
async fn() {
82
-
83
80
// Insert a user for this test
84
81
const newUser: UserInsert = {
85
82
name: "Age Validation Test User",
···
108
105
Deno.test({
109
106
name: "Validation: Update - should reject invalid name type in update",
110
107
async fn() {
111
-
112
108
// Insert a user for this test
113
109
const newUser: UserInsert = {
114
110
name: "Type Validation Test User",
···
137
133
Deno.test({
138
134
name: "Validation: Update - should accept valid partial updates",
139
135
async fn() {
140
-
141
136
// Insert a user for this test
142
137
const newUser: UserInsert = {
143
138
name: "Valid Update Test User",
···
168
163
sanitizeResources: false,
169
164
sanitizeOps: false,
170
165
});
171
-
172
-
+5
-6
types.ts
···
1
1
import type { z } from "@zod/zod";
2
-
import type { Document, ObjectId, IndexDescription } from "mongodb";
2
+
import type { Document, IndexDescription, ObjectId } from "mongodb";
3
3
4
4
/**
5
5
* Type alias for Zod schema objects
···
11
11
*/
12
12
export type Infer<T extends Schema> = z.infer<T> & Document;
13
13
14
-
15
14
/**
16
15
* Infer the model type from a Zod schema, including MongoDB Document and ObjectId
17
16
*/
18
17
export type InferModel<T extends Schema> = Infer<T> & {
19
-
_id?: ObjectId;
20
-
};
18
+
_id?: ObjectId;
19
+
};
21
20
22
21
/**
23
22
* Infer the input type for a Zod schema (handles defaults)
···
31
30
32
31
/**
33
32
* Complete definition of a model, including schema and indexes
34
-
*
33
+
*
35
34
* @example
36
35
* ```ts
37
36
* const userDef: ModelDef<typeof userSchema> = {
···
46
45
export type ModelDef<T extends Schema> = {
47
46
schema: T;
48
47
indexes?: Indexes;
49
-
};
48
+
};
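
To close out, a brief sketch of how `Input` and `InferModel` differ in practice for a schema with a default; the schema, field names, and the exact optionality spelled out in the comments are illustrative.

```ts
import { z } from "@zod/zod";
import type { InferModel, Input } from "@nozzle/nozzle";

const userSchema = z.object({
  name: z.string(),
  email: z.string().email(),
  createdAt: z.date().default(() => new Date()),
});

// What you pass to insertOne: fields with defaults may be omitted.
type UserInsert = Input<typeof userSchema>;
// -> { name: string; email: string; createdAt?: Date }

// What you read back: schema output plus the optional MongoDB _id.
type UserDoc = InferModel<typeof userSchema>;
// -> { name: string; email: string; createdAt: Date; _id?: ObjectId }

export const newUser: UserInsert = { name: "Ada", email: "ada@example.com" };
```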