Diffdown is a real-time collaborative Markdown editor/previewer built on the AT Protocol (diffdown.com).
NOTE: the code below is not Diffdown application logic — it appears to be the bundled @lezer/lr LR-parser runtime (it imports from '@lezer/common') vendored for the editor's Markdown parsing.
1import { Parser, NodeProp, NodeSet, NodeType, DefaultBufferLength, Tree, IterMode } from '@lezer/common';
2
3/**
4A parse stack. These are used internally by the parser to track
5parsing progress. They also provide some properties and methods
6that external code such as a tokenizer can use to get information
7about the parse state.
8*/
class Stack {
    /**
    @internal
    */
    constructor(
    /**
    The parse that this stack is part of @internal
    */
    p,
    /**
    Holds state, input pos, buffer index triplets for all but the
    top state @internal
    */
    stack,
    /**
    The current parse state @internal
    */
    state,
    // The position at which the next reduce should take place. This
    // can be less than `this.pos` when skipped expressions have been
    // added to the stack (which should be moved outside of the next
    // reduction)
    /**
    @internal
    */
    reducePos,
    /**
    The input position up to which this stack has parsed.
    */
    pos,
    /**
    The dynamic score of the stack, including dynamic precedence
    and error-recovery penalties
    @internal
    */
    score,
    // The output buffer. Holds (type, start, end, size) quads
    // representing nodes created by the parser, where `size` is
    // amount of buffer array entries covered by this node.
    /**
    @internal
    */
    buffer,
    // The base offset of the buffer. When stacks are split, the split
    // instance shared the buffer history with its parent up to
    // `bufferBase`, which is the absolute offset (including the
    // offset of previous splits) into the buffer at which this stack
    // starts writing.
    /**
    @internal
    */
    bufferBase,
    /**
    The current context-tracker state (a StackContext), or null when
    the grammar has no context tracker. @internal
    */
    curContext,
    /**
    @internal
    */
    lookAhead = 0,
    // A parent stack from which this was split off, if any. This is
    // set up so that it always points to a stack that has some
    // additional buffer content, never to a stack with an equal
    // `bufferBase`.
    /**
    @internal
    */
    parent) {
        this.p = p;
        this.stack = stack;
        this.state = state;
        this.reducePos = reducePos;
        this.pos = pos;
        this.score = score;
        this.buffer = buffer;
        this.bufferBase = bufferBase;
        this.curContext = curContext;
        this.lookAhead = lookAhead;
        this.parent = parent;
    }
    /**
    @internal
    */
    toString() {
        // Debug rendering: the states on the stack plus the current state,
        // the input position, and (when nonzero) the recovery score.
        return `[${this.stack.filter((_, i) => i % 3 == 0).concat(this.state)}]@${this.pos}${this.score ? "!" + this.score : ""}`;
    }
    // Start an empty stack
    /**
    @internal
    */
    static start(p, state, pos = 0) {
        let cx = p.parser.context;
        return new Stack(p, [], state, pos, pos, 0, [], 0, cx ? new StackContext(cx, cx.start) : null, 0, null);
    }
    /**
    The stack's current [context](#lr.ContextTracker) value, if
    any. Its type will depend on the context tracker's type
    parameter, or it will be `null` if there is no context
    tracker.
    */
    get context() { return this.curContext ? this.curContext.context : null; }
    // Push a state onto the stack, tracking its start position as well
    // as the buffer base at that point.
    /**
    @internal
    */
    pushState(state, start) {
        this.stack.push(this.state, start, this.bufferBase + this.buffer.length);
        this.state = state;
    }
    // Apply a reduce action
    /**
    @internal
    */
    reduce(action) {
        var _a;
        // An action encodes the reduce depth in its high bits and the
        // produced term in its low 16 bits.
        let depth = action >> 19 /* Action.ReduceDepthShift */, type = action & 65535 /* Action.ValueMask */;
        let { parser } = this.p;
        // When the reduce position trails the parse position by more than
        // the lookahead margin, record the lookahead distance first.
        let lookaheadRecord = this.reducePos < this.pos - 25 /* Lookahead.Margin */ && this.setLookAhead(this.pos);
        let dPrec = parser.dynamicPrecedence(type);
        if (dPrec)
            this.score += dPrec;
        if (depth == 0) {
            this.pushState(parser.getGoto(this.state, type, true), this.reducePos);
            // Zero-depth reductions are a special case—they add stuff to
            // the stack without popping anything off.
            if (type < parser.minRepeatTerm)
                this.storeNode(type, this.reducePos, this.reducePos, lookaheadRecord ? 8 : 4, true);
            this.reduceContext(type, this.reducePos);
            return;
        }
        // Find the base index into `this.stack`, content after which will
        // be dropped. Note that with `StayFlag` reductions we need to
        // consume two extra frames (the dummy parent node for the skipped
        // expression and the state that we'll be staying in, which should
        // be moved to `this.state`).
        let base = this.stack.length - ((depth - 1) * 3) - (action & 262144 /* Action.StayFlag */ ? 6 : 0);
        let start = base ? this.stack[base - 2] : this.p.ranges[0].from, size = this.reducePos - start;
        // This is a kludge to try and detect overly deep left-associative
        // trees, which will not increase the parse stack depth and thus
        // won't be caught by the regular stack-depth limit check.
        if (size >= 2000 /* Recover.MinBigReduction */ && !((_a = this.p.parser.nodeSet.types[type]) === null || _a === void 0 ? void 0 : _a.isAnonymous)) {
            if (start == this.p.lastBigReductionStart) {
                this.p.bigReductionCount++;
                this.p.lastBigReductionSize = size;
            }
            else if (this.p.lastBigReductionSize < size) {
                this.p.bigReductionCount = 1;
                this.p.lastBigReductionStart = start;
                this.p.lastBigReductionSize = size;
            }
        }
        let bufferBase = base ? this.stack[base - 1] : 0, count = this.bufferBase + this.buffer.length - bufferBase;
        // Store normal terms or `R -> R R` repeat reductions
        if (type < parser.minRepeatTerm || (action & 131072 /* Action.RepeatFlag */)) {
            let pos = parser.stateFlag(this.state, 1 /* StateFlag.Skipped */) ? this.pos : this.reducePos;
            this.storeNode(type, start, pos, count + 4, true);
        }
        if (action & 262144 /* Action.StayFlag */) {
            this.state = this.stack[base];
        }
        else {
            let baseStateID = this.stack[base - 3];
            this.state = parser.getGoto(baseStateID, type, true);
        }
        while (this.stack.length > base)
            this.stack.pop();
        this.reduceContext(type, start);
    }
    // Shift a value into the buffer
    /**
    @internal
    */
    storeNode(term, start, end, size = 4, mustSink = false) {
        if (term == 0 /* Term.Err */ &&
            (!this.stack.length || this.stack[this.stack.length - 1] < this.buffer.length + this.bufferBase)) {
            // Try to omit/merge adjacent error nodes
            let cur = this, top = this.buffer.length;
            if (top == 0 && cur.parent) {
                // Our own buffer is empty; look at the tail of the parent's
                // (shared) buffer instead.
                top = cur.bufferBase - cur.parent.bufferBase;
                cur = cur.parent;
            }
            if (top > 0 && cur.buffer[top - 4] == 0 /* Term.Err */ && cur.buffer[top - 1] > -1) {
                if (start == end)
                    return;
                if (cur.buffer[top - 2] >= start) {
                    // Extend the previous error node to cover this one.
                    cur.buffer[top - 2] = end;
                    return;
                }
            }
        }
        if (!mustSink || this.pos == end) { // Simple case, just append
            this.buffer.push(term, start, end, size);
        }
        else { // There may be skipped nodes that have to be moved forward
            let index = this.buffer.length;
            if (index > 0 && (this.buffer[index - 4] != 0 /* Term.Err */ || this.buffer[index - 1] < 0)) {
                // Only move records when a non-marker node after `end` exists.
                let mustMove = false;
                for (let scan = index; scan > 0 && this.buffer[scan - 2] > end; scan -= 4) {
                    if (this.buffer[scan - 1] >= 0) {
                        mustMove = true;
                        break;
                    }
                }
                if (mustMove)
                    while (index > 0 && this.buffer[index - 2] > end) {
                        // Move this record forward
                        this.buffer[index] = this.buffer[index - 4];
                        this.buffer[index + 1] = this.buffer[index - 3];
                        this.buffer[index + 2] = this.buffer[index - 2];
                        this.buffer[index + 3] = this.buffer[index - 1];
                        index -= 4;
                        if (size > 4)
                            size -= 4;
                    }
            }
            // Write the new node in the slot opened up (or at the end).
            this.buffer[index] = term;
            this.buffer[index + 1] = start;
            this.buffer[index + 2] = end;
            this.buffer[index + 3] = size;
        }
    }
    // Apply a shift action
    /**
    @internal
    */
    shift(action, type, start, end) {
        if (action & 131072 /* Action.GotoFlag */) {
            this.pushState(action & 65535 /* Action.ValueMask */, this.pos);
        }
        else if ((action & 262144 /* Action.StayFlag */) == 0) { // Regular shift
            let nextState = action, { parser } = this.p;
            this.pos = end;
            let skipped = parser.stateFlag(nextState, 1 /* StateFlag.Skipped */);
            // Skipped or zero-length non-tree tokens don't move reducePos
            if (!skipped && (end > start || type <= parser.maxNode))
                this.reducePos = end;
            this.pushState(nextState, skipped ? start : Math.min(start, this.reducePos));
            this.shiftContext(type, start);
            if (type <= parser.maxNode)
                this.buffer.push(type, start, end, 4);
        }
        else { // Shift-and-stay, which means this is a skipped token
            this.pos = end;
            this.shiftContext(type, start);
            if (type <= this.p.parser.maxNode)
                this.buffer.push(type, start, end, 4);
        }
    }
    // Apply an action
    /**
    @internal
    */
    apply(action, next, nextStart, nextEnd) {
        if (action & 65536 /* Action.ReduceFlag */)
            this.reduce(action);
        else
            this.shift(action, next, nextStart, nextEnd);
    }
    // Add a prebuilt (reused) node into the buffer.
    /**
    @internal
    */
    useNode(value, next) {
        // Reused nodes are stored in `p.reused` and referenced from the
        // buffer by index (with size -1 as the marker).
        let index = this.p.reused.length - 1;
        if (index < 0 || this.p.reused[index] != value) {
            this.p.reused.push(value);
            index++;
        }
        let start = this.pos;
        this.reducePos = this.pos = start + value.length;
        this.pushState(next, start);
        this.buffer.push(index, start, this.reducePos, -1 /* size == -1 means this is a reused value */);
        if (this.curContext)
            this.updateContext(this.curContext.tracker.reuse(this.curContext.context, value, this, this.p.stream.reset(this.pos - value.length)));
    }
    // Split the stack. Due to the buffer sharing and the fact
    // that `this.stack` tends to stay quite shallow, this isn't very
    // expensive.
    /**
    @internal
    */
    split() {
        let parent = this;
        let off = parent.buffer.length;
        // Because the top of the buffer (after this.pos) may be mutated
        // to reorder reductions and skipped tokens, and shared buffers
        // should be immutable, this copies any outstanding skipped tokens
        // to the new buffer, and puts the base pointer before them.
        while (off > 0 && parent.buffer[off - 2] > parent.reducePos)
            off -= 4;
        let buffer = parent.buffer.slice(off), base = parent.bufferBase + off;
        // Make sure parent points to an actual parent with content, if there is such a parent.
        while (parent && base == parent.bufferBase)
            parent = parent.parent;
        return new Stack(this.p, this.stack.slice(), this.state, this.reducePos, this.pos, this.score, buffer, base, this.curContext, this.lookAhead, parent);
    }
    // Try to recover from an error by 'deleting' (ignoring) one token.
    /**
    @internal
    */
    recoverByDelete(next, nextEnd) {
        let isNode = next <= this.p.parser.maxNode;
        if (isNode)
            this.storeNode(next, this.pos, nextEnd, 4);
        // Wrap the skipped token in an error node and pay the penalty.
        this.storeNode(0 /* Term.Err */, this.pos, nextEnd, isNode ? 8 : 4);
        this.pos = this.reducePos = nextEnd;
        this.score -= 190 /* Recover.Delete */;
    }
    /**
    Check if the given term would be able to be shifted (optionally
    after some reductions) on this stack. This can be useful for
    external tokenizers that want to make sure they only provide a
    given token when it applies.
    */
    canShift(term) {
        // Simulate reductions without mutating this stack.
        for (let sim = new SimulatedStack(this);;) {
            let action = this.p.parser.stateSlot(sim.state, 4 /* ParseState.DefaultReduce */) || this.p.parser.hasAction(sim.state, term);
            if (action == 0)
                return false;
            if ((action & 65536 /* Action.ReduceFlag */) == 0)
                return true;
            sim.reduce(action);
        }
    }
    // Apply up to Recover.MaxNext recovery actions that conceptually
    // inserts some missing token or rule.
    /**
    @internal
    */
    recoverByInsert(next) {
        if (this.stack.length >= 300 /* Recover.MaxInsertStackDepth */)
            return [];
        let nextStates = this.p.parser.nextStates(this.state);
        if (nextStates.length > 4 /* Recover.MaxNext */ << 1 || this.stack.length >= 120 /* Recover.DampenInsertStackDepth */) {
            // Too many options: prefer states that can act on `next`.
            let best = [];
            for (let i = 0, s; i < nextStates.length; i += 2) {
                if ((s = nextStates[i + 1]) != this.state && this.p.parser.hasAction(s, next))
                    best.push(nextStates[i], s);
            }
            if (this.stack.length < 120 /* Recover.DampenInsertStackDepth */)
                for (let i = 0; best.length < 4 /* Recover.MaxNext */ << 1 && i < nextStates.length; i += 2) {
                    let s = nextStates[i + 1];
                    if (!best.some((v, i) => (i & 1) && v == s))
                        best.push(nextStates[i], s);
                }
            nextStates = best;
        }
        let result = [];
        for (let i = 0; i < nextStates.length && result.length < 4 /* Recover.MaxNext */; i += 2) {
            let s = nextStates[i + 1];
            if (s == this.state)
                continue;
            // Each candidate insertion runs on its own split-off stack.
            let stack = this.split();
            stack.pushState(s, this.pos);
            stack.storeNode(0 /* Term.Err */, stack.pos, stack.pos, 4, true);
            stack.shiftContext(nextStates[i], this.pos);
            stack.reducePos = this.pos;
            stack.score -= 200 /* Recover.Insert */;
            result.push(stack);
        }
        return result;
    }
    // Force a reduce, if possible. Return false if that can't
    // be done.
    /**
    @internal
    */
    forceReduce() {
        let { parser } = this.p;
        let reduce = parser.stateSlot(this.state, 5 /* ParseState.ForcedReduce */);
        if ((reduce & 65536 /* Action.ReduceFlag */) == 0)
            return false;
        if (!parser.validAction(this.state, reduce)) {
            let depth = reduce >> 19 /* Action.ReduceDepthShift */, term = reduce & 65535 /* Action.ValueMask */;
            let target = this.stack.length - depth * 3;
            if (target < 0 || parser.getGoto(this.stack[target], term, false) < 0) {
                // The stored forced reduction doesn't apply here; scan the
                // automaton for one that does.
                let backup = this.findForcedReduction();
                if (backup == null)
                    return false;
                reduce = backup;
            }
            this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
            this.score -= 100 /* Recover.Reduce */;
        }
        this.reducePos = this.pos;
        this.reduce(reduce);
        return true;
    }
    /**
    Try to scan through the automaton to find some kind of reduction
    that can be applied. Used when the regular ForcedReduce field
    isn't a valid action. @internal
    */
    findForcedReduction() {
        let { parser } = this.p, seen = [];
        let explore = (state, depth) => {
            if (seen.includes(state))
                return;
            seen.push(state);
            return parser.allActions(state, (action) => {
                if (action & (262144 /* Action.StayFlag */ | 131072 /* Action.GotoFlag */)) ; // stay/goto actions can't help here
                else if (action & 65536 /* Action.ReduceFlag */) {
                    let rDepth = (action >> 19 /* Action.ReduceDepthShift */) - depth;
                    if (rDepth > 1) {
                        let term = action & 65535 /* Action.ValueMask */, target = this.stack.length - rDepth * 3;
                        if (target >= 0 && parser.getGoto(this.stack[target], term, false) >= 0)
                            return (rDepth << 19 /* Action.ReduceDepthShift */) | 65536 /* Action.ReduceFlag */ | term;
                    }
                }
                else {
                    // Shift action: recurse into the shifted-to state.
                    let found = explore(action, depth + 1);
                    if (found != null)
                        return found;
                }
            });
        };
        return explore(this.state, 0);
    }
    /**
    @internal
    */
    forceAll() {
        // Keep force-reducing until an accepting state is reached.
        while (!this.p.parser.stateFlag(this.state, 2 /* StateFlag.Accepting */)) {
            if (!this.forceReduce()) {
                this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
                break;
            }
        }
        return this;
    }
    /**
    Check whether this state has no further actions (assumed to be a direct descendant of the
    top state, since any other states must be able to continue
    somehow). @internal
    */
    get deadEnd() {
        if (this.stack.length != 3)
            return false;
        let { parser } = this.p;
        return parser.data[parser.stateSlot(this.state, 1 /* ParseState.Actions */)] == 65535 /* Seq.End */ &&
            !parser.stateSlot(this.state, 4 /* ParseState.DefaultReduce */);
    }
    /**
    Restart the stack (put it back in its start state). Only safe
    when this.stack.length == 3 (state is directly below the top
    state). @internal
    */
    restart() {
        this.storeNode(0 /* Term.Err */, this.pos, this.pos, 4, true);
        this.state = this.stack[0];
        this.stack.length = 0;
    }
    /**
    @internal
    */
    sameState(other) {
        if (this.state != other.state || this.stack.length != other.stack.length)
            return false;
        // Compare only the state entries (every third slot) of the stacks.
        for (let i = 0; i < this.stack.length; i += 3)
            if (this.stack[i] != other.stack[i])
                return false;
        return true;
    }
    /**
    Get the parser used by this stack.
    */
    get parser() { return this.p.parser; }
    /**
    Test whether a given dialect (by numeric ID, as exported from
    the terms file) is enabled.
    */
    dialectEnabled(dialectID) { return this.p.parser.dialect.flags[dialectID]; }
    // Feed a shift to the context tracker, if there is one.
    shiftContext(term, start) {
        if (this.curContext)
            this.updateContext(this.curContext.tracker.shift(this.curContext.context, term, this, this.p.stream.reset(start)));
    }
    // Feed a reduction to the context tracker, if there is one.
    reduceContext(term, start) {
        if (this.curContext)
            this.updateContext(this.curContext.tracker.reduce(this.curContext.context, term, this, this.p.stream.reset(start)));
    }
    /**
    Write a context-hash marker (size -3) into the buffer, unless one
    is already on top. @internal
    */
    emitContext() {
        let last = this.buffer.length - 1;
        if (last < 0 || this.buffer[last] != -3)
            this.buffer.push(this.curContext.hash, this.pos, this.pos, -3);
    }
    /**
    Write a lookahead marker (size -4) into the buffer, unless one is
    already on top. @internal
    */
    emitLookAhead() {
        let last = this.buffer.length - 1;
        if (last < 0 || this.buffer[last] != -4)
            this.buffer.push(this.lookAhead, this.pos, this.pos, -4);
    }
    // Swap in a new context value, emitting a context-change record
    // when its hash differs from the current one.
    updateContext(context) {
        if (context != this.curContext.context) {
            let newCx = new StackContext(this.curContext.tracker, context);
            if (newCx.hash != this.curContext.hash)
                this.emitContext();
            this.curContext = newCx;
        }
    }
    /**
    Record a new (larger) lookahead distance, emitting the previous
    value into the buffer first. Returns false when the given value
    doesn't exceed the current one. @internal
    */
    setLookAhead(lookAhead) {
        if (lookAhead <= this.lookAhead)
            return false;
        this.emitLookAhead();
        this.lookAhead = lookAhead;
        return true;
    }
    /**
    Flush pending strict-context and lookahead records into the
    buffer. @internal
    */
    close() {
        if (this.curContext && this.curContext.tracker.strict)
            this.emitContext();
        if (this.lookAhead > 0)
            this.emitLookAhead();
    }
}
// Immutable pairing of a context tracker with its current context
// value. The hash is precomputed, but only for strict trackers —
// non-strict trackers never compare context hashes.
class StackContext {
    constructor(tracker, context) {
        this.tracker = tracker;
        this.context = context;
        if (tracker.strict)
            this.hash = tracker.hash(context);
        else
            this.hash = 0;
    }
}
541// Used to cheaply run some reductions to scan ahead without mutating
542// an entire stack
class SimulatedStack {
    constructor(start) {
        this.start = start;
        this.state = start.state;
        this.stack = start.stack;
        this.base = this.stack.length;
    }
    // Simulate a reduce action: adjust `base` (and, for zero-depth
    // reductions, push a frame onto a private copy of the stack), then
    // follow the goto for the reduced term.
    reduce(action) {
        const term = action & 65535 /* Action.ValueMask */;
        const depth = action >> 19 /* Action.ReduceDepthShift */;
        if (depth) {
            // Drop depth-1 frames; the remaining one is replaced below.
            this.base -= (depth - 1) * 3;
        }
        else {
            // Zero-depth reductions push a frame. Copy-on-write so the
            // original stack's array is never mutated.
            if (this.stack == this.start.stack)
                this.stack = this.stack.slice();
            this.stack.push(this.state, 0, 0);
            this.base += 3;
        }
        this.state = this.start.p.parser.getGoto(this.stack[this.base - 3], term, true);
    }
}
565// This is given to `Tree.build` to build a buffer, and encapsulates
566// the parent-stack-walking necessary to read the nodes.
class StackBufferCursor {
    constructor(stack, pos, index) {
        this.stack = stack;
        this.pos = pos;
        this.index = index;
        this.buffer = stack.buffer;
        if (this.index == 0)
            this.maybeNext();
    }
    static create(stack, pos = stack.bufferBase + stack.buffer.length) {
        return new StackBufferCursor(stack, pos, pos - stack.bufferBase);
    }
    // When this stack's own slice of the buffer is exhausted, continue
    // reading from the parent stack's shared buffer, if any.
    maybeNext() {
        const parent = this.stack.parent;
        if (parent == null)
            return;
        this.index = this.stack.bufferBase - parent.bufferBase;
        this.stack = parent;
        this.buffer = parent.buffer;
    }
    // Field accessors for the buffer record just before `index`
    // (records are (id, start, end, size) quads, read back to front).
    get id() { return this.buffer[this.index - 4]; }
    get start() { return this.buffer[this.index - 3]; }
    get end() { return this.buffer[this.index - 2]; }
    get size() { return this.buffer[this.index - 1]; }
    next() {
        this.index -= 4;
        this.pos -= 4;
        if (this.index == 0)
            this.maybeNext();
    }
    fork() {
        return new StackBufferCursor(this.stack, this.pos, this.index);
    }
}
601
602// See lezer-generator/src/encode.ts for comments about the encoding
603// used here
function decodeArray(input, Type = Uint16Array) {
    // Already decoded (or handed a plain array) — pass through unchanged.
    if (typeof input != "string")
        return input;
    let result = null;
    let write = 0;
    let read = 0;
    while (read < input.length) {
        // Decode one base-46 number, most significant digit first.
        let value = 0;
        for (let done = false; !done;) {
            const code = input.charCodeAt(read++);
            if (code == 126 /* Encode.BigValCode */) {
                // Shorthand code for the maximum (sentinel) value.
                value = 65535 /* Encode.BigVal */;
                break;
            }
            // Undo the gaps the encoder leaves for '"' and '\'.
            let shifted = code;
            if (shifted >= 92 /* Encode.Gap2 */)
                shifted--;
            if (shifted >= 34 /* Encode.Gap1 */)
                shifted--;
            let digit = shifted - 32 /* Encode.Start */;
            // Digits >= Base mark the final digit of a number.
            if (digit >= 46 /* Encode.Base */) {
                digit -= 46 /* Encode.Base */;
                done = true;
            }
            value += digit;
            if (!done)
                value *= 46 /* Encode.Base */;
        }
        // The first decoded number gives the array length; the rest
        // are its elements.
        if (result)
            result[write++] = value;
        else
            result = new Type(value);
    }
    return result;
}
637
// Mutable record holding the result of a tokenizer run, reused across
// calls. A value of -1 in the position/value fields means "unset".
class CachedToken {
    constructor() {
        Object.assign(this, {
            start: -1,
            value: -1,
            end: -1,
            extended: -1,
            lookAhead: 0,
            mask: 0,
            context: 0,
        });
    }
}
// Shared placeholder token record, used when no real token is being tracked.
const nullToken = new CachedToken();
650/**
651[Tokenizers](#lr.ExternalTokenizer) interact with the input
652through this interface. It presents the input as a stream of
653characters, tracking lookahead and hiding the complexity of
654[ranges](#common.Parser.parse^ranges) from tokenizer code.
655*/
class InputStream {
    /**
    @internal
    */
    constructor(
    /**
    @internal
    */
    input,
    /**
    @internal
    */
    ranges) {
        this.input = input;
        this.ranges = ranges;
        /**
        The chunk of input text currently being read, starting at
        `chunkPos`. @internal
        */
        this.chunk = "";
        /**
        Offset of `pos` within `chunk`. @internal
        */
        this.chunkOff = 0;
        /**
        Backup chunk
        */
        this.chunk2 = "";
        this.chunk2Pos = 0;
        /**
        The character code of the next code unit in the input, or -1
        when the stream is at the end of the input.
        */
        this.next = -1;
        /**
        The token record currently being filled in by tokenizers.
        @internal
        */
        this.token = nullToken;
        this.rangeIndex = 0;
        this.pos = this.chunkPos = ranges[0].from;
        this.range = ranges[0];
        this.end = ranges[ranges.length - 1].to;
        this.readNext();
    }
    /**
    Resolve `this.pos + offset` to a document position, stepping
    across range boundaries in either direction. Returns null when
    the resulting position falls outside the covered ranges. `assoc`
    determines whether a position exactly at a range end sticks to
    that range (< 0) or moves to the next one. @internal
    */
    resolveOffset(offset, assoc) {
        let range = this.range, index = this.rangeIndex;
        let pos = this.pos + offset;
        while (pos < range.from) {
            if (!index)
                return null;
            let next = this.ranges[--index];
            // Skip backward over the gap between ranges.
            pos -= range.from - next.to;
            range = next;
        }
        while (assoc < 0 ? pos > range.to : pos >= range.to) {
            if (index == this.ranges.length - 1)
                return null;
            let next = this.ranges[++index];
            // Skip forward over the gap between ranges.
            pos += next.from - range.to;
            range = next;
        }
        return pos;
    }
    /**
    Clip the given position to the nearest position inside the
    parsed ranges (or `this.end` when it lies past all of them).
    @internal
    */
    clipPos(pos) {
        if (pos >= this.range.from && pos < this.range.to)
            return pos;
        for (let range of this.ranges)
            if (range.to > pos)
                return Math.max(pos, range.from);
        return this.end;
    }
    /**
    Look at a code unit near the stream position. `.peek(0)` equals
    `.next`, `.peek(-1)` gives you the previous character, and so
    on.

    Note that looking around during tokenizing creates dependencies
    on potentially far-away content, which may reduce the
    effectiveness incremental parsing—when looking forward—or even
    cause invalid reparses when looking backward more than 25 code
    units, since the library does not track lookbehind.
    */
    peek(offset) {
        let idx = this.chunkOff + offset, pos, result;
        if (idx >= 0 && idx < this.chunk.length) {
            // Fast path: the requested position is inside the current chunk.
            pos = this.pos + offset;
            result = this.chunk.charCodeAt(idx);
        }
        else {
            let resolved = this.resolveOffset(offset, 1);
            if (resolved == null)
                return -1;
            pos = resolved;
            if (pos >= this.chunk2Pos && pos < this.chunk2Pos + this.chunk2.length) {
                result = this.chunk2.charCodeAt(pos - this.chunk2Pos);
            }
            else {
                // Load a fresh backup chunk covering `pos`, clipped to the
                // range it falls in.
                let i = this.rangeIndex, range = this.range;
                while (range.to <= pos)
                    range = this.ranges[++i];
                this.chunk2 = this.input.chunk(this.chunk2Pos = pos);
                if (pos + this.chunk2.length > range.to)
                    this.chunk2 = this.chunk2.slice(0, range.to - pos);
                result = this.chunk2.charCodeAt(0);
            }
        }
        // Peeking counts as lookahead for incremental-parsing purposes.
        if (pos >= this.token.lookAhead)
            this.token.lookAhead = pos + 1;
        return result;
    }
    /**
    Accept a token. By default, the end of the token is set to the
    current stream position, but you can pass an offset (relative to
    the stream position) to change that.
    */
    acceptToken(token, endOffset = 0) {
        let end = endOffset ? this.resolveOffset(endOffset, -1) : this.pos;
        if (end == null || end < this.token.start)
            throw new RangeError("Token end out of bounds");
        this.token.value = token;
        this.token.end = end;
    }
    /**
    Accept a token ending at a specific given position.
    */
    acceptTokenTo(token, endPos) {
        this.token.value = token;
        this.token.end = endPos;
    }
    // Make `chunk` cover `this.pos`, either by swapping in the backup
    // chunk (when it already covers the position) or by reading a new
    // chunk from the input, clipped to the current range.
    getChunk() {
        if (this.pos >= this.chunk2Pos && this.pos < this.chunk2Pos + this.chunk2.length) {
            // Swap the main and backup chunks.
            let { chunk, chunkPos } = this;
            this.chunk = this.chunk2;
            this.chunkPos = this.chunk2Pos;
            this.chunk2 = chunk;
            this.chunk2Pos = chunkPos;
            this.chunkOff = this.pos - this.chunkPos;
        }
        else {
            this.chunk2 = this.chunk;
            this.chunk2Pos = this.chunkPos;
            let nextChunk = this.input.chunk(this.pos);
            let end = this.pos + nextChunk.length;
            this.chunk = end > this.range.to ? nextChunk.slice(0, this.range.to - this.pos) : nextChunk;
            this.chunkPos = this.pos;
            this.chunkOff = 0;
        }
    }
    // Update `this.next` with the code unit at the current position,
    // loading a new chunk when the current one is exhausted.
    readNext() {
        if (this.chunkOff >= this.chunk.length) {
            this.getChunk();
            if (this.chunkOff == this.chunk.length)
                return this.next = -1;
        }
        return this.next = this.chunk.charCodeAt(this.chunkOff);
    }
    /**
    Move the stream forward N (defaults to 1) code units. Returns
    the new value of [`next`](#lr.InputStream.next).
    */
    advance(n = 1) {
        this.chunkOff += n;
        while (this.pos + n >= this.range.to) {
            if (this.rangeIndex == this.ranges.length - 1)
                return this.setDone();
            // Step into the next range, skipping the gap.
            n -= this.range.to - this.pos;
            this.range = this.ranges[++this.rangeIndex];
            this.pos = this.range.from;
        }
        this.pos += n;
        if (this.pos >= this.token.lookAhead)
            this.token.lookAhead = this.pos + 1;
        return this.readNext();
    }
    // Put the stream into its end-of-input state.
    setDone() {
        this.pos = this.chunkPos = this.end;
        this.range = this.ranges[this.rangeIndex = this.ranges.length - 1];
        this.chunk = "";
        return this.next = -1;
    }
    /**
    Reposition the stream at `pos`, optionally installing a fresh
    token record to fill in. @internal
    */
    reset(pos, token) {
        if (token) {
            this.token = token;
            token.start = pos;
            token.lookAhead = pos + 1;
            token.value = token.extended = -1;
        }
        else {
            this.token = nullToken;
        }
        if (this.pos != pos) {
            this.pos = pos;
            if (pos == this.end) {
                this.setDone();
                return this;
            }
            // Re-sync `range`/`rangeIndex` with the new position.
            while (pos < this.range.from)
                this.range = this.ranges[--this.rangeIndex];
            while (pos >= this.range.to)
                this.range = this.ranges[++this.rangeIndex];
            if (pos >= this.chunkPos && pos < this.chunkPos + this.chunk.length) {
                this.chunkOff = pos - this.chunkPos;
            }
            else {
                // Outside the current chunk: force a reload on next read.
                this.chunk = "";
                this.chunkOff = 0;
            }
            this.readNext();
        }
        return this;
    }
    /**
    Read the input text between `from` and `to`, using the cached
    chunks when possible and stitching across ranges when necessary.
    @internal
    */
    read(from, to) {
        if (from >= this.chunkPos && to <= this.chunkPos + this.chunk.length)
            return this.chunk.slice(from - this.chunkPos, to - this.chunkPos);
        if (from >= this.chunk2Pos && to <= this.chunk2Pos + this.chunk2.length)
            return this.chunk2.slice(from - this.chunk2Pos, to - this.chunk2Pos);
        if (from >= this.range.from && to <= this.range.to)
            return this.input.read(from, to);
        let result = "";
        for (let r of this.ranges) {
            if (r.from >= to)
                break;
            if (r.to > from)
                result += this.input.read(Math.max(r.from, from), Math.min(r.to, to));
        }
        return result;
    }
}
895/**
896@internal
897*/
class TokenGroup {
    constructor(data, id) {
        this.data = data;
        this.id = id;
    }
    // Run the table-driven tokenizer for this group at the stream's
    // current position.
    token(input, stack) {
        const { parser } = stack.p;
        readToken(this.data, input, stack, this.id, parser.data, parser.tokenPrecTable);
    }
}
// Table-driven token groups never use the contextual/fallback/extend
// tokenizer features.
TokenGroup.prototype.contextual = false;
TokenGroup.prototype.fallback = false;
TokenGroup.prototype.extend = false;
909/**
910@hide
911*/
class LocalTokenGroup {
    /**
    Create a local token group. `data` is the tokenizer table
    (decoded when given as an encoded string), `precTable` the
    offset of the group's precedence table inside that data, and
    `elseToken`, when given, a token emitted to cover input that the
    table itself doesn't match.
    */
    constructor(data, precTable, elseToken) {
        this.precTable = precTable;
        this.elseToken = elseToken;
        this.data = typeof data == "string" ? decodeArray(data) : data;
    }
    token(input, stack) {
        let start = input.pos, skipped = 0;
        for (;;) {
            let atEof = input.next < 0, nextPos = input.resolveOffset(1, 1);
            readToken(this.data, input, stack, 0, this.data, this.precTable);
            if (input.token.value > -1)
                break;
            // No match here, and no fallback configured: give up.
            if (this.elseToken == null)
                return;
            if (!atEof)
                skipped++;
            if (nextPos == null)
                break;
            // Retry tokenizing one position further along.
            input.reset(nextPos, input.token);
        }
        if (skipped) {
            // Emit the fallback token covering the skipped stretch.
            input.reset(start, input.token);
            input.acceptToken(this.elseToken, skipped);
        }
    }
}
// FIX: these flags must live on LocalTokenGroup's own prototype. The
// previous code re-assigned `fallback`/`extend` on TokenGroup.prototype
// (a copy-paste of the TokenGroup line above, where they are already
// false), which left LocalTokenGroup instances with `undefined` for
// those properties instead of `false`.
LocalTokenGroup.prototype.contextual = false;
LocalTokenGroup.prototype.fallback = false;
LocalTokenGroup.prototype.extend = false;
940/**
941`@external tokens` declarations in the grammar should resolve to
942an instance of this class.
943*/
class ExternalTokenizer {
    /**
    Create a tokenizer. The first argument is the function that,
    given an input stream, scans for the types of tokens it
    recognizes at the stream's position, and calls
    [`acceptToken`](#lr.InputStream.acceptToken) when it finds
    one.
    */
    constructor(
    /**
    @internal
    */
    token, options = {}) {
        this.token = token;
        // Normalize the option flags to real booleans.
        this.contextual = Boolean(options.contextual);
        this.fallback = Boolean(options.fallback);
        this.extend = Boolean(options.extend);
    }
}
963// Tokenizer data is stored a big uint16 array containing, for each
964// state:
965//
966// - A group bitmask, indicating what token groups are reachable from
967// this state, so that paths that can only lead to tokens not in
968// any of the current groups can be cut off early.
969//
970// - The position of the end of the state's sequence of accepting
971// tokens
972//
973// - The number of outgoing edges for the state
974//
975// - The accepting tokens, as (token id, group mask) pairs
976//
977// - The outgoing edges, as (start character, end character, state
978// index) triples, with end character being exclusive
979//
980// This function interprets that data, running through a stream as
981// long as new states with the a matching group mask can be reached,
982// and updating `input.token` when it matches a token.
function readToken(data, input, stack, group, precTable, precOffset) {
    // State 0 is the tokenizer's start state; groupMask selects the
    // token group being scanned for.
    let state = 0, groupMask = 1 << group, { dialect } = stack.p.parser;
    scan: for (;;) {
        if ((groupMask & data[state]) == 0)
            break;
        let accEnd = data[state + 1];
        // Check whether this state can lead to a token in the current group
        // Accept tokens in this state, possibly overwriting
        // lower-precedence / shorter tokens
        for (let i = state + 3; i < accEnd; i += 2)
            if ((data[i + 1] & groupMask) > 0) {
                let term = data[i];
                if (dialect.allows(term) &&
                    (input.token.value == -1 || input.token.value == term ||
                        overrides(term, input.token.value, precTable, precOffset))) {
                    input.acceptToken(term);
                    break;
                }
            }
        let next = input.next, low = 0, high = data[state + 2];
        // Special case for EOF
        if (input.next < 0 && high > low && data[accEnd + high * 3 - 3] == 65535 /* Seq.End */) {
            state = data[accEnd + high * 3 - 1];
            continue scan;
        }
        // Do a binary search on the state's edges
        for (; low < high;) {
            let mid = (low + high) >> 1;
            // Each edge is a (from, to, target) triple starting at accEnd.
            let index = accEnd + mid + (mid << 1);
            let from = data[index], to = data[index + 1] || 0x10000;
            if (next < from)
                high = mid;
            else if (next >= to)
                low = mid + 1;
            else {
                // Edge matched: follow it and consume the code unit.
                state = data[index + 2];
                input.advance();
                continue scan;
            }
        }
        break;
    }
}
1026function findOffset(data, start, term) {
1027 for (let i = start, next; (next = data[i]) != 65535 /* Seq.End */; i++)
1028 if (next == term)
1029 return i - start;
1030 return -1;
1031}
1032function overrides(token, prev, tableData, tableOffset) {
1033 let iPrev = findOffset(tableData, tableOffset, prev);
1034 return iPrev < 0 || findOffset(tableData, tableOffset, token) < iPrev;
1035}
1036
1037// Environment variable used to control console output
1038const verbose = typeof process != "undefined" && process.env && /\bparse\b/.test(process.env.LOG);
1039let stackIDs = null;
1040function cutAt(tree, pos, side) {
1041 let cursor = tree.cursor(IterMode.IncludeAnonymous);
1042 cursor.moveTo(pos);
1043 for (;;) {
1044 if (!(side < 0 ? cursor.childBefore(pos) : cursor.childAfter(pos)))
1045 for (;;) {
1046 if ((side < 0 ? cursor.to < pos : cursor.from > pos) && !cursor.type.isError)
1047 return side < 0 ? Math.max(0, Math.min(cursor.to - 1, pos - 25 /* Lookahead.Margin */))
1048 : Math.min(tree.length, Math.max(cursor.from + 1, pos + 25 /* Lookahead.Margin */));
1049 if (side < 0 ? cursor.prevSibling() : cursor.nextSibling())
1050 break;
1051 if (!cursor.parent())
1052 return side < 0 ? 0 : tree.length;
1053 }
1054 }
1055}
1056class FragmentCursor {
1057 constructor(fragments, nodeSet) {
1058 this.fragments = fragments;
1059 this.nodeSet = nodeSet;
1060 this.i = 0;
1061 this.fragment = null;
1062 this.safeFrom = -1;
1063 this.safeTo = -1;
1064 this.trees = [];
1065 this.start = [];
1066 this.index = [];
1067 this.nextFragment();
1068 }
1069 nextFragment() {
1070 let fr = this.fragment = this.i == this.fragments.length ? null : this.fragments[this.i++];
1071 if (fr) {
1072 this.safeFrom = fr.openStart ? cutAt(fr.tree, fr.from + fr.offset, 1) - fr.offset : fr.from;
1073 this.safeTo = fr.openEnd ? cutAt(fr.tree, fr.to + fr.offset, -1) - fr.offset : fr.to;
1074 while (this.trees.length) {
1075 this.trees.pop();
1076 this.start.pop();
1077 this.index.pop();
1078 }
1079 this.trees.push(fr.tree);
1080 this.start.push(-fr.offset);
1081 this.index.push(0);
1082 this.nextStart = this.safeFrom;
1083 }
1084 else {
1085 this.nextStart = 1e9;
1086 }
1087 }
1088 // `pos` must be >= any previously given `pos` for this cursor
1089 nodeAt(pos) {
1090 if (pos < this.nextStart)
1091 return null;
1092 while (this.fragment && this.safeTo <= pos)
1093 this.nextFragment();
1094 if (!this.fragment)
1095 return null;
1096 for (;;) {
1097 let last = this.trees.length - 1;
1098 if (last < 0) { // End of tree
1099 this.nextFragment();
1100 return null;
1101 }
1102 let top = this.trees[last], index = this.index[last];
1103 if (index == top.children.length) {
1104 this.trees.pop();
1105 this.start.pop();
1106 this.index.pop();
1107 continue;
1108 }
1109 let next = top.children[index];
1110 let start = this.start[last] + top.positions[index];
1111 if (start > pos) {
1112 this.nextStart = start;
1113 return null;
1114 }
1115 if (next instanceof Tree) {
1116 if (start == pos) {
1117 if (start < this.safeFrom)
1118 return null;
1119 let end = start + next.length;
1120 if (end <= this.safeTo) {
1121 let lookAhead = next.prop(NodeProp.lookAhead);
1122 if (!lookAhead || end + lookAhead < this.fragment.to)
1123 return next;
1124 }
1125 }
1126 this.index[last]++;
1127 if (start + next.length >= Math.max(this.safeFrom, pos)) { // Enter this node
1128 this.trees.push(next);
1129 this.start.push(start);
1130 this.index.push(0);
1131 }
1132 }
1133 else {
1134 this.index[last]++;
1135 this.nextStart = start + next.length;
1136 }
1137 }
1138 }
1139}
1140class TokenCache {
1141 constructor(parser, stream) {
1142 this.stream = stream;
1143 this.tokens = [];
1144 this.mainToken = null;
1145 this.actions = [];
1146 this.tokens = parser.tokenizers.map(_ => new CachedToken);
1147 }
1148 getActions(stack) {
1149 let actionIndex = 0;
1150 let main = null;
1151 let { parser } = stack.p, { tokenizers } = parser;
1152 let mask = parser.stateSlot(stack.state, 3 /* ParseState.TokenizerMask */);
1153 let context = stack.curContext ? stack.curContext.hash : 0;
1154 let lookAhead = 0;
1155 for (let i = 0; i < tokenizers.length; i++) {
1156 if (((1 << i) & mask) == 0)
1157 continue;
1158 let tokenizer = tokenizers[i], token = this.tokens[i];
1159 if (main && !tokenizer.fallback)
1160 continue;
1161 if (tokenizer.contextual || token.start != stack.pos || token.mask != mask || token.context != context) {
1162 this.updateCachedToken(token, tokenizer, stack);
1163 token.mask = mask;
1164 token.context = context;
1165 }
1166 if (token.lookAhead > token.end + 25 /* Lookahead.Margin */)
1167 lookAhead = Math.max(token.lookAhead, lookAhead);
1168 if (token.value != 0 /* Term.Err */) {
1169 let startIndex = actionIndex;
1170 if (token.extended > -1)
1171 actionIndex = this.addActions(stack, token.extended, token.end, actionIndex);
1172 actionIndex = this.addActions(stack, token.value, token.end, actionIndex);
1173 if (!tokenizer.extend) {
1174 main = token;
1175 if (actionIndex > startIndex)
1176 break;
1177 }
1178 }
1179 }
1180 while (this.actions.length > actionIndex)
1181 this.actions.pop();
1182 if (lookAhead)
1183 stack.setLookAhead(lookAhead);
1184 if (!main && stack.pos == this.stream.end) {
1185 main = new CachedToken;
1186 main.value = stack.p.parser.eofTerm;
1187 main.start = main.end = stack.pos;
1188 actionIndex = this.addActions(stack, main.value, main.end, actionIndex);
1189 }
1190 this.mainToken = main;
1191 return this.actions;
1192 }
1193 getMainToken(stack) {
1194 if (this.mainToken)
1195 return this.mainToken;
1196 let main = new CachedToken, { pos, p } = stack;
1197 main.start = pos;
1198 main.end = Math.min(pos + 1, p.stream.end);
1199 main.value = pos == p.stream.end ? p.parser.eofTerm : 0 /* Term.Err */;
1200 return main;
1201 }
1202 updateCachedToken(token, tokenizer, stack) {
1203 let start = this.stream.clipPos(stack.pos);
1204 tokenizer.token(this.stream.reset(start, token), stack);
1205 if (token.value > -1) {
1206 let { parser } = stack.p;
1207 for (let i = 0; i < parser.specialized.length; i++)
1208 if (parser.specialized[i] == token.value) {
1209 let result = parser.specializers[i](this.stream.read(token.start, token.end), stack);
1210 if (result >= 0 && stack.p.parser.dialect.allows(result >> 1)) {
1211 if ((result & 1) == 0 /* Specialize.Specialize */)
1212 token.value = result >> 1;
1213 else
1214 token.extended = result >> 1;
1215 break;
1216 }
1217 }
1218 }
1219 else {
1220 token.value = 0 /* Term.Err */;
1221 token.end = this.stream.clipPos(start + 1);
1222 }
1223 }
1224 putAction(action, token, end, index) {
1225 // Don't add duplicate actions
1226 for (let i = 0; i < index; i += 3)
1227 if (this.actions[i] == action)
1228 return index;
1229 this.actions[index++] = action;
1230 this.actions[index++] = token;
1231 this.actions[index++] = end;
1232 return index;
1233 }
1234 addActions(stack, token, end, index) {
1235 let { state } = stack, { parser } = stack.p, { data } = parser;
1236 for (let set = 0; set < 2; set++) {
1237 for (let i = parser.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */);; i += 3) {
1238 if (data[i] == 65535 /* Seq.End */) {
1239 if (data[i + 1] == 1 /* Seq.Next */) {
1240 i = pair(data, i + 2);
1241 }
1242 else {
1243 if (index == 0 && data[i + 1] == 2 /* Seq.Other */)
1244 index = this.putAction(pair(data, i + 2), token, end, index);
1245 break;
1246 }
1247 }
1248 if (data[i] == token)
1249 index = this.putAction(pair(data, i + 1), token, end, index);
1250 }
1251 }
1252 return index;
1253 }
1254}
1255class Parse {
1256 constructor(parser, input, fragments, ranges) {
1257 this.parser = parser;
1258 this.input = input;
1259 this.ranges = ranges;
1260 this.recovering = 0;
1261 this.nextStackID = 0x2654; // ♔, ♕, ♖, ♗, ♘, ♙, ♠, ♡, ♢, ♣, ♤, ♥, ♦, ♧
1262 this.minStackPos = 0;
1263 this.reused = [];
1264 this.stoppedAt = null;
1265 this.lastBigReductionStart = -1;
1266 this.lastBigReductionSize = 0;
1267 this.bigReductionCount = 0;
1268 this.stream = new InputStream(input, ranges);
1269 this.tokens = new TokenCache(parser, this.stream);
1270 this.topTerm = parser.top[1];
1271 let { from } = ranges[0];
1272 this.stacks = [Stack.start(this, parser.top[0], from)];
1273 this.fragments = fragments.length && this.stream.end - from > parser.bufferLength * 4
1274 ? new FragmentCursor(fragments, parser.nodeSet) : null;
1275 }
1276 get parsedPos() {
1277 return this.minStackPos;
1278 }
1279 // Move the parser forward. This will process all parse stacks at
1280 // `this.pos` and try to advance them to a further position. If no
1281 // stack for such a position is found, it'll start error-recovery.
1282 //
1283 // When the parse is finished, this will return a syntax tree. When
1284 // not, it returns `null`.
1285 advance() {
1286 let stacks = this.stacks, pos = this.minStackPos;
1287 // This will hold stacks beyond `pos`.
1288 let newStacks = this.stacks = [];
1289 let stopped, stoppedTokens;
1290 // If a large amount of reductions happened with the same start
1291 // position, force the stack out of that production in order to
1292 // avoid creating a tree too deep to recurse through.
1293 // (This is an ugly kludge, because unfortunately there is no
1294 // straightforward, cheap way to check for this happening, due to
1295 // the history of reductions only being available in an
1296 // expensive-to-access format in the stack buffers.)
1297 if (this.bigReductionCount > 300 /* Rec.MaxLeftAssociativeReductionCount */ && stacks.length == 1) {
1298 let [s] = stacks;
1299 while (s.forceReduce() && s.stack.length && s.stack[s.stack.length - 2] >= this.lastBigReductionStart) { }
1300 this.bigReductionCount = this.lastBigReductionSize = 0;
1301 }
1302 // Keep advancing any stacks at `pos` until they either move
1303 // forward or can't be advanced. Gather stacks that can't be
1304 // advanced further in `stopped`.
1305 for (let i = 0; i < stacks.length; i++) {
1306 let stack = stacks[i];
1307 for (;;) {
1308 this.tokens.mainToken = null;
1309 if (stack.pos > pos) {
1310 newStacks.push(stack);
1311 }
1312 else if (this.advanceStack(stack, newStacks, stacks)) {
1313 continue;
1314 }
1315 else {
1316 if (!stopped) {
1317 stopped = [];
1318 stoppedTokens = [];
1319 }
1320 stopped.push(stack);
1321 let tok = this.tokens.getMainToken(stack);
1322 stoppedTokens.push(tok.value, tok.end);
1323 }
1324 break;
1325 }
1326 }
1327 if (!newStacks.length) {
1328 let finished = stopped && findFinished(stopped);
1329 if (finished) {
1330 if (verbose)
1331 console.log("Finish with " + this.stackID(finished));
1332 return this.stackToTree(finished);
1333 }
1334 if (this.parser.strict) {
1335 if (verbose && stopped)
1336 console.log("Stuck with token " + (this.tokens.mainToken ? this.parser.getName(this.tokens.mainToken.value) : "none"));
1337 throw new SyntaxError("No parse at " + pos);
1338 }
1339 if (!this.recovering)
1340 this.recovering = 5 /* Rec.Distance */;
1341 }
1342 if (this.recovering && stopped) {
1343 let finished = this.stoppedAt != null && stopped[0].pos > this.stoppedAt ? stopped[0]
1344 : this.runRecovery(stopped, stoppedTokens, newStacks);
1345 if (finished) {
1346 if (verbose)
1347 console.log("Force-finish " + this.stackID(finished));
1348 return this.stackToTree(finished.forceAll());
1349 }
1350 }
1351 if (this.recovering) {
1352 let maxRemaining = this.recovering == 1 ? 1 : this.recovering * 3 /* Rec.MaxRemainingPerStep */;
1353 if (newStacks.length > maxRemaining) {
1354 newStacks.sort((a, b) => b.score - a.score);
1355 while (newStacks.length > maxRemaining)
1356 newStacks.pop();
1357 }
1358 if (newStacks.some(s => s.reducePos > pos))
1359 this.recovering--;
1360 }
1361 else if (newStacks.length > 1) {
1362 // Prune stacks that are in the same state, or that have been
1363 // running without splitting for a while, to avoid getting stuck
1364 // with multiple successful stacks running endlessly on.
1365 outer: for (let i = 0; i < newStacks.length - 1; i++) {
1366 let stack = newStacks[i];
1367 for (let j = i + 1; j < newStacks.length; j++) {
1368 let other = newStacks[j];
1369 if (stack.sameState(other) ||
1370 stack.buffer.length > 500 /* Rec.MinBufferLengthPrune */ && other.buffer.length > 500 /* Rec.MinBufferLengthPrune */) {
1371 if (((stack.score - other.score) || (stack.buffer.length - other.buffer.length)) > 0) {
1372 newStacks.splice(j--, 1);
1373 }
1374 else {
1375 newStacks.splice(i--, 1);
1376 continue outer;
1377 }
1378 }
1379 }
1380 }
1381 if (newStacks.length > 12 /* Rec.MaxStackCount */) {
1382 newStacks.sort((a, b) => b.score - a.score);
1383 newStacks.splice(12 /* Rec.MaxStackCount */, newStacks.length - 12 /* Rec.MaxStackCount */);
1384 }
1385 }
1386 this.minStackPos = newStacks[0].pos;
1387 for (let i = 1; i < newStacks.length; i++)
1388 if (newStacks[i].pos < this.minStackPos)
1389 this.minStackPos = newStacks[i].pos;
1390 return null;
1391 }
1392 stopAt(pos) {
1393 if (this.stoppedAt != null && this.stoppedAt < pos)
1394 throw new RangeError("Can't move stoppedAt forward");
1395 this.stoppedAt = pos;
1396 }
1397 // Returns an updated version of the given stack, or null if the
1398 // stack can't advance normally. When `split` and `stacks` are
1399 // given, stacks split off by ambiguous operations will be pushed to
1400 // `split`, or added to `stacks` if they move `pos` forward.
1401 advanceStack(stack, stacks, split) {
1402 let start = stack.pos, { parser } = this;
1403 let base = verbose ? this.stackID(stack) + " -> " : "";
1404 if (this.stoppedAt != null && start > this.stoppedAt)
1405 return stack.forceReduce() ? stack : null;
1406 if (this.fragments) {
1407 let strictCx = stack.curContext && stack.curContext.tracker.strict, cxHash = strictCx ? stack.curContext.hash : 0;
1408 for (let cached = this.fragments.nodeAt(start); cached;) {
1409 let match = this.parser.nodeSet.types[cached.type.id] == cached.type ? parser.getGoto(stack.state, cached.type.id) : -1;
1410 if (match > -1 && cached.length && (!strictCx || (cached.prop(NodeProp.contextHash) || 0) == cxHash)) {
1411 stack.useNode(cached, match);
1412 if (verbose)
1413 console.log(base + this.stackID(stack) + ` (via reuse of ${parser.getName(cached.type.id)})`);
1414 return true;
1415 }
1416 if (!(cached instanceof Tree) || cached.children.length == 0 || cached.positions[0] > 0)
1417 break;
1418 let inner = cached.children[0];
1419 if (inner instanceof Tree && cached.positions[0] == 0)
1420 cached = inner;
1421 else
1422 break;
1423 }
1424 }
1425 let defaultReduce = parser.stateSlot(stack.state, 4 /* ParseState.DefaultReduce */);
1426 if (defaultReduce > 0) {
1427 stack.reduce(defaultReduce);
1428 if (verbose)
1429 console.log(base + this.stackID(stack) + ` (via always-reduce ${parser.getName(defaultReduce & 65535 /* Action.ValueMask */)})`);
1430 return true;
1431 }
1432 if (stack.stack.length >= 8400 /* Rec.CutDepth */) {
1433 while (stack.stack.length > 6000 /* Rec.CutTo */ && stack.forceReduce()) { }
1434 }
1435 let actions = this.tokens.getActions(stack);
1436 for (let i = 0; i < actions.length;) {
1437 let action = actions[i++], term = actions[i++], end = actions[i++];
1438 let last = i == actions.length || !split;
1439 let localStack = last ? stack : stack.split();
1440 let main = this.tokens.mainToken;
1441 localStack.apply(action, term, main ? main.start : localStack.pos, end);
1442 if (verbose)
1443 console.log(base + this.stackID(localStack) + ` (via ${(action & 65536 /* Action.ReduceFlag */) == 0 ? "shift"
1444 : `reduce of ${parser.getName(action & 65535 /* Action.ValueMask */)}`} for ${parser.getName(term)} @ ${start}${localStack == stack ? "" : ", split"})`);
1445 if (last)
1446 return true;
1447 else if (localStack.pos > start)
1448 stacks.push(localStack);
1449 else
1450 split.push(localStack);
1451 }
1452 return false;
1453 }
1454 // Advance a given stack forward as far as it will go. Returns the
1455 // (possibly updated) stack if it got stuck, or null if it moved
1456 // forward and was given to `pushStackDedup`.
1457 advanceFully(stack, newStacks) {
1458 let pos = stack.pos;
1459 for (;;) {
1460 if (!this.advanceStack(stack, null, null))
1461 return false;
1462 if (stack.pos > pos) {
1463 pushStackDedup(stack, newStacks);
1464 return true;
1465 }
1466 }
1467 }
1468 runRecovery(stacks, tokens, newStacks) {
1469 let finished = null, restarted = false;
1470 for (let i = 0; i < stacks.length; i++) {
1471 let stack = stacks[i], token = tokens[i << 1], tokenEnd = tokens[(i << 1) + 1];
1472 let base = verbose ? this.stackID(stack) + " -> " : "";
1473 if (stack.deadEnd) {
1474 if (restarted)
1475 continue;
1476 restarted = true;
1477 stack.restart();
1478 if (verbose)
1479 console.log(base + this.stackID(stack) + " (restarted)");
1480 let done = this.advanceFully(stack, newStacks);
1481 if (done)
1482 continue;
1483 }
1484 let force = stack.split(), forceBase = base;
1485 for (let j = 0; j < 10 /* Rec.ForceReduceLimit */ && force.forceReduce(); j++) {
1486 if (verbose)
1487 console.log(forceBase + this.stackID(force) + " (via force-reduce)");
1488 let done = this.advanceFully(force, newStacks);
1489 if (done)
1490 break;
1491 if (verbose)
1492 forceBase = this.stackID(force) + " -> ";
1493 }
1494 for (let insert of stack.recoverByInsert(token)) {
1495 if (verbose)
1496 console.log(base + this.stackID(insert) + " (via recover-insert)");
1497 this.advanceFully(insert, newStacks);
1498 }
1499 if (this.stream.end > stack.pos) {
1500 if (tokenEnd == stack.pos) {
1501 tokenEnd++;
1502 token = 0 /* Term.Err */;
1503 }
1504 stack.recoverByDelete(token, tokenEnd);
1505 if (verbose)
1506 console.log(base + this.stackID(stack) + ` (via recover-delete ${this.parser.getName(token)})`);
1507 pushStackDedup(stack, newStacks);
1508 }
1509 else if (!finished || finished.score < force.score) {
1510 finished = force;
1511 }
1512 }
1513 return finished;
1514 }
1515 // Convert the stack's buffer to a syntax tree.
1516 stackToTree(stack) {
1517 stack.close();
1518 return Tree.build({ buffer: StackBufferCursor.create(stack),
1519 nodeSet: this.parser.nodeSet,
1520 topID: this.topTerm,
1521 maxBufferLength: this.parser.bufferLength,
1522 reused: this.reused,
1523 start: this.ranges[0].from,
1524 length: stack.pos - this.ranges[0].from,
1525 minRepeatType: this.parser.minRepeatTerm });
1526 }
1527 stackID(stack) {
1528 let id = (stackIDs || (stackIDs = new WeakMap)).get(stack);
1529 if (!id)
1530 stackIDs.set(stack, id = String.fromCodePoint(this.nextStackID++));
1531 return id + stack;
1532 }
1533}
1534function pushStackDedup(stack, newStacks) {
1535 for (let i = 0; i < newStacks.length; i++) {
1536 let other = newStacks[i];
1537 if (other.pos == stack.pos && other.sameState(stack)) {
1538 if (newStacks[i].score < stack.score)
1539 newStacks[i] = stack;
1540 return;
1541 }
1542 }
1543 newStacks.push(stack);
1544}
1545class Dialect {
1546 constructor(source, flags, disabled) {
1547 this.source = source;
1548 this.flags = flags;
1549 this.disabled = disabled;
1550 }
1551 allows(term) { return !this.disabled || this.disabled[term] == 0; }
1552}
1553const id = x => x;
1554/**
1555Context trackers are used to track stateful context (such as
1556indentation in the Python grammar, or parent elements in the XML
1557grammar) needed by external tokenizers. You declare them in a
1558grammar file as `@context exportName from "module"`.
1559
1560Context values should be immutable, and can be updated (replaced)
1561on shift or reduce actions.
1562
1563The export used in a `@context` declaration should be of this
1564type.
1565*/
1566class ContextTracker {
1567 /**
1568 Define a context tracker.
1569 */
1570 constructor(spec) {
1571 this.start = spec.start;
1572 this.shift = spec.shift || id;
1573 this.reduce = spec.reduce || id;
1574 this.reuse = spec.reuse || id;
1575 this.hash = spec.hash || (() => 0);
1576 this.strict = spec.strict !== false;
1577 }
1578}
1579/**
1580Holds the parse tables for a given grammar, as generated by
1581`lezer-generator`, and provides [methods](#common.Parser) to parse
1582content with.
1583*/
1584class LRParser extends Parser {
1585 /**
1586 @internal
1587 */
1588 constructor(spec) {
1589 super();
1590 /**
1591 @internal
1592 */
1593 this.wrappers = [];
1594 if (spec.version != 14 /* File.Version */)
1595 throw new RangeError(`Parser version (${spec.version}) doesn't match runtime version (${14 /* File.Version */})`);
1596 let nodeNames = spec.nodeNames.split(" ");
1597 this.minRepeatTerm = nodeNames.length;
1598 for (let i = 0; i < spec.repeatNodeCount; i++)
1599 nodeNames.push("");
1600 let topTerms = Object.keys(spec.topRules).map(r => spec.topRules[r][1]);
1601 let nodeProps = [];
1602 for (let i = 0; i < nodeNames.length; i++)
1603 nodeProps.push([]);
1604 function setProp(nodeID, prop, value) {
1605 nodeProps[nodeID].push([prop, prop.deserialize(String(value))]);
1606 }
1607 if (spec.nodeProps)
1608 for (let propSpec of spec.nodeProps) {
1609 let prop = propSpec[0];
1610 if (typeof prop == "string")
1611 prop = NodeProp[prop];
1612 for (let i = 1; i < propSpec.length;) {
1613 let next = propSpec[i++];
1614 if (next >= 0) {
1615 setProp(next, prop, propSpec[i++]);
1616 }
1617 else {
1618 let value = propSpec[i + -next];
1619 for (let j = -next; j > 0; j--)
1620 setProp(propSpec[i++], prop, value);
1621 i++;
1622 }
1623 }
1624 }
1625 this.nodeSet = new NodeSet(nodeNames.map((name, i) => NodeType.define({
1626 name: i >= this.minRepeatTerm ? undefined : name,
1627 id: i,
1628 props: nodeProps[i],
1629 top: topTerms.indexOf(i) > -1,
1630 error: i == 0,
1631 skipped: spec.skippedNodes && spec.skippedNodes.indexOf(i) > -1
1632 })));
1633 if (spec.propSources)
1634 this.nodeSet = this.nodeSet.extend(...spec.propSources);
1635 this.strict = false;
1636 this.bufferLength = DefaultBufferLength;
1637 let tokenArray = decodeArray(spec.tokenData);
1638 this.context = spec.context;
1639 this.specializerSpecs = spec.specialized || [];
1640 this.specialized = new Uint16Array(this.specializerSpecs.length);
1641 for (let i = 0; i < this.specializerSpecs.length; i++)
1642 this.specialized[i] = this.specializerSpecs[i].term;
1643 this.specializers = this.specializerSpecs.map(getSpecializer);
1644 this.states = decodeArray(spec.states, Uint32Array);
1645 this.data = decodeArray(spec.stateData);
1646 this.goto = decodeArray(spec.goto);
1647 this.maxTerm = spec.maxTerm;
1648 this.tokenizers = spec.tokenizers.map(value => typeof value == "number" ? new TokenGroup(tokenArray, value) : value);
1649 this.topRules = spec.topRules;
1650 this.dialects = spec.dialects || {};
1651 this.dynamicPrecedences = spec.dynamicPrecedences || null;
1652 this.tokenPrecTable = spec.tokenPrec;
1653 this.termNames = spec.termNames || null;
1654 this.maxNode = this.nodeSet.types.length - 1;
1655 this.dialect = this.parseDialect();
1656 this.top = this.topRules[Object.keys(this.topRules)[0]];
1657 }
1658 createParse(input, fragments, ranges) {
1659 let parse = new Parse(this, input, fragments, ranges);
1660 for (let w of this.wrappers)
1661 parse = w(parse, input, fragments, ranges);
1662 return parse;
1663 }
1664 /**
1665 Get a goto table entry @internal
1666 */
1667 getGoto(state, term, loose = false) {
1668 let table = this.goto;
1669 if (term >= table[0])
1670 return -1;
1671 for (let pos = table[term + 1];;) {
1672 let groupTag = table[pos++], last = groupTag & 1;
1673 let target = table[pos++];
1674 if (last && loose)
1675 return target;
1676 for (let end = pos + (groupTag >> 1); pos < end; pos++)
1677 if (table[pos] == state)
1678 return target;
1679 if (last)
1680 return -1;
1681 }
1682 }
1683 /**
1684 Check if this state has an action for a given terminal @internal
1685 */
1686 hasAction(state, terminal) {
1687 let data = this.data;
1688 for (let set = 0; set < 2; set++) {
1689 for (let i = this.stateSlot(state, set ? 2 /* ParseState.Skip */ : 1 /* ParseState.Actions */), next;; i += 3) {
1690 if ((next = data[i]) == 65535 /* Seq.End */) {
1691 if (data[i + 1] == 1 /* Seq.Next */)
1692 next = data[i = pair(data, i + 2)];
1693 else if (data[i + 1] == 2 /* Seq.Other */)
1694 return pair(data, i + 2);
1695 else
1696 break;
1697 }
1698 if (next == terminal || next == 0 /* Term.Err */)
1699 return pair(data, i + 1);
1700 }
1701 }
1702 return 0;
1703 }
1704 /**
1705 @internal
1706 */
1707 stateSlot(state, slot) {
1708 return this.states[(state * 6 /* ParseState.Size */) + slot];
1709 }
1710 /**
1711 @internal
1712 */
1713 stateFlag(state, flag) {
1714 return (this.stateSlot(state, 0 /* ParseState.Flags */) & flag) > 0;
1715 }
1716 /**
1717 @internal
1718 */
1719 validAction(state, action) {
1720 return !!this.allActions(state, a => a == action ? true : null);
1721 }
1722 /**
1723 @internal
1724 */
1725 allActions(state, action) {
1726 let deflt = this.stateSlot(state, 4 /* ParseState.DefaultReduce */);
1727 let result = deflt ? action(deflt) : undefined;
1728 for (let i = this.stateSlot(state, 1 /* ParseState.Actions */); result == null; i += 3) {
1729 if (this.data[i] == 65535 /* Seq.End */) {
1730 if (this.data[i + 1] == 1 /* Seq.Next */)
1731 i = pair(this.data, i + 2);
1732 else
1733 break;
1734 }
1735 result = action(pair(this.data, i + 1));
1736 }
1737 return result;
1738 }
1739 /**
1740 Get the states that can follow this one through shift actions or
1741 goto jumps. @internal
1742 */
1743 nextStates(state) {
1744 let result = [];
1745 for (let i = this.stateSlot(state, 1 /* ParseState.Actions */);; i += 3) {
1746 if (this.data[i] == 65535 /* Seq.End */) {
1747 if (this.data[i + 1] == 1 /* Seq.Next */)
1748 i = pair(this.data, i + 2);
1749 else
1750 break;
1751 }
1752 if ((this.data[i + 2] & (65536 /* Action.ReduceFlag */ >> 16)) == 0) {
1753 let value = this.data[i + 1];
1754 if (!result.some((v, i) => (i & 1) && v == value))
1755 result.push(this.data[i], value);
1756 }
1757 }
1758 return result;
1759 }
1760 /**
1761 Configure the parser. Returns a new parser instance that has the
1762 given settings modified. Settings not provided in `config` are
1763 kept from the original parser.
1764 */
1765 configure(config) {
1766 // Hideous reflection-based kludge to make it easy to create a
1767 // slightly modified copy of a parser.
1768 let copy = Object.assign(Object.create(LRParser.prototype), this);
1769 if (config.props)
1770 copy.nodeSet = this.nodeSet.extend(...config.props);
1771 if (config.top) {
1772 let info = this.topRules[config.top];
1773 if (!info)
1774 throw new RangeError(`Invalid top rule name ${config.top}`);
1775 copy.top = info;
1776 }
1777 if (config.tokenizers)
1778 copy.tokenizers = this.tokenizers.map(t => {
1779 let found = config.tokenizers.find(r => r.from == t);
1780 return found ? found.to : t;
1781 });
1782 if (config.specializers) {
1783 copy.specializers = this.specializers.slice();
1784 copy.specializerSpecs = this.specializerSpecs.map((s, i) => {
1785 let found = config.specializers.find(r => r.from == s.external);
1786 if (!found)
1787 return s;
1788 let spec = Object.assign(Object.assign({}, s), { external: found.to });
1789 copy.specializers[i] = getSpecializer(spec);
1790 return spec;
1791 });
1792 }
1793 if (config.contextTracker)
1794 copy.context = config.contextTracker;
1795 if (config.dialect)
1796 copy.dialect = this.parseDialect(config.dialect);
1797 if (config.strict != null)
1798 copy.strict = config.strict;
1799 if (config.wrap)
1800 copy.wrappers = copy.wrappers.concat(config.wrap);
1801 if (config.bufferLength != null)
1802 copy.bufferLength = config.bufferLength;
1803 return copy;
1804 }
1805 /**
1806 Tells you whether any [parse wrappers](#lr.ParserConfig.wrap)
1807 are registered for this parser.
1808 */
1809 hasWrappers() {
1810 return this.wrappers.length > 0;
1811 }
1812 /**
1813 Returns the name associated with a given term. This will only
1814 work for all terms when the parser was generated with the
1815 `--names` option. By default, only the names of tagged terms are
1816 stored.
1817 */
1818 getName(term) {
1819 return this.termNames ? this.termNames[term] : String(term <= this.maxNode && this.nodeSet.types[term].name || term);
1820 }
1821 /**
1822 The eof term id is always allocated directly after the node
1823 types. @internal
1824 */
1825 get eofTerm() { return this.maxNode + 1; }
1826 /**
1827 The type of top node produced by the parser.
1828 */
1829 get topNode() { return this.nodeSet.types[this.top[1]]; }
1830 /**
1831 @internal
1832 */
1833 dynamicPrecedence(term) {
1834 let prec = this.dynamicPrecedences;
1835 return prec == null ? 0 : prec[term] || 0;
1836 }
1837 /**
1838 @internal
1839 */
1840 parseDialect(dialect) {
1841 let values = Object.keys(this.dialects), flags = values.map(() => false);
1842 if (dialect)
1843 for (let part of dialect.split(" ")) {
1844 let id = values.indexOf(part);
1845 if (id >= 0)
1846 flags[id] = true;
1847 }
1848 let disabled = null;
1849 for (let i = 0; i < values.length; i++)
1850 if (!flags[i]) {
1851 for (let j = this.dialects[values[i]], id; (id = this.data[j++]) != 65535 /* Seq.End */;)
1852 (disabled || (disabled = new Uint8Array(this.maxTerm + 1)))[id] = 1;
1853 }
1854 return new Dialect(dialect, flags, disabled);
1855 }
1856 /**
1857 Used by the output of the parser generator. Not available to
1858 user code. @hide
1859 */
1860 static deserialize(spec) {
1861 return new LRParser(spec);
1862 }
1863}
1864function pair(data, off) { return data[off] | (data[off + 1] << 16); }
1865function findFinished(stacks) {
1866 let best = null;
1867 for (let stack of stacks) {
1868 let stopped = stack.p.stoppedAt;
1869 if ((stack.pos == stack.p.stream.end || stopped != null && stack.pos > stopped) &&
1870 stack.p.parser.stateFlag(stack.state, 2 /* StateFlag.Accepting */) &&
1871 (!best || best.score < stack.score))
1872 best = stack;
1873 }
1874 return best;
1875}
1876function getSpecializer(spec) {
1877 if (spec.external) {
1878 let mask = spec.extend ? 1 /* Specialize.Extend */ : 0 /* Specialize.Specialize */;
1879 return (value, stack) => (spec.external(value, stack) << 1) | mask;
1880 }
1881 return spec.get;
1882}
1883
1884export { ContextTracker, ExternalTokenizer, InputStream, LRParser, LocalTokenGroup, Stack };