// Markdown parser fork with extended syntax, for personal use.
1//! Data occurs in the [string][] and [text][] content types.
2//!
3//! It can include anything (except for line endings) and stops at certain
4//! characters.
5//!
6//! [string]: crate::construct::string
7//! [text]: crate::construct::text
8
9use crate::event::{Kind, Name};
10use crate::state::{Name as StateName, State};
11use crate::subtokenize::Subresult;
12use crate::tokenizer::Tokenizer;
13use alloc::vec;
14
15/// At beginning of data.
16///
17/// ```markdown
18/// > | abc
19/// ^
20/// ```
21pub fn start(tokenizer: &mut Tokenizer) -> State {
22 // Make sure to eat the first `markers`.
23 if let Some(byte) = tokenizer.current {
24 if tokenizer.tokenize_state.markers.contains(&byte) {
25 tokenizer.enter(Name::Data);
26 tokenizer.consume();
27 return State::Next(StateName::DataInside);
28 }
29 }
30
31 State::Retry(StateName::DataAtBreak)
32}
33
34/// Before something.
35///
36/// ```markdown
37/// > | abc
38/// ^
39/// ```
40pub fn at_break(tokenizer: &mut Tokenizer) -> State {
41 if let Some(byte) = tokenizer.current {
42 if !tokenizer.tokenize_state.markers.contains(&byte) {
43 if byte == b'\n' {
44 tokenizer.enter(Name::LineEnding);
45 tokenizer.consume();
46 tokenizer.exit(Name::LineEnding);
47 return State::Next(StateName::DataAtBreak);
48 }
49 tokenizer.enter(Name::Data);
50 return State::Retry(StateName::DataInside);
51 }
52 }
53
54 State::Ok
55}
56
57/// In data.
58///
59/// ```markdown
60/// > | abc
61/// ^^^
62/// ```
63pub fn inside(tokenizer: &mut Tokenizer) -> State {
64 if let Some(byte) = tokenizer.current {
65 if byte != b'\n' && !tokenizer.tokenize_state.markers.contains(&byte) {
66 tokenizer.consume();
67 return State::Next(StateName::DataInside);
68 }
69 }
70
71 tokenizer.exit(Name::Data);
72 State::Retry(StateName::DataAtBreak)
73}
74
/// Merge adjacent data events.
///
/// Tokenizing can produce several back-to-back `Data` enter/exit pairs (for
/// example when a marker byte turned out not to start a construct).  This
/// resolver collapses each such run into a single pair by removing the
/// intermediate exit/enter events and stretching the first pair over the
/// whole run.
pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
    let mut index = 0;

    // Loop through events and merge adjacent data events.
    while index < tokenizer.events.len() {
        let event = &tokenizer.events[index];

        if event.kind == Kind::Enter && event.name == Name::Data {
            // Move to exit.
            index += 1;

            let mut exit_index = index;

            // Find the farthest `data` event exit event.
            // Events come in enter/exit pairs, so if the event right after
            // this exit is also `Data`, another adjacent pair follows and we
            // can jump two events ahead to its exit.
            while exit_index + 1 < tokenizer.events.len()
                && tokenizer.events[exit_index + 1].name == Name::Data
            {
                exit_index += 2;
            }

            if exit_index > index {
                // Queue removal of everything between the first exit slot and
                // the last exit (the intermediate exit/enter events).
                tokenizer.map.add(index, exit_index - index, vec![]);
                // Change positional info: the surviving exit now ends where
                // the last exit in the run ended.
                tokenizer.events[index].point = tokenizer.events[exit_index].point.clone();
                // Move to the end.
                index = exit_index;
            }
        }

        index += 1;
    }

    // Apply the queued removals to the event list.
    tokenizer.map.consume(&mut tokenizer.events);

    None
}