Markdown parser fork with extended syntax for personal use.

Refactor to always resolve edit maps

This should catch some confusing bugs, such as the one fixed in ad1b3e6.
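The pattern in every hunk below is the same: each resolver now calls tokenizer.map.consume(&mut tokenizer.events) right before it returns, instead of consuming the map at the start of the resolver or not at all, so queued edits are always applied to the event list. Here is a minimal, self-contained sketch of that shape with a simplified edit map; the Edit, EditMap, and resolve_example names are illustrative stand-ins, not the real Tokenizer internals.

// Illustrative sketch only, not the markdown-rs internals: queued edits
// against an event list, applied ("consumed") before a resolver returns.

/// One queued edit: at `index`, remove `remove` events and insert `insert`.
struct Edit {
    index: usize,
    remove: usize,
    insert: Vec<String>,
}

/// Collects edits so they can be applied in one pass.
#[derive(Default)]
struct EditMap {
    edits: Vec<Edit>,
}

impl EditMap {
    fn add(&mut self, index: usize, remove: usize, insert: Vec<String>) {
        self.edits.push(Edit { index, remove, insert });
    }

    /// Apply all queued edits to `events`, highest index first so earlier
    /// indices stay valid, and leave the map empty for the next resolver.
    fn consume(&mut self, events: &mut Vec<String>) {
        self.edits.sort_by_key(|edit| edit.index);
        for edit in self.edits.drain(..).rev() {
            events.splice(edit.index..edit.index + edit.remove, edit.insert);
        }
    }
}

/// A resolver in the shape this commit enforces: queue edits while walking
/// the events, then always consume the map before returning.
fn resolve_example(events: &mut Vec<String>, map: &mut EditMap) {
    map.add(1, 0, vec!["data".into()]);
    map.consume(events);
}

fn main() {
    let mut events = vec!["enter".into(), "exit".into()];
    let mut map = EditMap::default();
    resolve_example(&mut events, &mut map);
    assert_eq!(events, ["enter", "data", "exit"]);
}

Applying edits from the highest index down keeps earlier indices valid, and flushing the map before returning means no later resolver operates on a stale event list.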

8 files changed, 7 insertions(+), 8 deletions(-)
src/construct/gfm_table.rs (+1)

@@ -883,6 +883,7 @@
         flush_table_end(tokenizer, last_table_end, last_table_has_body);
     }

+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }

src/construct/heading_atx.rs (+1)

@@ -280,5 +280,6 @@
         index += 1;
     }

+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
src/construct/heading_setext.rs (-3)

@@ -185,8 +185,6 @@

 /// Resolve heading (setext).
 pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
-    tokenizer.map.consume(&mut tokenizer.events);
-
     let mut enter = skip::to(&tokenizer.events, 0, &[Name::HeadingSetextUnderline]);

     while enter < tokenizer.events.len() {
@@ -280,6 +278,5 @@
     }

     tokenizer.map.consume(&mut tokenizer.events);
-
     None
 }
src/construct/label_end.rs (-1)

@@ -669,7 +669,6 @@
     mark_as_data(tokenizer, &starts);

     tokenizer.map.consume(&mut tokenizer.events);
-
     None
 }

src/construct/list_item.rs (+1)

@@ -469,5 +469,6 @@
         index += 1;
     }

+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
src/construct/partial_data.rs (+1 -2)

@@ -74,8 +74,6 @@

 /// Merge adjacent data events.
 pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> {
-    tokenizer.map.consume(&mut tokenizer.events);
-
     let mut index = 0;

     // Loop through events and merge adjacent data events.
@@ -107,5 +105,6 @@
         index += 1;
     }

+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }
src/construct/partial_whitespace.rs (+2 -2)

@@ -67,8 +67,6 @@

 /// Resolve whitespace.
 pub fn resolve_whitespace(tokenizer: &mut Tokenizer, hard_break: bool, trim_whole: bool) {
-    tokenizer.map.consume(&mut tokenizer.events);
-
     let mut index = 0;

     while index < tokenizer.events.len() {
@@ -86,6 +84,8 @@

         index += 1;
     }
+
+    tokenizer.map.consume(&mut tokenizer.events);
 }

 /// Trim a [`Data`][Name::Data] event.
src/construct/text.rs (+1)

@@ -259,5 +259,6 @@
         resolve_gfm_autolink_literal(tokenizer);
     }

+    tokenizer.map.consume(&mut tokenizer.events);
     None
 }