Markdown parser fork with extended syntax for personal use.
File: `string.rs` — 81 lines, 2.3 kB.
1//! The string content type. 2//! 3//! **String** is a limited [text][] like content type which only allows 4//! character escapes and character references. 5//! It exists in things such as identifiers (media references, definitions), 6//! titles, URLs, code (fenced) info and meta parts. 7//! 8//! The constructs found in string are: 9//! 10//! * [Character escape][crate::construct::character_escape] 11//! * [Character reference][crate::construct::character_reference] 12//! 13//! [text]: crate::construct::text 14 15use crate::construct::partial_whitespace::resolve_whitespace; 16use crate::resolve::Name as ResolveName; 17use crate::state::{Name as StateName, State}; 18use crate::subtokenize::Subresult; 19use crate::tokenizer::Tokenizer; 20 21/// Characters that can start something in string. 22const MARKERS: [u8; 2] = [b'&', b'\\']; 23 24/// Start of string. 25/// 26/// ````markdown 27/// > | ```js 28/// ^ 29/// ```` 30pub fn start(tokenizer: &mut Tokenizer) -> State { 31 tokenizer.tokenize_state.markers = &MARKERS; 32 State::Retry(StateName::StringBefore) 33} 34 35/// Before string. 36/// 37/// ````markdown 38/// > | ```js 39/// ^ 40/// ```` 41pub fn before(tokenizer: &mut Tokenizer) -> State { 42 match tokenizer.current { 43 None => { 44 tokenizer.register_resolver(ResolveName::Data); 45 tokenizer.register_resolver(ResolveName::String); 46 State::Ok 47 } 48 Some(b'&') => { 49 tokenizer.attempt( 50 State::Next(StateName::StringBefore), 51 State::Next(StateName::StringBeforeData), 52 ); 53 State::Retry(StateName::CharacterReferenceStart) 54 } 55 Some(b'\\') => { 56 tokenizer.attempt( 57 State::Next(StateName::StringBefore), 58 State::Next(StateName::StringBeforeData), 59 ); 60 State::Retry(StateName::CharacterEscapeStart) 61 } 62 _ => State::Retry(StateName::StringBeforeData), 63 } 64} 65 66/// At data. 
67/// 68/// ````markdown 69/// > | ```js 70/// ^ 71/// ```` 72pub fn before_data(tokenizer: &mut Tokenizer) -> State { 73 tokenizer.attempt(State::Next(StateName::StringBefore), State::Nok); 74 State::Retry(StateName::DataStart) 75} 76 77/// Resolve whitespace in string. 78pub fn resolve(tokenizer: &mut Tokenizer) -> Option<Subresult> { 79 resolve_whitespace(tokenizer, false, false); 80 None 81}