use crate::construct::partial_space_or_tab_eol::{space_or_tab_eol_with_options, Options};
use crate::event::{Content, Link, Name};
use crate::state::{Name as StateName, State};
use crate::subtokenize::link;
use crate::tokenizer::Tokenizer;
use crate::util::constant::LINK_REFERENCE_SIZE_MAX;

/// Start of label.
///
/// ```markdown
/// > | [a]
///     ^
/// ```
pub fn start(_tokenizer: &mut Tokenizer) -> State {
    // Nothing is inspected or consumed here; defer straight to the
    // break handler, which does all the dispatching.
    // (`_tokenizer` is unused but the signature must match the other
    // state functions.)
    State::Retry(StateName::WikilinkLabelAtBreak)
}

/// In label, at something, before something else.
///
/// ```markdown
/// > | [a]
///      ^
/// ```
pub fn at_break(tokenizer: &mut Tokenizer) -> State {
    // Reject labels that grew past the size limit, and unterminated
    // labels at EOF.
    if tokenizer.tokenize_state.size > LINK_REFERENCE_SIZE_MAX || tokenizer.current.is_none() {
        return State::Retry(StateName::WikilinkLabelNok);
    }

    match tokenizer.current {
        // Line ending: consume the whitespace/EOL run as string content,
        // then come back here on success, or give up on failure.
        Some(b'\n') => {
            tokenizer.attempt(
                State::Next(StateName::WikilinkLabelEolAfter),
                State::Next(StateName::WikilinkLabelNok),
            );
            State::Retry(space_or_tab_eol_with_options(
                tokenizer,
                Options {
                    content: Some(Content::String),
                    connect: tokenizer.tokenize_state.connect,
                },
            ))
        }
        // `]` while `size_b == 1` (set by `inside` when it consumed the
        // previous `]`): the label is closed, move on to the end state.
        Some(b']') if tokenizer.tokenize_state.size_b == 1 => {
            tokenizer.tokenize_state.size_b = 0;
            State::Retry(StateName::WikilinkEnd)
        }
        // Anything else: open a data token for label text.
        _ => {
            tokenizer.enter_link(
                Name::Data,
                Link {
                    previous: None,
                    next: None,
                    content: Content::String,
                },
            );

            if tokenizer.tokenize_state.connect {
                // Chain this data token onto the previous string content.
                let index = tokenizer.events.len() - 1;
                link(&mut tokenizer.events, index);
            } else {
                // First piece of content: subsequent pieces must connect.
                tokenizer.tokenize_state.connect = true;
            }

            State::Retry(StateName::WikilinkLabelInside)
        }
    }
}

/// In label, after whitespace.
///
/// ```markdown
///   | [a␊
/// > | b]
///     ^
/// ```
pub fn eol_after(tokenizer: &mut Tokenizer) -> State {
    // Content resuming after an EOL must connect to what came before it.
    tokenizer.tokenize_state.connect = true;
    State::Retry(StateName::WikilinkLabelAtBreak)
}

/// In label, on something disallowed.
/// /// ```markdown /// > | [] /// ^ /// ``` pub fn nok(tokenizer: &mut Tokenizer) -> State { tokenizer.tokenize_state.connect = false; tokenizer.tokenize_state.seen = false; tokenizer.tokenize_state.size = 0; State::Nok } /// In label, in text. /// /// ```markdown /// > | [a] /// ^ /// ``` pub fn inside(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { None | Some(b'\n') => { tokenizer.exit(Name::Data); State::Retry(StateName::WikilinkLabelAtBreak) } Some(b']') => { tokenizer.exit(Name::Data); tokenizer.consume(); tokenizer.tokenize_state.size += 1; tokenizer.tokenize_state.size_b = 1; tokenizer.tokenize_state.seen = false; // ?? State::Next(StateName::WikilinkLabelAtBreak) } Some(byte) => { if tokenizer.tokenize_state.size > LINK_REFERENCE_SIZE_MAX { tokenizer.exit(Name::Data); State::Retry(StateName::WikilinkLabelAtBreak) } else { tokenizer.consume(); tokenizer.tokenize_state.size += 1; if !tokenizer.tokenize_state.seen && !matches!(byte, b'\t' | b' ') { tokenizer.tokenize_state.seen = true; } State::Next(if matches!(byte, b'\\') { StateName::WikilinkLabelEscape } else { StateName::WikilinkLabelInside }) } } } } /// After `\`, at a special character. /// /// ```markdown /// > | [a\*a] /// ^ /// ``` pub fn escape(tokenizer: &mut Tokenizer) -> State { match tokenizer.current { Some(b'[' | b'\\' | b']') => { tokenizer.consume(); tokenizer.tokenize_state.size += 1; State::Next(StateName::WikilinkLabelInside) } _ => State::Retry(StateName::WikilinkLabelInside), } }