// SerenityOS — LibWeb/CSS/Parser/TokenStream.h (147 lines)
1/* 2 * Copyright (c) 2020-2021, the SerenityOS developers. 3 * Copyright (c) 2021-2023, Sam Atkins <atkinssj@serenityos.org> 4 * 5 * SPDX-License-Identifier: BSD-2-Clause 6 */ 7 8#pragma once 9 10#include <AK/Format.h> 11#include <AK/Vector.h> 12#include <LibWeb/CSS/Parser/ComponentValue.h> 13#include <LibWeb/CSS/Parser/Tokenizer.h> 14 15namespace Web::CSS::Parser { 16 17template<typename T> 18class TokenStream { 19public: 20 class StateTransaction { 21 public: 22 explicit StateTransaction(TokenStream<T>& token_stream) 23 : m_token_stream(token_stream) 24 , m_saved_iterator_offset(token_stream.m_iterator_offset) 25 { 26 } 27 28 ~StateTransaction() 29 { 30 if (!m_commit) 31 m_token_stream.m_iterator_offset = m_saved_iterator_offset; 32 } 33 34 StateTransaction create_child() { return StateTransaction(*this); } 35 36 void commit() 37 { 38 m_commit = true; 39 if (m_parent) 40 m_parent->commit(); 41 } 42 43 private: 44 explicit StateTransaction(StateTransaction& parent) 45 : m_parent(&parent) 46 , m_token_stream(parent.m_token_stream) 47 , m_saved_iterator_offset(parent.m_token_stream.m_iterator_offset) 48 { 49 } 50 51 StateTransaction* m_parent { nullptr }; 52 TokenStream<T>& m_token_stream; 53 int m_saved_iterator_offset { 0 }; 54 bool m_commit { false }; 55 }; 56 57 explicit TokenStream(Vector<T> const& tokens) 58 : m_tokens(tokens) 59 , m_eof(make_eof()) 60 { 61 } 62 TokenStream(TokenStream<T> const&) = delete; 63 TokenStream(TokenStream<T>&&) = default; 64 65 bool has_next_token() 66 { 67 return (size_t)(m_iterator_offset + 1) < m_tokens.size(); 68 } 69 70 T const& next_token() 71 { 72 if (!has_next_token()) 73 return m_eof; 74 75 ++m_iterator_offset; 76 77 return m_tokens.at(m_iterator_offset); 78 } 79 80 T const& peek_token(int offset = 0) 81 { 82 if (!has_next_token()) 83 return m_eof; 84 85 return m_tokens.at(m_iterator_offset + offset + 1); 86 } 87 88 T const& current_token() 89 { 90 if ((size_t)m_iterator_offset >= m_tokens.size()) 91 return m_eof; 92 93 
return m_tokens.at(m_iterator_offset); 94 } 95 96 void reconsume_current_input_token() 97 { 98 if (m_iterator_offset >= 0) 99 --m_iterator_offset; 100 } 101 102 StateTransaction begin_transaction() { return StateTransaction(*this); } 103 104 void skip_whitespace() 105 { 106 while (peek_token().is(Token::Type::Whitespace)) 107 next_token(); 108 } 109 110 size_t token_count() const { return m_tokens.size(); } 111 size_t remaining_token_count() const { return token_count() - m_iterator_offset - 1; } 112 113 void dump_all_tokens() 114 { 115 dbgln("Dumping all tokens:"); 116 for (size_t i = 0; i < m_tokens.size(); ++i) { 117 auto& token = m_tokens[i]; 118 if ((i - 1) == (size_t)m_iterator_offset) 119 dbgln("-> {}", token.to_debug_string()); 120 else 121 dbgln(" {}", token.to_debug_string()); 122 } 123 } 124 125 void copy_state(Badge<Parser>, TokenStream<T> const& other) 126 { 127 m_iterator_offset = other.m_iterator_offset; 128 } 129 130private: 131 Vector<T> const& m_tokens; 132 int m_iterator_offset { -1 }; 133 134 T make_eof() 135 { 136 if constexpr (IsSame<T, Token>) { 137 return Tokenizer::create_eof_token(); 138 } 139 if constexpr (IsSame<T, ComponentValue>) { 140 return ComponentValue(Tokenizer::create_eof_token()); 141 } 142 } 143 144 T m_eof; 145}; 146 147}