diff --git a/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp b/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp
index 952d772a8b9..e61985512d8 100644
--- a/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp
+++ b/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp
@@ -243,8 +243,8 @@ Optional<u32> HTMLTokenizer::peek_code_point(ssize_t offset, StopAtInsertionPoint
if (it >= static_cast<ssize_t>(m_decoded_input.size()))
return {};
if (stop_at_insertion_point == StopAtInsertionPoint::Yes
- && m_insertion_point.defined
- && it >= m_insertion_point.position) {
+ && m_insertion_point.has_value()
+ && it >= *m_insertion_point) {
return {};
}
return m_decoded_input[it];
@@ -2900,7 +2900,7 @@ void HTMLTokenizer::insert_input_at_insertion_point(StringView input)
Vector<u32> new_decoded_input;
new_decoded_input.ensure_capacity(m_decoded_input.size() + input.length());
- auto before = m_decoded_input.span().slice(0, m_insertion_point.position);
+ auto before = m_decoded_input.span().slice(0, *m_insertion_point);
new_decoded_input.append(before.data(), before.size());
auto utf8_to_insert = MUST(String::from_utf8(input));
@@ -2910,11 +2910,11 @@ void HTMLTokenizer::insert_input_at_insertion_point(StringView input)
++code_points_inserted;
}
- auto after = m_decoded_input.span().slice(m_insertion_point.position);
+ auto after = m_decoded_input.span().slice(*m_insertion_point);
new_decoded_input.append(after.data(), after.size());
m_decoded_input = move(new_decoded_input);
- m_insertion_point.position += code_points_inserted;
+ m_insertion_point.value() += code_points_inserted;
}
void HTMLTokenizer::insert_eof()
diff --git a/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.h b/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.h
index 87fd604bd0c..7c1c827da95 100644
--- a/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.h
+++ b/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.h
@@ -135,19 +135,12 @@ public:
void insert_eof();
bool is_eof_inserted();
- bool is_insertion_point_defined() const { return m_insertion_point.defined; }
- bool is_insertion_point_reached()
- {
- return m_insertion_point.defined && m_current_offset >= m_insertion_point.position;
- }
- void undefine_insertion_point() { m_insertion_point.defined = false; }
+ bool is_insertion_point_defined() const { return m_insertion_point.has_value(); }
+ bool is_insertion_point_reached() { return m_insertion_point.has_value() && m_current_offset >= *m_insertion_point; }
+ void undefine_insertion_point() { m_insertion_point = {}; }
void store_insertion_point() { m_old_insertion_point = m_insertion_point; }
- void restore_insertion_point() { m_insertion_point = m_old_insertion_point; }
- void update_insertion_point()
- {
- m_insertion_point.defined = true;
- m_insertion_point.position = m_current_offset;
- }
+ void restore_insertion_point() { m_insertion_point = move(m_old_insertion_point); }
+ void update_insertion_point() { m_insertion_point = m_current_offset; }
// This permanently cuts off the tokenizer input stream.
void abort() { m_aborted = true; }
@@ -199,12 +192,8 @@ private:
String m_source;
Vector<u32> m_decoded_input;
- struct InsertionPoint {
- ssize_t position { 0 };
- bool defined { false };
- };
- InsertionPoint m_insertion_point {};
- InsertionPoint m_old_insertion_point {};
+ Optional<ssize_t> m_insertion_point;
+ Optional<ssize_t> m_old_insertion_point;
ssize_t m_current_offset { 0 };
ssize_t m_prev_offset { 0 };