Diffstat (limited to 'src')
-rw-r--r--  src/tokenizer.rs                 6
-rw-r--r--  src/tokenizer/machine.rs        32
-rw-r--r--  src/tokenizer/machine/utils.rs  12
3 files changed, 25 insertions, 25 deletions
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 4d3c534..661214c 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -119,9 +119,9 @@ impl From<State> for machine::State {
fn from(state: State) -> Self {
match state {
State::Data => machine::State::Data,
- State::Plaintext => machine::State::PlainText,
- State::Rcdata => machine::State::RcData,
- State::Rawtext => machine::State::RawText,
+ State::Plaintext => machine::State::Plaintext,
+ State::Rcdata => machine::State::Rcdata,
+ State::Rawtext => machine::State::Rawtext,
State::ScriptData => machine::State::ScriptData,
State::ScriptDataEscaped => machine::State::ScriptDataEscaped,
State::ScriptDataDoubleEscaped => machine::State::ScriptDataDoubleEscaped,
diff --git a/src/tokenizer/machine.rs b/src/tokenizer/machine.rs
index faf1ea8..944eb01 100644
--- a/src/tokenizer/machine.rs
+++ b/src/tokenizer/machine.rs
@@ -114,14 +114,14 @@ where
}
None => Ok(ControlToken::Eof),
},
- State::RcData => match slf.read_char()? {
+ State::Rcdata => match slf.read_char()? {
Some('&') => {
- slf.return_state = Some(State::RcData);
+ slf.return_state = Some(State::Rcdata);
slf.state = State::CharacterReference;
Ok(ControlToken::Continue)
}
Some('<') => {
- slf.state = State::RcDataLessThanSign;
+ slf.state = State::RcdataLessThanSign;
Ok(ControlToken::Continue)
}
Some('\0') => {
@@ -135,7 +135,7 @@ where
}
None => Ok(ControlToken::Eof),
},
- State::RawText => match slf.read_char()? {
+ State::Rawtext => match slf.read_char()? {
Some('<') => {
slf.state = State::RawTextLessThanSign;
Ok(ControlToken::Continue)
@@ -167,7 +167,7 @@ where
}
None => Ok(ControlToken::Eof),
},
- State::PlainText => match slf.read_char()? {
+ State::Plaintext => match slf.read_char()? {
Some('\0') => {
slf.emit_error(Error::UnexpectedNullCharacter);
slf.emit_char_for_source_char('\u{fffd}', '\0');
@@ -270,34 +270,34 @@ where
Ok(ControlToken::Eof)
}
},
- State::RcDataLessThanSign => match slf.read_char()? {
+ State::RcdataLessThanSign => match slf.read_char()? {
Some('/') => {
slf.temporary_buffer.clear();
- slf.state = State::RcDataEndTagOpen;
+ slf.state = State::RcdataEndTagOpen;
Ok(ControlToken::Continue)
}
c => {
slf.emit_char('<');
- slf.state = State::RcData;
+ slf.state = State::Rcdata;
slf.unread_char(c);
Ok(ControlToken::Continue)
}
},
- State::RcDataEndTagOpen => match slf.read_char()? {
+ State::RcdataEndTagOpen => match slf.read_char()? {
Some(x) if x.is_ascii_alphabetic() => {
slf.init_end_tag();
- slf.state = State::RcDataEndTagName;
+ slf.state = State::RcdataEndTagName;
slf.unread_char(Some(x));
Ok(ControlToken::Continue)
}
c => {
slf.emit_chars(b"</");
- slf.state = State::RcData;
+ slf.state = State::Rcdata;
slf.unread_char(c);
Ok(ControlToken::Continue)
}
},
- State::RcDataEndTagName => match slf.read_char()? {
+ State::RcdataEndTagName => match slf.read_char()? {
Some(whitespace_pat!()) if slf.current_end_tag_is_appropriate() => {
slf.state = State::BeforeAttributeName;
Ok(ControlToken::Continue)
@@ -320,7 +320,7 @@ where
c => {
slf.emit_chars(b"</");
slf.flush_buffer_characters();
- slf.state = State::RcData;
+ slf.state = State::Rcdata;
slf.unread_char(c);
Ok(ControlToken::Continue)
}
@@ -333,7 +333,7 @@ where
}
c => {
slf.emit_char('<');
- slf.state = State::RawText;
+ slf.state = State::Rawtext;
slf.unread_char(c);
Ok(ControlToken::Continue)
}
@@ -347,7 +347,7 @@ where
}
c => {
slf.emit_chars(b"</");
- slf.state = State::RawText;
+ slf.state = State::Rawtext;
slf.unread_char(c);
Ok(ControlToken::Continue)
}
@@ -375,7 +375,7 @@ where
c => {
slf.emit_chars(b"</");
slf.flush_buffer_characters();
- slf.state = State::RawText;
+ slf.state = State::Rawtext;
slf.unread_char(c);
Ok(ControlToken::Continue)
}
diff --git a/src/tokenizer/machine/utils.rs b/src/tokenizer/machine/utils.rs
index 4d59282..b38b8d9 100644
--- a/src/tokenizer/machine/utils.rs
+++ b/src/tokenizer/machine/utils.rs
@@ -311,16 +311,16 @@ pub(crate) use noncharacter_pat;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum State {
Data,
- RcData,
- RawText,
+ Rcdata,
+ Rawtext,
ScriptData,
- PlainText,
+ Plaintext,
TagOpen,
EndTagOpen,
TagName,
- RcDataLessThanSign,
- RcDataEndTagOpen,
- RcDataEndTagName,
+ RcdataLessThanSign,
+ RcdataEndTagOpen,
+ RcdataEndTagName,
RawTextLessThanSign,
RawTextEndTagOpen,
RawTextEndTagName,