summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author    Martin Fischer <martin@push-f.com>  2021-11-29 18:04:57 +0100
committer Martin Fischer <martin@push-f.com>  2021-11-30 11:22:35 +0100
commit 8b6e4ba30354d466b56bf80f1bb58c371cfdf7c9 (patch)
tree   8ba7c7bacb52b7e2c76438abc583535d4c4c692a /src
parent 78995cda172159bce38a9c846d2a57a22a00e375 (diff)
remove pointless associated type from TokenSink trait
Diffstat (limited to 'src')
-rw-r--r--  src/tokenizer/interface.rs |  8
-rw-r--r--  src/tokenizer/mod.rs       | 47
2 files changed, 20 insertions, 35 deletions
diff --git a/src/tokenizer/interface.rs b/src/tokenizer/interface.rs
index 53b06ae..2c6cc38 100644
--- a/src/tokenizer/interface.rs
+++ b/src/tokenizer/interface.rs
@@ -94,19 +94,17 @@ pub enum Token {
#[derive(Debug, PartialEq)]
#[must_use]
-pub enum TokenSinkResult<Handle> {
+pub enum TokenSinkResult {
Continue,
- Script(Handle),
+ Break,
Plaintext,
RawData(states::RawKind),
}
/// Types which can receive tokens from the tokenizer.
pub trait TokenSink {
- type Handle;
-
/// Process a token.
- fn process_token(&mut self, token: Token, line_number: u64) -> TokenSinkResult<Self::Handle>;
+ fn process_token(&mut self, token: Token, line_number: u64) -> TokenSinkResult;
// Signal sink that tokenization reached the end.
fn end(&mut self) {}
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 9cdb398..0a0f7af 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -37,15 +37,15 @@ mod char_ref;
mod interface;
mod states;
-pub enum ProcessResult<Handle> {
+pub enum ProcessResult {
Suspend,
- Script(Handle),
+ Break,
}
#[must_use]
-pub enum TokenizerResult<Handle> {
+pub enum TokenizerResult {
Done,
- Script(Handle),
+ Break,
}
fn option_push(opt_str: &mut Option<String>, c: char) {
@@ -197,7 +197,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
/// Feed an input string into the tokenizer.
- pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<Sink::Handle> {
+ pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult {
if input.is_empty() {
return TokenizerResult::Done;
}
@@ -215,7 +215,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.run(input)
}
- fn process_token(&mut self, token: Token) -> TokenSinkResult<Sink::Handle> {
+ fn process_token(&mut self, token: Token) -> TokenSinkResult {
if self.opts.profile {
let (ret, dt) = time!(self.sink.process_token(token, self.current_line));
self.time_in_sink += dt;
@@ -325,7 +325,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
/// Run the state machine for as long as we can.
- fn run(&mut self, input: &mut BufferQueue) -> TokenizerResult<Sink::Handle> {
+ fn run(&mut self, input: &mut BufferQueue) -> TokenizerResult {
if self.opts.profile {
loop {
let state = self.state;
@@ -346,9 +346,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
match run {
ControlFlow::Continue(()) => (),
ControlFlow::Break(ProcessResult::Suspend) => break,
- ControlFlow::Break(ProcessResult::Script(node)) => {
- return TokenizerResult::Script(node)
- }
+ ControlFlow::Break(ProcessResult::Break) => return TokenizerResult::Break,
}
}
} else {
@@ -356,9 +354,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
match self.step(input) {
ControlFlow::Continue(()) => (),
ControlFlow::Break(ProcessResult::Suspend) => break,
- ControlFlow::Break(ProcessResult::Script(node)) => {
- return TokenizerResult::Script(node)
- }
+ ControlFlow::Break(ProcessResult::Break) => return TokenizerResult::Break,
}
}
}
@@ -398,7 +394,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.process_token_and_continue(CharacterTokens(b));
}
- fn emit_current_tag(&mut self) -> ControlFlow<ProcessResult<Sink::Handle>> {
+ fn emit_current_tag(&mut self) -> ControlFlow<ProcessResult> {
self.finish_attribute();
let name = self.current_tag_name.clone();
@@ -445,9 +441,9 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.state = states::Plaintext;
ControlFlow::Continue(())
}
- TokenSinkResult::Script(node) => {
+ TokenSinkResult::Break => {
self.state = states::Data;
- ControlFlow::Break(ProcessResult::Script(node))
+ ControlFlow::Break(ProcessResult::Break)
}
TokenSinkResult::RawData(kind) => {
self.state = states::RawData(kind);
@@ -679,7 +675,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
// Return true if we should be immediately re-invoked
// (this just simplifies control flow vs. break / continue).
#[allow(clippy::never_loop)]
- fn step(&mut self, input: &mut BufferQueue) -> ControlFlow<ProcessResult<Sink::Handle>> {
+ fn step(&mut self, input: &mut BufferQueue) -> ControlFlow<ProcessResult> {
if self.char_ref_tokenizer.is_some() {
return self.step_char_ref_tokenizer(input);
}
@@ -1897,10 +1893,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
}
- fn step_char_ref_tokenizer(
- &mut self,
- input: &mut BufferQueue,
- ) -> ControlFlow<ProcessResult<Sink::Handle>> {
+ fn step_char_ref_tokenizer(&mut self, input: &mut BufferQueue) -> ControlFlow<ProcessResult> {
// FIXME HACK: Take and replace the tokenizer so we don't
// double-mut-borrow self. This is why it's boxed.
let mut tok = self.char_ref_tokenizer.take().unwrap();
@@ -1973,7 +1966,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
match self.eof_step() {
ControlFlow::Continue(()) => (),
ControlFlow::Break(ProcessResult::Suspend) => break,
- ControlFlow::Break(ProcessResult::Script(_)) => unreachable!(),
+ ControlFlow::Break(ProcessResult::Break) => unreachable!(),
}
}
@@ -2003,7 +1996,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
}
- fn eof_step(&mut self) -> ControlFlow<ProcessResult<Sink::Handle>> {
+ fn eof_step(&mut self) -> ControlFlow<ProcessResult> {
match self.state {
states::Data
| states::RawData(Rcdata)
@@ -2191,17 +2184,11 @@ mod test {
}
impl TokenSink for LinesMatch {
- type Handle = ();
-
fn end(&mut self) {
self.finish_str();
}
- fn process_token(
- &mut self,
- token: Token,
- line_number: u64,
- ) -> TokenSinkResult<Self::Handle> {
+ fn process_token(&mut self, token: Token, line_number: u64) -> TokenSinkResult {
match token {
CharacterTokens(b) => {
self.current_str_line = line_number;