about summary refs log tree commit diff
path: root/src/tokenizer/mod.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/tokenizer/mod.rs')
-rw-r--r--  src/tokenizer/mod.rs  7
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 8626191..dd5f219 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -23,7 +23,6 @@ use self::char_ref::{CharRef, CharRefTokenizer};
use crate::util::{smallcharset::SmallCharSet, str::lower_ascii_letter};
-use log::debug;
use std::borrow::Cow::{self, Borrowed};
use std::collections::BTreeMap;
use std::default::Default;
@@ -271,7 +270,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.emit_error(Cow::Owned(msg));
}
- debug!("got character {}", c);
self.current_char = c;
Some(c)
}
@@ -299,7 +297,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
let d = input.pop_except_from(set);
- debug!("got characters {:?}", d);
match d {
Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(FromSet),
@@ -598,7 +595,7 @@ macro_rules! shorthand (
// so it's behind a cfg flag.
#[cfg(trace_tokenizer)]
macro_rules! sh_trace ( ( $me:ident : $($cmds:tt)* ) => ({
- debug!(" {:s}", stringify!($($cmds)*));
+ println!(" {:s}", stringify!($($cmds)*));
shorthand!($me:expr : $($cmds)*);
}));
@@ -682,7 +679,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
return self.step_char_ref_tokenizer(input);
}
- debug!("processing in state {:?}", self.state);
match self.state {
//ยง data-state
states::Data => loop {
@@ -1435,7 +1431,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
fn eof_step(&mut self) -> ProcessResult<Sink::Handle> {
- debug!("processing EOF in state {:?}", self.state);
match self.state {
states::Data |
states::RawData(Rcdata) |