diff options
| author | Martin Fischer <martin@push-f.com> | 2021-04-08 14:53:10 +0200 | 
|---|---|---|
| committer | Martin Fischer <martin@push-f.com> | 2021-04-08 15:40:48 +0200 | 
| commit | 7c1b82bf4bac750b7d7311ee29625e40e4441aef (patch) | |
| tree | 6c92c62ffc6bc048dbe2f15f1bd6651cecf62706 /src | |
| parent | 6a37a2432efda67aa681338251a0d47d6336f9e3 (diff) | |
drop log dependency
Diffstat (limited to 'src')
| -rw-r--r-- | src/tokenizer/char_ref/mod.rs | 2 | ||||
| -rw-r--r-- | src/tokenizer/mod.rs | 7 | 
2 files changed, 1 insertion, 8 deletions
| diff --git a/src/tokenizer/char_ref/mod.rs b/src/tokenizer/char_ref/mod.rs index 336e0df..4c231b2 100644 --- a/src/tokenizer/char_ref/mod.rs +++ b/src/tokenizer/char_ref/mod.rs @@ -11,7 +11,6 @@ use super::{TokenSink, Tokenizer};  use crate::util::buffer_queue::BufferQueue;  use crate::util::str::is_ascii_alnum; -use log::debug;  use std::borrow::Cow::Borrowed;  use std::char::from_u32; @@ -159,7 +158,6 @@ impl CharRefTokenizer {              return Done;          } -        debug!("char ref tokenizer stepping in state {:?}", self.state);          match self.state {              Begin => self.do_begin(tokenizer, input),              Octothorpe => self.do_octothorpe(tokenizer, input), diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs index 8626191..dd5f219 100644 --- a/src/tokenizer/mod.rs +++ b/src/tokenizer/mod.rs @@ -23,7 +23,6 @@ use self::char_ref::{CharRef, CharRefTokenizer};  use crate::util::{smallcharset::SmallCharSet, str::lower_ascii_letter}; -use log::debug;  use std::borrow::Cow::{self, Borrowed};  use std::collections::BTreeMap;  use std::default::Default; @@ -271,7 +270,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {              self.emit_error(Cow::Owned(msg));          } -        debug!("got character {}", c);          self.current_char = c;          Some(c)      } @@ -299,7 +297,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {          }          let d = input.pop_except_from(set); -        debug!("got characters {:?}", d);          match d {              Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(FromSet), @@ -598,7 +595,7 @@ macro_rules! shorthand (  // so it's behind a cfg flag.  #[cfg(trace_tokenizer)]  macro_rules! 
sh_trace ( ( $me:ident : $($cmds:tt)* ) => ({ -    debug!("  {:s}", stringify!($($cmds)*)); +    println!("  {:s}", stringify!($($cmds)*));      shorthand!($me:expr : $($cmds)*);  })); @@ -682,7 +679,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {              return self.step_char_ref_tokenizer(input);          } -        debug!("processing in state {:?}", self.state);          match self.state {              //ยง data-state              states::Data => loop { @@ -1435,7 +1431,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {      }      fn eof_step(&mut self) -> ProcessResult<Sink::Handle> { -        debug!("processing EOF in state {:?}", self.state);          match self.state {              states::Data |              states::RawData(Rcdata) | | 
