summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMartin Fischer <martin@push-f.com>2021-04-08 14:53:10 +0200
committerMartin Fischer <martin@push-f.com>2021-04-08 15:40:48 +0200
commit7c1b82bf4bac750b7d7311ee29625e40e4441aef (patch)
tree6c92c62ffc6bc048dbe2f15f1bd6651cecf62706
parent6a37a2432efda67aa681338251a0d47d6336f9e3 (diff)
drop log dependency
-rw-r--r--Cargo.toml3
-rw-r--r--src/tokenizer/char_ref/mod.rs2
-rw-r--r--src/tokenizer/mod.rs7
3 files changed, 1 insertions, 11 deletions
diff --git a/Cargo.toml b/Cargo.toml
index 6c7698a..2021a6c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,9 +10,6 @@ documentation = "https://docs.rs/html5ever"
categories = [ "parser-implementations", "web-programming" ]
edition = "2018"
-[dependencies]
-log = "0.4"
-
[dev-dependencies]
typed-arena = "1.3.0"
criterion = "0.3"
diff --git a/src/tokenizer/char_ref/mod.rs b/src/tokenizer/char_ref/mod.rs
index 336e0df..4c231b2 100644
--- a/src/tokenizer/char_ref/mod.rs
+++ b/src/tokenizer/char_ref/mod.rs
@@ -11,7 +11,6 @@ use super::{TokenSink, Tokenizer};
use crate::util::buffer_queue::BufferQueue;
use crate::util::str::is_ascii_alnum;
-use log::debug;
use std::borrow::Cow::Borrowed;
use std::char::from_u32;
@@ -159,7 +158,6 @@ impl CharRefTokenizer {
return Done;
}
- debug!("char ref tokenizer stepping in state {:?}", self.state);
match self.state {
Begin => self.do_begin(tokenizer, input),
Octothorpe => self.do_octothorpe(tokenizer, input),
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 8626191..dd5f219 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -23,7 +23,6 @@ use self::char_ref::{CharRef, CharRefTokenizer};
use crate::util::{smallcharset::SmallCharSet, str::lower_ascii_letter};
-use log::debug;
use std::borrow::Cow::{self, Borrowed};
use std::collections::BTreeMap;
use std::default::Default;
@@ -271,7 +270,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.emit_error(Cow::Owned(msg));
}
- debug!("got character {}", c);
self.current_char = c;
Some(c)
}
@@ -299,7 +297,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
let d = input.pop_except_from(set);
- debug!("got characters {:?}", d);
match d {
Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(FromSet),
@@ -598,7 +595,7 @@ macro_rules! shorthand (
// so it's behind a cfg flag.
#[cfg(trace_tokenizer)]
macro_rules! sh_trace ( ( $me:ident : $($cmds:tt)* ) => ({
- debug!(" {:s}", stringify!($($cmds)*));
+ println!(" {:s}", stringify!($($cmds)*));
shorthand!($me:expr : $($cmds)*);
}));
@@ -682,7 +679,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
return self.step_char_ref_tokenizer(input);
}
- debug!("processing in state {:?}", self.state);
match self.state {
//§ data-state
states::Data => loop {
@@ -1435,7 +1431,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}
fn eof_step(&mut self) -> ProcessResult<Sink::Handle> {
- debug!("processing EOF in state {:?}", self.state);
match self.state {
states::Data |
states::RawData(Rcdata) |