Diffstat (limited to 'src')
-rw-r--r--  src/emitter.rs   |  2
-rw-r--r--  src/reader.rs    |  4
-rw-r--r--  src/tokenizer.rs | 36
3 files changed, 18 insertions, 24 deletions
diff --git a/src/emitter.rs b/src/emitter.rs
index be712df..110ed5d 100644
--- a/src/emitter.rs
+++ b/src/emitter.rs
@@ -534,7 +534,7 @@ pub struct Doctype {
}
/// The token type used by default. You can define your own token type by implementing the
-/// [`crate::Emitter`] trait and using [`crate::Tokenizer::new_with_emitter`].
+/// [`crate::Emitter`] trait.
#[derive(Debug, Eq, PartialEq)]
pub enum Token<S> {
/// A HTML start tag.
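
The doc comment above points readers at the Emitter trait as the way to plug in a custom token type, but the trait's required methods are not part of this diff. The sketch below is only a self-contained analogue of that pattern (none of these names exist in the crate): an emitter owns an associated token type, and whatever drives it yields values of that type.

    // Toy analogue, not this crate's API: an "emitter" that picks its own
    // token type (here, a running count of tag-open characters).
    trait ToyEmitter {
        type Token;
        fn feed(&mut self, c: char);
        fn pop_token(&mut self) -> Option<Self::Token>;
    }

    struct TagOpenCounter {
        seen: usize,
        pending: Vec<usize>,
    }

    impl ToyEmitter for TagOpenCounter {
        type Token = usize; // the "token" is just a counter value
        fn feed(&mut self, c: char) {
            if c == '<' {
                self.seen += 1;
                self.pending.push(self.seen);
            }
        }
        fn pop_token(&mut self) -> Option<usize> {
            self.pending.pop()
        }
    }

    fn main() {
        let mut emitter = TagOpenCounter { seen: 0, pending: Vec::new() };
        for c in "<p>hello</p>".chars() {
            emitter.feed(c);
        }
        while let Some(n) = emitter.pop_token() {
            println!("tag-open #{n}");
        }
    }
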
diff --git a/src/reader.rs b/src/reader.rs
index f756c65..19929d4 100644
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -30,8 +30,8 @@ pub trait Reader {
/// An object that can be converted into a [`crate::Reader`].
///
-/// For example, any utf8-string can be converted into a `StringReader`, such that
-/// `Tokenizer::new("mystring")` and `Tokenizer::new(&String::new("foo"))` work.
+/// For example, any utf8-string can be converted into a `StringReader`.
+// TODO: , such that [give concrete examples of not-yet-implemented parser API] work.
pub trait IntoReader<'a> {
/// The reader type into which this type should be converted.
type Reader: Reader + 'a;
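
The bound used by the new constructor in the tokenizer.rs hunk below shows how IntoReader is meant to be consumed. A minimal sketch, assuming the crate is html5gum with these traits re-exported at the root, and assuming &str and &String get the StringReader conversion the doc comment describes:

    use html5gum::{IntoReader, Reader}; // crate name and root re-exports assumed

    // Accept anything convertible into a reader, mirroring the bound on the
    // new Tokenizer::new below.
    fn to_reader<'a, R: Reader>(input: impl IntoReader<'a, Reader = R>) -> R {
        input.into_reader()
    }

    fn main() {
        let _from_str = to_reader("mystring");
        let owned = String::from("foo");
        let _from_string = to_reader(&owned);
    }
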
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index d7db3b6..5abd6ba 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -42,10 +42,21 @@ pub struct Tokenizer<R: Reader, E: Emitter<R> = DefaultEmitter<R, ()>> {
pub(crate) return_state: Option<InternalState>,
}
-impl<R: Reader> Tokenizer<R> {
- /// Create a new tokenizer from some input.
- pub fn new<'a, S: IntoReader<'a, Reader = R>>(input: S) -> Self {
- Tokenizer::<S::Reader>::new_with_emitter(input, DefaultEmitter::default())
+impl<R: Reader, E: Emitter<R>> Tokenizer<R, E> {
+ /// Creates a new tokenizer from some input and an emitter.
+ ///
+ /// TODO: add warning about you needing to do the state switching
+ pub fn new<'a>(reader: impl IntoReader<'a, Reader = R>, emitter: E) -> Self {
+ Tokenizer {
+ reader: reader.into_reader(),
+ emitter,
+ state: InternalState::Data,
+ to_reconsume: Stack2::default(),
+ return_state: None,
+ temporary_buffer: String::new(),
+ character_reference_code: 0,
+ eof: false,
+ }
}
}
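
For callers, the effect of this hunk is that construction always takes an emitter explicitly; the emitter-less convenience constructor is gone. A sketch under the same assumptions as above (crate name and root re-exports are not shown in this diff), mirroring the bounds the hunk declares:

    use html5gum::{Emitter, IntoReader, Reader, Tokenizer}; // names assumed re-exported

    // Mirrors the new constructor's bounds: the caller now supplies the emitter.
    fn make_tokenizer<'a, R, E>(
        input: impl IntoReader<'a, Reader = R>,
        emitter: E,
    ) -> Tokenizer<R, E>
    where
        R: Reader,
        E: Emitter<R>,
    {
        // Per the TODO in the new doc comment, any required state switching is
        // now also the caller's responsibility.
        Tokenizer::new(input, emitter)
    }

    // A typical call, assuming DefaultEmitter stays exported and its generics
    // can be inferred (an explicit annotation may be needed for the `()` slot):
    //     let tokenizer = make_tokenizer("<p>hello</p>", DefaultEmitter::default());
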
@@ -84,23 +95,6 @@ impl From<State> for InternalState {
}
impl<R: Reader, E: Emitter<R>> Tokenizer<R, E> {
- /// Construct a new tokenizer from some input and a custom emitter.
- ///
- /// Use this method over [`Tokenizer::new`] when you want to have more control over string allocation for
- /// tokens.
- pub fn new_with_emitter<'a, S: IntoReader<'a, Reader = R>>(input: S, emitter: E) -> Self {
- Tokenizer {
- eof: false,
- state: InternalState::Data,
- emitter,
- temporary_buffer: String::new(),
- to_reconsume: Stack2::default(),
- reader: input.into_reader(),
- character_reference_code: 0,
- return_state: None,
- }
- }
-
/// Test-internal function to override internal state.
///
/// Only available with the `integration-tests` feature which is not public API.
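
Taken together with the first tokenizer.rs hunk, the migration for downstream code is mechanical: a call such as Tokenizer::new_with_emitter(input, emitter) becomes Tokenizer::new(input, emitter), and code that relied on the old emitter-less Tokenizer::new(input) now passes an emitter explicitly (for example, the DefaultEmitter the old constructor supplied by default).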