| author | Martin Fischer <martin@push-f.com> | 2023-08-11 21:42:18 +0200 |
|---|---|---|
| committer | Martin Fischer <martin@push-f.com> | 2023-08-19 06:41:55 +0200 |
| commit | 36fbad12e7a50fcae3cc5e9e2d2baccd828115a5 (patch) | |
| tree | c200a7dc3feb3bf869c99beab01f5dbf9d12ce4c | |
| parent | c00d5cdec03364f064a1273a5d806f3600abf09a (diff) | |
break!: rename Readable to IntoReader
The corresponding standard library trait is also called IntoIterator, not Iterable.
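
For reference, the standard-library trait the commit message alludes to has the following shape (abridged from `std::iter`); the renamed `IntoReader` mirrors it with its `Reader` associated type and consuming `into_reader` method:

```rust
// Abridged from std::iter — the naming pattern the rename follows:
// an Into* trait with a consuming into_*() conversion method.
pub trait IntoIterator {
    /// The type of the elements being iterated over.
    type Item;
    /// Which kind of iterator are we turning this into?
    type IntoIter: Iterator<Item = Self::Item>;
    /// Creates an iterator from a value.
    fn into_iter(self) -> Self::IntoIter;
}
```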
-rw-r--r-- | src/lib.rs | 2
-rw-r--r-- | src/reader.rs | 24
-rw-r--r-- | src/tokenizer.rs | 8
-rw-r--r-- | tests/test_spans.rs | 6

4 files changed, 21 insertions, 19 deletions
diff --git a/src/lib.rs b/src/lib.rs
@@ -19,5 +19,5 @@ pub use utils::State as InternalState;
 pub use emitter::{Attribute, DefaultEmitter, Doctype, Emitter, EndTag, StartTag, Token};
 pub use error::Error;
 pub use never::Never;
-pub use reader::{BufReadReader, Readable, Reader, StringReader};
+pub use reader::{BufReadReader, IntoReader, Reader, StringReader};
 pub use tokenizer::{InfallibleTokenizer, State, Tokenizer};
diff --git a/src/reader.rs b/src/reader.rs
index eb2b479..6067c47 100644
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -31,18 +31,18 @@ pub trait Reader {
 ///
 /// For example, any utf8-string can be converted into a `StringReader`, such that
 /// `Tokenizer::new("mystring")` and `Tokenizer::new(&String::new("foo"))` work.
-pub trait Readable<'a> {
-    /// The reader type to which this type should be converted.
+pub trait IntoReader<'a> {
+    /// The reader type into which this type should be converted.
     type Reader: Reader + 'a;

-    /// Convert self to some sort of reader.
-    fn to_reader(self) -> Self::Reader;
+    /// Convert self into some sort of reader.
+    fn into_reader(self) -> Self::Reader;
 }

-impl<'a, R: 'a + Reader> Readable<'a> for R {
+impl<'a, R: 'a + Reader> IntoReader<'a> for R {
     type Reader = Self;

-    fn to_reader(self) -> Self::Reader {
+    fn into_reader(self) -> Self::Reader {
         self
     }
 }
@@ -120,18 +120,18 @@ impl<'a> Reader for StringReader<'a> {
     }
 }

-impl<'a> Readable<'a> for &'a str {
+impl<'a> IntoReader<'a> for &'a str {
     type Reader = StringReader<'a>;

-    fn to_reader(self) -> Self::Reader {
+    fn into_reader(self) -> Self::Reader {
         StringReader::new(self)
     }
 }

-impl<'a> Readable<'a> for &'a String {
+impl<'a> IntoReader<'a> for &'a String {
     type Reader = StringReader<'a>;

-    fn to_reader(self) -> Self::Reader {
+    fn into_reader(self) -> Self::Reader {
         StringReader::new(self.as_str())
     }
 }
@@ -225,10 +225,10 @@ impl<R: BufRead> Reader for BufReadReader<R> {
     }
 }

-impl<'a, R: Read + 'a> Readable<'a> for BufReader<R> {
+impl<'a, R: Read + 'a> IntoReader<'a> for BufReader<R> {
     type Reader = BufReadReader<BufReader<R>>;

-    fn to_reader(self) -> Self::Reader {
+    fn into_reader(self) -> Self::Reader {
         BufReadReader::new(self)
     }
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index ee8f893..b2d4b53 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -4,7 +4,7 @@ use crate::machine;
 use crate::utils::{
     control_pat, noncharacter_pat, surrogate_pat, ControlToken, State as InternalState,
 };
-use crate::{DefaultEmitter, Emitter, Error, Never, Readable, Reader};
+use crate::{DefaultEmitter, Emitter, Error, IntoReader, Never, Reader};

 // this is a stack that can hold 0 to 2 Ts
 #[derive(Debug, Default, Clone, Copy)]
@@ -49,7 +49,7 @@ impl<R: Reader> Tokenizer<R> {
     ///
     /// Patches are welcome for providing an efficient implementation over async streams,
     /// iterators, files, etc, as long as any dependencies come behind featureflags.
-    pub fn new<'a, S: Readable<'a, Reader = R>>(input: S) -> Self {
+    pub fn new<'a, S: IntoReader<'a, Reader = R>>(input: S) -> Self {
         Tokenizer::<S::Reader>::new_with_emitter(input, DefaultEmitter::default())
     }
 }
@@ -93,14 +93,14 @@ impl<R: Reader, E: Emitter<R>> Tokenizer<R, E> {
     ///
     /// Use this method over [`Tokenizer::new`] when you want to have more control over string allocation for
     /// tokens.
-    pub fn new_with_emitter<'a, S: Readable<'a, Reader = R>>(input: S, emitter: E) -> Self {
+    pub fn new_with_emitter<'a, S: IntoReader<'a, Reader = R>>(input: S, emitter: E) -> Self {
         Tokenizer {
             eof: false,
             state: InternalState::Data,
             emitter,
             temporary_buffer: String::new(),
             to_reconsume: Stack2::default(),
-            reader: input.to_reader(),
+            reader: input.into_reader(),
             character_reference_code: 0,
             return_state: None,
         }
diff --git a/tests/test_spans.rs b/tests/test_spans.rs
index 93330db..61b70ce 100644
--- a/tests/test_spans.rs
+++ b/tests/test_spans.rs
@@ -6,7 +6,9 @@ use codespan_reporting::{
     files::SimpleFiles,
     term::{self, termcolor::Buffer},
 };
-use html5tokenizer::{spans::PosTracker, DefaultEmitter, Readable, StringReader, Token, Tokenizer};
+use html5tokenizer::{
+    spans::PosTracker, DefaultEmitter, IntoReader, StringReader, Token, Tokenizer,
+};

 #[test]
 fn test() {
@@ -18,7 +20,7 @@ fn test() {
     for token in Tokenizer::new_with_emitter(
         PosTracker {
-            reader: html.to_reader(),
+            reader: html.into_reader(),
             position: 0,
         },
         DefaultEmitter::<PosTracker<StringReader>, Range<usize>>::default(),
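
Taken together, a minimal caller-side sketch of what the rename means for downstream code, based only on the API visible in this diff (the HTML string and variable names are placeholders):

```rust
use html5tokenizer::{IntoReader, Tokenizer};

fn main() {
    let html = "<title>hello world</title>";

    // Tokenizer::new is untouched by this commit: it accepts any value
    // implementing the conversion trait, so a &str still works directly
    // (it converts into a StringReader, as shown in the diff above).
    for _token in Tokenizer::new(html) {
        // token handling stays exactly as before
    }

    // Callers that performed the conversion themselves only need the rename:
    //     let reader = html.to_reader();   // before this commit
    let _reader = html.into_reader();       // after this commit
}
```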