From dead23ea380ad5602430fc5398cdf5ec1bb3921c Mon Sep 17 00:00:00 2001
From: Martin Fischer <martin@push-f.com>
Date: Thu, 8 Apr 2021 15:53:14 +0200
Subject: fix scope, add short module docstring

---
 benches/html5ever.rs      |  2 +-
 examples/noop-tokenize.rs |  2 +-
 examples/tokenize.rs      |  6 +++---
 src/lib.rs                | 10 +++++++++-
 4 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/benches/html5ever.rs b/benches/html5ever.rs
index 88a1d4b..8e4bbaf 100644
--- a/benches/html5ever.rs
+++ b/benches/html5ever.rs
@@ -6,7 +6,7 @@ use std::path::PathBuf;
 
 use criterion::{black_box, Criterion};
 
-use html5tokenizer::tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};
+use html5tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};
 
 struct Sink;
 
diff --git a/examples/noop-tokenize.rs b/examples/noop-tokenize.rs
index 9557965..305727e 100644
--- a/examples/noop-tokenize.rs
+++ b/examples/noop-tokenize.rs
@@ -14,7 +14,7 @@ extern crate html5tokenizer;
 use std::default::Default;
 use std::io;
 
-use html5tokenizer::tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};
+use html5tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};
 use io::Read;
 
 struct Sink(Vec<Token>);
diff --git a/examples/tokenize.rs b/examples/tokenize.rs
index e9b0013..dc3b476 100644
--- a/examples/tokenize.rs
+++ b/examples/tokenize.rs
@@ -12,9 +12,9 @@ extern crate html5tokenizer;
 use std::default::Default;
 use std::io;
 
-use html5tokenizer::tokenizer::BufferQueue;
-use html5tokenizer::tokenizer::{CharacterTokens, EndTag, NullCharacterToken, StartTag, TagToken};
-use html5tokenizer::tokenizer::{
+use html5tokenizer::BufferQueue;
+use html5tokenizer::{CharacterTokens, EndTag, NullCharacterToken, StartTag, TagToken};
+use html5tokenizer::{
     ParseError, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
 };
 use io::Read;
diff --git a/src/lib.rs b/src/lib.rs
index 40cbf8f..5e6a620 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,6 +7,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+/*!
+The HTML5 tokenizer from the [html5ever](https://crates.io/crates/html5ever)
+crate, repackaged with its dependencies removed.
+*/
+
 #![crate_type = "dylib"]
 #![cfg_attr(test, deny(warnings))]
 #![allow(unused_parens)]
@@ -29,4 +34,7 @@ mod util {
     pub mod smallcharset;
 }
 
-pub mod tokenizer;
+mod tokenizer;
+
+#[doc(inline)]
+pub use tokenizer::*;
\ No newline at end of file
-- 
cgit v1.2.3