summary | refs | log | tree | commit | diff
path: root/examples
diff options
context:
space:
mode:
Diffstat (limited to 'examples')
-rw-r--r--  examples/noop-tokenize.rs | 8 ++++----
-rw-r--r--  examples/tokenize.rs      | 8 ++++----
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/examples/noop-tokenize.rs b/examples/noop-tokenize.rs
index d6c62f1..323c429 100644
--- a/examples/noop-tokenize.rs
+++ b/examples/noop-tokenize.rs
@@ -14,8 +14,8 @@ extern crate html5ever;
use std::default::Default;
use std::io;
-use html5ever::tendril::*;
use html5ever::tokenizer::{BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer};
+use io::Read;
struct Sink(Vec<Token>);
@@ -31,10 +31,10 @@ impl TokenSink for Sink {
}
fn main() {
- let mut chunk = ByteTendril::new();
- io::stdin().read_to_tendril(&mut chunk).unwrap();
+ let mut chunk = Vec::new();
+ io::stdin().read_to_end(&mut chunk).unwrap();
let mut input = BufferQueue::new();
- input.push_back(chunk.try_reinterpret().unwrap());
+ input.push_back(std::str::from_utf8(&chunk).unwrap().to_string());
let mut tok = Tokenizer::new(Sink(Vec::new()), Default::default());
let _ = tok.feed(&mut input);
diff --git a/examples/tokenize.rs b/examples/tokenize.rs
index c422f0c..943513a 100644
--- a/examples/tokenize.rs
+++ b/examples/tokenize.rs
@@ -12,12 +12,12 @@ extern crate html5ever;
use std::default::Default;
use std::io;
-use html5ever::tendril::*;
use html5ever::tokenizer::BufferQueue;
use html5ever::tokenizer::{CharacterTokens, EndTag, NullCharacterToken, StartTag, TagToken};
use html5ever::tokenizer::{
ParseError, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
};
+use io::Read;
#[derive(Copy, Clone)]
struct TokenPrinter {
@@ -84,10 +84,10 @@ impl TokenSink for TokenPrinter {
fn main() {
let mut sink = TokenPrinter { in_char_run: false };
- let mut chunk = ByteTendril::new();
- io::stdin().read_to_tendril(&mut chunk).unwrap();
+ let mut chunk = Vec::new();
+ io::stdin().read_to_end(&mut chunk).unwrap();
let mut input = BufferQueue::new();
- input.push_back(chunk.try_reinterpret().unwrap());
+ input.push_back(std::str::from_utf8(&chunk).unwrap().to_string());
let mut tok = Tokenizer::new(
sink,