summary refs log tree commit diff
path: root/examples
diff options
context:
space:
mode:
Diffstat (limited to 'examples')
-rw-r--r--examples/noop-tokenize.rs4
-rw-r--r--examples/tokenize.rs4
2 files changed, 2 insertions, 6 deletions
diff --git a/examples/noop-tokenize.rs b/examples/noop-tokenize.rs
index 305727e..22a2192 100644
--- a/examples/noop-tokenize.rs
+++ b/examples/noop-tokenize.rs
@@ -20,9 +20,7 @@ use io::Read;
struct Sink(Vec<Token>);
impl TokenSink for Sink {
- type Handle = ();
-
- fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
+ fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult {
// Don't use the token, but make sure we don't get
// optimized out entirely.
self.0.push(token);
diff --git a/examples/tokenize.rs b/examples/tokenize.rs
index d70bfff..8728a18 100644
--- a/examples/tokenize.rs
+++ b/examples/tokenize.rs
@@ -40,9 +40,7 @@ impl TokenPrinter {
}
impl TokenSink for TokenPrinter {
- type Handle = ();
-
- fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
+ fn process_token(&mut self, token: Token, _line_number: u64) -> TokenSinkResult {
match token {
CharacterTokens(b) => {
for c in b.chars() {