| author | Martin Fischer <martin@push-f.com> | 2023-09-11 18:24:20 +0200 |
|---|---|---|
| committer | Martin Fischer <martin@push-f.com> | 2023-09-28 10:36:08 +0200 |
| commit | ab373b495c8ed659e64158b764206dd2eaa35336 (patch) | |
| tree | 2120a826e1fa6c393c33f146df8d2f7885c8275a /integration_tests/tests | |
| parent | 55729998c2217bfdbbccf9832b29d3bcd0315094 (diff) | |
refactor: decouple run_test_inner from DefaultEmitter
Diffstat (limited to 'integration_tests/tests')
| -rw-r--r-- | integration_tests/tests/test_html5lib.rs | 30 | 
1 file changed, 23 insertions, 7 deletions
diff --git a/integration_tests/tests/test_html5lib.rs b/integration_tests/tests/test_html5lib.rs
index 8fedc1a..a682cb3 100644
--- a/integration_tests/tests/test_html5lib.rs
+++ b/integration_tests/tests/test_html5lib.rs
@@ -1,11 +1,12 @@
-use std::{fs::File, io::BufReader, path::Path};
+use std::{fs::File, io::BufReader, ops::Range, path::Path};
 
 use html5lib_tests::{
     parse_tests, Error as TestError, InitialState, Output, Test, Token as TestToken,
 };
 use html5tokenizer::{
-    offset::NoopOffset, reader::Reader, CdataAction, DefaultEmitter, Event, InternalState, Token,
-    Tokenizer,
+    offset::{Offset, Position},
+    reader::Reader,
+    CdataAction, DefaultEmitter, Emitter, Error, Event, InternalState, Token, Tokenizer,
 };
 
 use similar_asserts::assert_eq;
@@ -90,14 +91,19 @@ fn run_test(fname: &str, test_i: usize, test: Test) {
     }
 }
 
-fn run_test_inner<R: Reader>(
+fn run_test_inner<R, O, E, T>(
     fname: &str,
     test_i: usize,
     test: &Test,
     state: &InitialState,
-    mut tokenizer: Tokenizer<R, NoopOffset, DefaultEmitter>,
+    mut tokenizer: Tokenizer<R, O, E>,
     tokenizer_info: &str,
-) {
+) where
+    R: Reader + Position<O>,
+    O: Offset,
+    E: Emitter<O> + Iterator<Item = T> + DrainErrors<O>,
+    T: Into<Token<O>>,
+{
     println!(
         "==== FILE {}, TEST {}, STATE {:?}, TOKENIZER {} ====",
         fname, test_i, state, tokenizer_info,
@@ -123,7 +129,7 @@ fn run_test_inner<R: Reader>(
                 tokenizer.handle_cdata_open(CdataAction::BogusComment);
                 continue;
             }
-            Event::Token(token) => token,
+            Event::Token(token) => token.into(),
         };
 
         match token {
@@ -170,3 +176,13 @@ fn run_test_inner<R: Reader>(
         test.output,
     );
 }
+
+trait DrainErrors<O> {
+    fn drain_errors(&mut self) -> Box<dyn Iterator<Item = (Error, Range<O>)> + '_>;
+}
+
+impl<O> DrainErrors<O> for DefaultEmitter<O> {
+    fn drain_errors(&mut self) -> Box<dyn Iterator<Item = (Error, Range<O>)> + '_> {
+        Box::new(self.drain_errors())
+    }
+}
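
For context, the pattern the diff introduces is a small helper trait so that the generic test runner no longer names `DefaultEmitter` directly: any emitter that can hand out its buffered `(error, span)` pairs qualifies. The sketch below is a self-contained illustration of that shape, not the crate's actual API; `SimpleError`, `TracingEmitter`, and `report_errors` are hypothetical names invented for the example, and only the `DrainErrors` signature mirrors the diff.

```rust
use std::ops::Range;

// Stand-in error type for this sketch; the real test harness pairs
// html5tokenizer::Error with an offset range.
#[derive(Debug)]
struct SimpleError(&'static str);

// Same shape as the DrainErrors trait added in the diff: generic code can
// drain buffered (error, span) pairs without naming a concrete emitter type.
trait DrainErrors<O> {
    fn drain_errors(&mut self) -> Box<dyn Iterator<Item = (SimpleError, Range<O>)> + '_>;
}

// Hypothetical emitter that buffers its errors in a Vec.
struct TracingEmitter {
    errors: Vec<(SimpleError, Range<usize>)>,
}

impl DrainErrors<usize> for TracingEmitter {
    fn drain_errors(&mut self) -> Box<dyn Iterator<Item = (SimpleError, Range<usize>)> + '_> {
        // Box the concrete Drain iterator so callers only see the trait.
        Box::new(self.errors.drain(..))
    }
}

// Analogue of run_test_inner: bounded on DrainErrors instead of a
// concrete emitter type.
fn report_errors<E: DrainErrors<usize>>(emitter: &mut E) {
    for (error, span) in emitter.drain_errors() {
        println!("{:?} at {}..{}", error, span.start, span.end);
    }
}

fn main() {
    let mut emitter = TracingEmitter {
        errors: vec![(SimpleError("eof-in-tag"), 3..4)],
    };
    report_errors(&mut emitter);
}
```

Returning a boxed iterator keeps the trait simple: each implementor can hand back whatever concrete iterator its storage provides, at the cost of one allocation per drain, which is negligible in a test harness.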
