Diffstat (limited to 'src/tokenizer')
-rw-r--r--  src/tokenizer/mod.rs  73
1 file changed, 40 insertions, 33 deletions
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 6a0649a..9cdb398 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -2152,7 +2152,7 @@ mod test {
     use super::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
     use super::interface::{CharacterTokens, EOFToken, NullCharacterToken, ParseError};
-    use super::interface::{EndTag, StartTag, Tag, TagKind};
+    use super::interface::{EndTag, StartTag, Tag};
     use super::interface::{TagToken, Token};
     use crate::util::buffer_queue::BufferQueue;
@@ -2252,16 +2252,11 @@ mod test {
         tok.sink.lines
     }
 
-    // Create a tag token
-    fn create_tag(token: String, tagkind: TagKind) -> Token {
-        let name = token;
-        let token = TagToken(Tag {
-            kind: tagkind,
-            name,
-            self_closing: false,
-            attrs: vec![],
-        });
-        token
+    // FUTURE: replace with std::assert_matches once stable
+    macro_rules! assert_matches {
+        ($expr:expr, $($args:tt)+) => {
+            assert!(matches!($expr, $($args)*), "left matches right\n left: {:?}\nright: {}", &$expr, stringify!($($args)*))
+        };
     }
 
     #[test]
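
Aside, not part of the patch: the macro added above is a local stand-in for the still-unstable std::assert_matches. It delegates to matches! and, on failure, panics with the Debug form of the value alongside the stringified pattern. Below is a minimal, self-contained sketch of how a call site reads; the Shape enum is hypothetical and used only for illustration.

    // The same macro as in the patch, reflowed here for readability.
    macro_rules! assert_matches {
        ($expr:expr, $($args:tt)+) => {
            assert!(
                matches!($expr, $($args)*),
                "left matches right\n left: {:?}\nright: {}",
                &$expr,
                stringify!($($args)*)
            )
        };
    }

    // Hypothetical enum standing in for the crate's Token/Tag types.
    #[derive(Debug)]
    #[allow(dead_code)]
    enum Shape {
        Circle { radius: u32 },
        Square { side: u32 },
    }

    fn main() {
        let shape = Shape::Circle { radius: 2 };
        // Passes: the struct pattern matches and the `if` guard holds.
        assert_matches!(shape, Shape::Circle { radius } if radius == 2);
        // A failing call such as assert_matches!(shape, Shape::Square { .. })
        // would panic, printing shape's Debug output and the pattern text.
    }
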
@@ -2300,18 +2295,24 @@ mod test {
             String::from("</b>\n"),
             String::from("</a>\n"),
         ];
-        let expected = vec![
-            (1, create_tag(String::from("a"), StartTag)),
-            (2, CharacterTokens("\n".into())),
-            (2, create_tag(String::from("b"), StartTag)),
-            (3, CharacterTokens("\n".into())),
-            (3, create_tag(String::from("b"), EndTag)),
-            (4, CharacterTokens("\n".into())),
-            (4, create_tag(String::from("a"), EndTag)),
-            (5, CharacterTokens("\n".into())),
-        ];
         let results = tokenize(vector, opts);
-        assert_eq!(results, expected);
+        assert_matches!(
+            &results[..],
+            [
+                (1, Token::TagToken(Tag{name: n1, kind: StartTag, ..})),
+                (2, CharacterTokens(c1)),
+                (2, Token::TagToken(Tag{name: n2, kind: StartTag, ..})),
+                (3, CharacterTokens(c2)),
+                (3, Token::TagToken(Tag{name: n3, kind: EndTag, ..})),
+                (4, CharacterTokens(c3)),
+                (4, Token::TagToken(Tag{name: n4, kind: EndTag, ..})),
+                (5, CharacterTokens(c4)),
+            ] if
+            n1 == "a" && c1 == "\n" &&
+            n2 == "b" && c2 == "\n" &&
+            n3 == "b" && c3 == "\n" &&
+            n4 == "a" && c4 == "\n"
+        );
     }
 
     #[test]
@@ -2329,18 +2330,24 @@ mod test {
             String::from("</b>\r\n"),
             String::from("</a>\r\n"),
         ];
-        let expected = vec![
-            (1, create_tag(String::from("a"), StartTag)),
-            (2, CharacterTokens("\n".into())),
-            (2, create_tag(String::from("b"), StartTag)),
-            (3, CharacterTokens("\n".into())),
-            (3, create_tag(String::from("b"), EndTag)),
-            (4, CharacterTokens("\n".into())),
-            (4, create_tag(String::from("a"), EndTag)),
-            (5, CharacterTokens("\n".into())),
-        ];
         let results = tokenize(vector, opts);
-        assert_eq!(results, expected);
+        assert_matches!(
+            &results[..],
+            [
+                (1, Token::TagToken(Tag{name: n1, kind: StartTag, ..})),
+                (2, CharacterTokens(c1)),
+                (2, Token::TagToken(Tag{name: n2, kind: StartTag, ..})),
+                (3, CharacterTokens(c2)),
+                (3, Token::TagToken(Tag{name: n3, kind: EndTag, ..})),
+                (4, CharacterTokens(c3)),
+                (4, Token::TagToken(Tag{name: n4, kind: EndTag, ..})),
+                (5, CharacterTokens(c4)),
+            ] if
+            n1 == "a" && c1 == "\n" &&
+            n2 == "b" && c2 == "\n" &&
+            n3 == "b" && c3 == "\n" &&
+            n4 == "a" && c4 == "\n"
+        );
     }
 
     #[test]
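
On the FUTURE comment carried into the patch: once std::assert_matches stabilizes, the local macro could be replaced by the standard one. A sketch of that form, assuming the API keeps its current nightly shape behind the assert_matches feature gate; the results value is a stand-in with the same (line number, token) shape as the tests above, not the tokenizer's real output type.

    // Nightly-only sketch; not part of this patch.
    #![feature(assert_matches)]

    use std::assert_matches::assert_matches;

    fn main() {
        // Stand-in data shaped like the (line, token) pairs asserted above.
        let results = vec![(1u64, "\n")];
        assert_matches!(&results[..], [(1, c)] if *c == "\n");
    }
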