author     Martin Fischer <martin@push-f.com>    2021-11-29 17:40:18 +0100
committer  Martin Fischer <martin@push-f.com>    2021-11-30 11:22:35 +0100
commit     0b9afa22851c80c92c10323a6ce6a4a5fcdb2954 (patch)
tree       44ac5a15784c26eb2a48fade6389584839d0cae2
parent     d7f353bd52de6d8f647f7dfa12fde10917266ada (diff)
refactor: put line numbers first in test tuples
-rw-r--r--  src/tokenizer/mod.rs  46
1 file changed, 23 insertions, 23 deletions
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index f5968b2..6a0649a 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -2164,7 +2164,7 @@ mod test {
     struct LinesMatch {
         current_str: String,
         current_str_line: u64,
-        lines: Vec<(Token, u64)>,
+        lines: Vec<(u64, Token)>,
     }
 
     impl LinesMatch {
@@ -2178,7 +2178,7 @@ mod test {
         fn push(&mut self, token: Token, line_number: u64) {
             self.finish_str();
-            self.lines.push((token, line_number));
+            self.lines.push((line_number, token));
         }
 
         fn finish_str(&mut self) {
@@ -2240,7 +2240,7 @@ mod test {
     // Take in tokens, process them, and return vector with line
     // numbers that each token is on
-    fn tokenize(input: Vec<String>, opts: TokenizerOpts) -> Vec<(Token, u64)> {
+    fn tokenize(input: Vec<String>, opts: TokenizerOpts) -> Vec<(u64, Token)> {
         let sink = LinesMatch::new();
         let mut tok = Tokenizer::new(sink, opts);
         let mut buffer = BufferQueue::new();
@@ -2301,14 +2301,14 @@ mod test {
             String::from("</a>\n"),
         ];
         let expected = vec![
-            (create_tag(String::from("a"), StartTag), 1),
-            (CharacterTokens("\n".into()), 2),
-            (create_tag(String::from("b"), StartTag), 2),
-            (CharacterTokens("\n".into()), 3),
-            (create_tag(String::from("b"), EndTag), 3),
-            (CharacterTokens("\n".into()), 4),
-            (create_tag(String::from("a"), EndTag), 4),
-            (CharacterTokens("\n".into()), 5),
+            (1, create_tag(String::from("a"), StartTag)),
+            (2, CharacterTokens("\n".into())),
+            (2, create_tag(String::from("b"), StartTag)),
+            (3, CharacterTokens("\n".into())),
+            (3, create_tag(String::from("b"), EndTag)),
+            (4, CharacterTokens("\n".into())),
+            (4, create_tag(String::from("a"), EndTag)),
+            (5, CharacterTokens("\n".into())),
         ];
         let results = tokenize(vector, opts);
         assert_eq!(results, expected);
@@ -2330,14 +2330,14 @@ mod test {
             String::from("</a>\r\n"),
         ];
         let expected = vec![
-            (create_tag(String::from("a"), StartTag), 1),
-            (CharacterTokens("\n".into()), 2),
-            (create_tag(String::from("b"), StartTag), 2),
-            (CharacterTokens("\n".into()), 3),
-            (create_tag(String::from("b"), EndTag), 3),
-            (CharacterTokens("\n".into()), 4),
-            (create_tag(String::from("a"), EndTag), 4),
-            (CharacterTokens("\n".into()), 5),
+            (1, create_tag(String::from("a"), StartTag)),
+            (2, CharacterTokens("\n".into())),
+            (2, create_tag(String::from("b"), StartTag)),
+            (3, CharacterTokens("\n".into())),
+            (3, create_tag(String::from("b"), EndTag)),
+            (4, CharacterTokens("\n".into())),
+            (4, create_tag(String::from("a"), EndTag)),
+            (5, CharacterTokens("\n".into())),
         ];
         let results = tokenize(vector, opts);
         assert_eq!(results, expected);
@@ -2348,7 +2348,7 @@ mod test {
     fn named_entities() {
         let opts = TokenizerOpts::default();
        let vector = vec![String::from("&amp;\r\n"), String::from("&aamp;\r\n")];
-        let expected = vec![(Token::CharacterTokens("&amp;\n&aamp;\n".into()), 3)];
+        let expected = vec![(3, Token::CharacterTokens("&amp;\n&aamp;\n".into()))];
         let results = tokenize(vector, opts);
         assert_eq!(results, expected);
     }
@@ -2359,9 +2359,9 @@ mod test {
         let opts = TokenizerOpts::default();
         let vector = vec![String::from("&amp;\r\n"), String::from("&aamp;\r\n")];
         let expected = vec![
-            (CharacterTokens("&\n".into()), 3),
-            (ParseError("Invalid character reference".into()), 3),
-            (CharacterTokens("&aamp;\n".into()), 4),
+            (3, CharacterTokens("&\n".into())),
+            (3, ParseError("Invalid character reference".into())),
+            (4, CharacterTokens("&aamp;\n".into())),
         ];
         let results = tokenize(vector, opts);
         assert_eq!(results, expected);