author    Martin Fischer <martin@push-f.com>  2021-11-19 08:21:32 +0100
committer Martin Fischer <martin@push-f.com>  2021-11-19 08:21:32 +0100
commit    7207abccd9dccb15eb37f43a8f763cac99be14d4 (patch)
tree      4f1eacee30913bbae125d4afbe6bf4e17cf896db /src/tokenizer
parent    29f4f54fdb173c93f17b8515e7cb459d78bc0068 (diff)
make test sink also emit CharacterTokens & ParseError
Diffstat (limited to 'src/tokenizer')
-rw-r--r--  src/tokenizer/mod.rs | 24
1 file changed, 19 insertions, 5 deletions
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index bcbc6b7..0acdcaf 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -1525,16 +1525,16 @@ mod test {
// if current_line is being updated when process_token is called. The lines
// vector is a collection of the line numbers that each token is on.
struct LinesMatch {
- tokens: Vec<Token>,
current_str: String,
+ current_str_line: u64,
lines: Vec<(Token, u64)>,
}
impl LinesMatch {
fn new() -> LinesMatch {
LinesMatch {
- tokens: vec![],
current_str: String::new(),
+ current_str_line: 0,
lines: vec![],
}
}
@@ -1547,7 +1547,8 @@ mod test {
fn finish_str(&mut self) {
if self.current_str.len() > 0 {
let s = replace(&mut self.current_str, String::new());
- self.tokens.push(CharacterTokens(s));
+ self.push(CharacterTokens(s), self.current_str_line);
+ self.current_str_line = 0;
}
}
}
@@ -1555,6 +1556,10 @@ mod test {
impl TokenSink for LinesMatch {
type Handle = ();
+ fn end(&mut self) {
+ self.finish_str();
+ }
+
fn process_token(
&mut self,
token: Token,
@@ -1562,6 +1567,7 @@ mod test {
) -> TokenSinkResult<Self::Handle> {
match token {
CharacterTokens(b) => {
+ self.current_str_line = line_number;
self.current_str.push_str(&b);
}
@@ -1569,8 +1575,8 @@ mod test {
self.current_str.push('\0');
}
- ParseError(_) => {
- panic!("unexpected parse error");
+ token @ ParseError(_) => {
+ self.push(token, line_number);
}
TagToken(mut t) => {
@@ -1659,9 +1665,13 @@ mod test {
];
let expected = vec![
(create_tag(String::from("a"), StartTag), 1),
+ (CharacterTokens("\n".into()), 2),
(create_tag(String::from("b"), StartTag), 2),
+ (CharacterTokens("\n".into()), 3),
(create_tag(String::from("b"), EndTag), 3),
+ (CharacterTokens("\n".into()), 4),
(create_tag(String::from("a"), EndTag), 4),
+ (CharacterTokens("\n".into()), 5),
];
let results = tokenize(vector, opts);
assert_eq!(results, expected);
@@ -1684,9 +1694,13 @@ mod test {
];
let expected = vec![
(create_tag(String::from("a"), StartTag), 1),
+ (CharacterTokens("\n".into()), 2),
(create_tag(String::from("b"), StartTag), 2),
+ (CharacterTokens("\n".into()), 3),
(create_tag(String::from("b"), EndTag), 3),
+ (CharacterTokens("\n".into()), 4),
(create_tag(String::from("a"), EndTag), 4),
+ (CharacterTokens("\n".into()), 5),
];
let results = tokenize(vector, opts);
assert_eq!(results, expected);
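
For readability, here is a minimal, self-contained sketch of the line-tracking pattern this change gives the LinesMatch test sink: character tokens are buffered, the line number of the most recent chunk is remembered, and the buffer is flushed as a single token (with its line) whenever a non-character token arrives or end() is called. The Token enum, push() helper and main() driver below are simplified stand-ins invented for illustration, not the crate's real TokenSink/Token types.

// Minimal sketch of the LinesMatch line-tracking pattern after this change.
// Token, push() and main() are simplified stand-ins for illustration only;
// the real sink implements the crate's TokenSink trait and Token type.
use std::mem::replace;

#[derive(Debug, PartialEq)]
enum Token {
    Characters(String),
    StartTag(String),
    ParseError(String),
}

#[derive(Default)]
struct LinesMatch {
    current_str: String,      // buffered character data
    current_str_line: u64,    // line of the most recent character chunk
    lines: Vec<(Token, u64)>, // every emitted token with its line number
}

impl LinesMatch {
    // Record a non-character token, flushing any buffered characters first
    // so the ordering of tokens is preserved.
    fn push(&mut self, token: Token, line_number: u64) {
        self.finish_str();
        self.lines.push((token, line_number));
    }

    // Flush buffered characters as one Characters token, tagged with the
    // line number remembered in process_token.
    fn finish_str(&mut self) {
        if !self.current_str.is_empty() {
            let s = replace(&mut self.current_str, String::new());
            let line = self.current_str_line;
            self.lines.push((Token::Characters(s), line));
            self.current_str_line = 0;
        }
    }

    fn process_token(&mut self, token: Token, line_number: u64) {
        match token {
            Token::Characters(b) => {
                // Remember which line the buffered characters came from.
                self.current_str_line = line_number;
                self.current_str.push_str(&b);
            }
            other => self.push(other, line_number),
        }
    }

    // Called once at end of input so trailing characters are not dropped.
    fn end(&mut self) {
        self.finish_str();
    }
}

fn main() {
    let mut sink = LinesMatch::default();
    sink.process_token(Token::StartTag("a".into()), 1);
    sink.process_token(Token::Characters("\n".into()), 2);
    // Parse errors are now recorded with their line instead of panicking.
    sink.process_token(Token::ParseError("bad-character".into()), 2);
    sink.end();
    assert_eq!(
        sink.lines,
        vec![
            (Token::StartTag("a".into()), 1),
            (Token::Characters("\n".into()), 2),
            (Token::ParseError("bad-character".into()), 2),
        ]
    );
}

Buffering characters this way matters because the tokenizer may deliver character data in several chunks; flushing in end() is what keeps the trailing (CharacterTokens("\n"), 5) entries in the expected lists above from being lost.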