Diffstat (limited to 'tests')
-rw-r--r--  tests/test_spans.rs | 78 ++++++++++++++++++++++++++++++++++++++++++++++++------------------------------
 1 file changed, 48 insertions(+), 30 deletions(-)
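
The updated tests consume the parser as an iterator of `(Token, Trace)` pairs, with the span information moved onto the `Trace` side (`trace.span`, `trace.name_span`, `trace.attribute_traces[attr.trace_idx().unwrap()]`, and so on). A minimal sketch of that iteration pattern, mirroring the labeler closures below; the helper name is hypothetical, `Parser` is the type alias defined by this test file, and the `Range<usize>` span type is an assumption based on how the spans are used as labels here:

```rust
use std::ops::Range;

use html5tokenizer::trace::Trace;

// Hypothetical helper mirroring the labeler closures in the updated tests:
// walk the (Token, Trace) pairs yielded by the parser and collect every
// start-tag name span. `Parser` is the alias defined in test_spans.rs.
fn start_tag_name_spans(parser: Parser) -> Vec<Range<usize>> {
    let mut spans = Vec::new();
    for (_token, trace) in parser.flatten() {
        if let Trace::StartTag(trace) = trace {
            spans.push(trace.name_span);
        }
    }
    spans
}
```
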
diff --git a/tests/test_spans.rs b/tests/test_spans.rs
index 71a6c4b..0e95be0 100644
--- a/tests/test_spans.rs
+++ b/tests/test_spans.rs
@@ -10,7 +10,8 @@ use codespan_reporting::{
use html5tokenizer::{
offset::PosTrackingReader,
reader::{IntoReader, Reader},
- NaiveParser, Token, TracingEmitter,
+ trace::Trace,
+ NaiveParser, Token,
};
use insta::assert_snapshot;
use similar_asserts::assert_eq;
@@ -31,7 +32,7 @@ where
PosTrackingReader::new(
Box::new(reader.into_reader()) as Box<dyn Reader<Error = Infallible>>
),
- TracingEmitter::default(),
+ html5tokenizer::TracingEmitter::default(),
)
}
@@ -76,9 +77,9 @@ fn start_tag_span() {
let html = "<x> <xyz> <xyz > <xyz/>";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- for token in parser.flatten() {
- if let Token::StartTag(tag) = token {
- labels.push((tag.span, ""));
+ for (_, trace) in parser.flatten() {
+ if let Trace::StartTag(trace) = trace {
+ labels.push((trace.span, ""));
}
}
labels
@@ -94,9 +95,9 @@ fn end_tag_span() {
let html = "</x> </xyz> </xyz > </xyz/>";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- for token in parser.flatten() {
- if let Token::EndTag(tag) = token {
- labels.push((tag.span, ""));
+ for (_, trace) in parser.flatten() {
+ if let Trace::EndTag(trace) = trace {
+ labels.push((trace.span, ""));
}
}
labels
@@ -112,9 +113,9 @@ fn start_tag_name_span() {
let html = "<x> <xyz> <xyz > <xyz/>";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- for token in parser.flatten() {
- if let Token::StartTag(tag) = token {
- labels.push((tag.name_span, ""));
+ for (_, trace) in parser.flatten() {
+ if let Trace::StartTag(trace) = trace {
+ labels.push((trace.name_span, ""));
}
}
labels
@@ -130,9 +131,9 @@ fn end_tag_name_span() {
let html = "</x> </xyz> </xyz > </xyz/>";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- for token in parser.flatten() {
- if let Token::EndTag(tag) = token {
- labels.push((tag.name_span, ""));
+ for (_, trace) in parser.flatten() {
+ if let Trace::EndTag(trace) = trace {
+ labels.push((trace.name_span, ""));
}
}
labels
@@ -148,11 +149,15 @@ fn attribute_name_span() {
let html = "<test x xyz y=VAL xy=VAL z = VAL yzx = VAL>";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- let Token::StartTag(tag) = parser.flatten().next().unwrap() else {
+ let (Token::StartTag(tag), Trace::StartTag(trace)) = parser.flatten().next().unwrap()
+ else {
panic!("expected start tag")
};
for attr in &tag.attributes {
- labels.push((attr.name_span(), ""));
+ labels.push((
+ trace.attribute_traces[attr.trace_idx().unwrap()].name_span(),
+ "",
+ ));
}
labels
};
@@ -167,11 +172,17 @@ fn attribute_value_span() {
let html = "<test x=unquoted y = unquoted z='single-quoted' zz=\"double-quoted\" empty=''>";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- let Token::StartTag(tag) = parser.flatten().next().unwrap() else {
+ let (Token::StartTag(tag), Trace::StartTag(trace)) = parser.flatten().next().unwrap()
+ else {
panic!("expected start tag")
};
for attr in &tag.attributes {
- labels.push((attr.value_span().unwrap(), ""));
+ labels.push((
+ trace.attribute_traces[attr.trace_idx().unwrap()]
+ .value_span()
+ .unwrap(),
+ "",
+ ));
}
labels
};
@@ -186,11 +197,17 @@ fn attribute_value_with_char_ref() {
let html = "<test x=&amp; y='&amp;' z=\"&amp;\">";
let labeler = |parser: Parser| {
let mut labels = Vec::new();
- let Token::StartTag(tag) = parser.flatten().next().unwrap() else {
+ let (Token::StartTag(tag), Trace::StartTag(trace)) = parser.flatten().next().unwrap()
+ else {
panic!("expected start tag")
};
for attr in &tag.attributes {
- labels.push((attr.value_span().unwrap(), ""));
+ labels.push((
+ trace.attribute_traces[attr.trace_idx().unwrap()]
+ .value_span()
+ .unwrap(),
+ "",
+ ));
}
labels
};
@@ -224,10 +241,10 @@ fn comment_data_span() {
let mut annotated = String::new();
for case in cases {
let labeler = |parser: Parser| {
- let Token::Comment(comment) = parser.flatten().next().unwrap() else {
+ let (_, Trace::Comment(comment)) = parser.flatten().next().unwrap() else {
panic!("expected comment");
};
- vec![(comment.data_span(), "")]
+ vec![(comment.data_span, "")]
};
annotated.push_str(&test_and_annotate(case, labeler));
@@ -263,10 +280,11 @@ fn comment_data_span() {
"###);
for (idx, case) in cases.iter().enumerate() {
- let Token::Comment(comment) = parser(*case).flatten().next().unwrap() else {
+ let (Token::Comment(data), Trace::Comment(trace)) = parser(*case).flatten().next().unwrap()
+ else {
panic!("expected comment");
};
- assert_eq!(case[comment.data_span()], comment.data, "case {idx}");
+ assert_eq!(case[trace.data_span], data, "case {idx}");
}
}
@@ -280,10 +298,10 @@ fn doctype_span() {
let mut annotated = String::new();
for case in cases {
let labeler = |parser: Parser| {
- let Token::Doctype(doctype) = parser.flatten().next().unwrap() else {
+ let (_, Trace::Doctype(trace)) = parser.flatten().next().unwrap() else {
panic!("expected doctype");
};
- vec![(doctype.span, "")]
+ vec![(trace.span(), "")]
};
annotated.push_str(&test_and_annotate(case, labeler));
}
@@ -304,18 +322,18 @@ fn doctype_id_spans() {
let mut annotated = String::new();
for case in cases {
let labeler = |parser: Parser| {
- let Token::Doctype(doctype) = parser.flatten().next().unwrap() else {
+ let (_, Trace::Doctype(trace)) = parser.flatten().next().unwrap() else {
panic!("expected doctype");
};
let mut labels = Vec::new();
- if let Some(name_span) = doctype.name_span() {
+ if let Some(name_span) = trace.name_span() {
labels.push((name_span, "name"));
}
- if let Some(public_id_span) = doctype.public_id_span() {
+ if let Some(public_id_span) = trace.public_id_span() {
labels.push((public_id_span, "public id"));
}
- if let Some(system_id_span) = doctype.system_id_span() {
+ if let Some(system_id_span) = trace.system_id_span() {
labels.push((system_id_span, "system id"));
}
labels