From 912f45e2b28b1904b3187d7bca39dcecbfd3e601 Mon Sep 17 00:00:00 2001
From: Martin Fischer <martin@push-f.com>
Date: Mon, 29 Nov 2021 08:51:35 +0100
Subject: refactor: move return out of go! to clarify flow

#!/usr/bin/env python3
"""One-shot refactoring script for src/tokenizer/mod.rs.

Moves the `return` out of the go! macro's expansion and onto the call
sites instead: inside the macro definition the `return EXPR;` forms are
reduced to plain `EXPR`, and every state-transferring call site
(`go!(self: to ...)`, `go!(self: consume_char_ref ...)`,
`go!(self: emit_tag ...)`) is prefixed with `return`.
"""
import re

# Raw strings avoid invalid escape sequences like '\$' in plain literals.
# Compiled once instead of per line.
# Macro definition: `return $me.emit_current_tag();` -> `$me.emit_current_tag()`
EMIT_TAG_RE = re.compile(r'return (\$me\.emit_current_tag\(\));')
# Macro definition: `return ProcessResult::Continue;` -> `ProcessResult::Continue`
CONTINUE_RE = re.compile(r'return (ProcessResult::Continue);')
# Call sites: prefix the whole match with `return `.
CALLSITE_RE = re.compile(r'go!\(self: (to|consume_char_ref|emit_tag)')


def rewrite_line(line):
    """Return the transformed form of a single source line.

    Macro-definition lines (identified by the `$me` metavariable) have
    their `return` dropped; ordinary lines get `return ` prepended to
    state-transferring go! invocations; all other lines pass through.
    """
    if '$me.emit_current' in line:
        return EMIT_TAG_RE.sub(r'\1', line)
    if '$me:ident' in line:
        return CONTINUE_RE.sub(r'\1', line)
    # \g<0> is the entire match, equivalent to the original lambda.
    return CALLSITE_RE.sub(r'return \g<0>', line)


def main(path='src/tokenizer/mod.rs'):
    """Rewrite `path` in place, one line at a time."""
    # `with` closes the handle (the original leaked it); joining a
    # generator avoids quadratic `text +=` string concatenation.
    with open(path) as src:
        text = ''.join(rewrite_line(line) for line in src)
    with open(path, 'w') as dst:
        dst.write(text)


if __name__ == '__main__':
    main()
---
 src/tokenizer/mod.rs | 352 +++++++++++++++++++++++++--------------------------
 1 file changed, 176 insertions(+), 176 deletions(-)

(limited to 'src/tokenizer')

diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 299f612..e54eb46 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -602,17 +602,17 @@ macro_rules! sh_trace ( ( $me:ident : $($cmds:tt)* ) => ( shorthand!($me: $($cmd
 
 // A little DSL for sequencing shorthand actions.
 macro_rules! go (
-    ( $me:ident : to $s:ident                    ) => ({ $me.state = states::$s; return ProcessResult::Continue;           });
-    ( $me:ident : to $s:ident $k1:expr           ) => ({ $me.state = states::$s($k1); return ProcessResult::Continue;      });
-    ( $me:ident : to $s:ident $k1:ident $k2:expr ) => ({ $me.state = states::$s($k1($k2)); return ProcessResult::Continue; });
+    ( $me:ident : to $s:ident                    ) => ({ $me.state = states::$s; ProcessResult::Continue           });
+    ( $me:ident : to $s:ident $k1:expr           ) => ({ $me.state = states::$s($k1); ProcessResult::Continue      });
+    ( $me:ident : to $s:ident $k1:ident $k2:expr ) => ({ $me.state = states::$s($k1($k2)); ProcessResult::Continue });
 
-    ( $me:ident : consume_char_ref             ) => ({ $me.consume_char_ref(None); return ProcessResult::Continue;         });
-    ( $me:ident : consume_char_ref $addnl:expr ) => ({ $me.consume_char_ref(Some($addnl)); return ProcessResult::Continue; });
+    ( $me:ident : consume_char_ref             ) => ({ $me.consume_char_ref(None); ProcessResult::Continue         });
+    ( $me:ident : consume_char_ref $addnl:expr ) => ({ $me.consume_char_ref(Some($addnl)); ProcessResult::Continue });
 
     // We have a default next state after emitting a tag, but the sink can override.
     ( $me:ident : emit_tag $s:ident ) => ({
         $me.state = states::$s;
-        return $me.emit_current_tag();
+        $me.emit_current_tag()
     });
 
     // If nothing else matched, it's a single command
@@ -667,10 +667,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: emit '\0');
                     }
                     FromSet('&') => {
-                        go!(self: consume_char_ref);
+                        return go!(self: consume_char_ref);
                     }
                     FromSet('<') => {
-                        go!(self: to TagOpen);
+                        return go!(self: to TagOpen);
                     }
                     FromSet(c) => {
                         go!(self: emit c);
@@ -687,10 +687,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: emit '\u{fffd}');
                     }
                     FromSet('&') => {
-                        go!(self: consume_char_ref);
+                        return go!(self: consume_char_ref);
                     }
                     FromSet('<') => {
-                        go!(self: to RawLessThanSign Rcdata);
+                        return go!(self: to RawLessThanSign Rcdata);
                     }
                     FromSet(c) => {
                         go!(self: emit c);
@@ -707,7 +707,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: emit '\u{fffd}');
                     }
                     FromSet('<') => {
-                        go!(self: to RawLessThanSign Rawtext);
+                        return go!(self: to RawLessThanSign Rawtext);
                     }
                     FromSet(c) => {
                         go!(self: emit c);
@@ -724,7 +724,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: emit '\u{fffd}');
                     }
                     FromSet('<') => {
-                        go!(self: to RawLessThanSign ScriptData);
+                        return go!(self: to RawLessThanSign ScriptData);
                     }
                     FromSet(c) => {
                         go!(self: emit c);
@@ -742,10 +742,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     }
                     FromSet('-') => {
                         go!(self: emit '-');
-                        go!(self: to ScriptDataEscapedDash Escaped);
+                        return go!(self: to ScriptDataEscapedDash Escaped);
                     }
                     FromSet('<') => {
-                        go!(self: to RawLessThanSign ScriptDataEscaped Escaped);
+                        return go!(self: to RawLessThanSign ScriptDataEscaped Escaped);
                     }
                     FromSet(c) => {
                         go!(self: emit c);
@@ -763,11 +763,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     }
                     FromSet('-') => {
                         go!(self: emit '-');
-                        go!(self: to ScriptDataEscapedDash DoubleEscaped);
+                        return go!(self: to ScriptDataEscapedDash DoubleEscaped);
                     }
                     FromSet('<') => {
                         go!(self: emit '<');
-                        go!(self: to RawLessThanSign ScriptDataEscaped DoubleEscaped);
+                        return go!(self: to RawLessThanSign ScriptDataEscaped DoubleEscaped);
                     }
                     FromSet(c) => {
                         go!(self: emit c);
@@ -795,27 +795,27 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '!' => {
                         go!(self: clear_temp);
-                        go!(self: to MarkupDeclarationOpen);
+                        return go!(self: to MarkupDeclarationOpen);
                     }
                     '/' => {
-                        go!(self: to EndTagOpen);
+                        return go!(self: to EndTagOpen);
                     }
                     '?' => {
                         go!(self: error);
                         go!(self: clear_comment);
                         go!(self: push_comment '?');
-                        go!(self: to BogusComment);
+                        return go!(self: to BogusComment);
                     }
                     c => match lower_ascii_letter(c) {
                         Some(cl) => {
                             go!(self: create_tag StartTag cl);
-                            go!(self: to TagName);
+                            return go!(self: to TagName);
                         }
                         None => {
                             go!(self: error);
                             go!(self: emit '<');
                             self.reconsume = true;
-                            go!(self: to Data);
+                            return go!(self: to Data);
                         }
                     },
                 }
@@ -826,24 +826,24 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '>' => {
                         go!(self: error);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: clear_comment);
                         go!(self: push_comment '\u{fffd}');
-                        go!(self: to BogusComment);
+                        return go!(self: to BogusComment);
                     }
                     c => match lower_ascii_letter(c) {
                         Some(cl) => {
                             go!(self: create_tag EndTag cl);
-                            go!(self: to TagName);
+                            return go!(self: to TagName);
                         }
                         None => {
                             go!(self: error);
                             go!(self: clear_comment);
                             go!(self: push_comment c);
-                            go!(self: to BogusComment);
+                            return go!(self: to BogusComment);
                         }
                     },
                 }
@@ -853,13 +853,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::TagName => loop {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
-                        go!(self: to BeforeAttributeName);
+                        return go!(self: to BeforeAttributeName);
                     }
                     '/' => {
-                        go!(self: to SelfClosingStartTag);
+                        return go!(self: to SelfClosingStartTag);
                     }
                     '>' => {
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     '\0' => {
                         go!(self: error);
@@ -876,7 +876,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '/' => {
                         go!(self: clear_temp);
-                        go!(self: to RawEndTagOpen ScriptDataEscaped Escaped);
+                        return go!(self: to RawEndTagOpen ScriptDataEscaped Escaped);
                     }
                     c => match lower_ascii_letter(c) {
                         Some(cl) => {
@@ -884,12 +884,12 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                             go!(self: push_temp cl);
                             go!(self: emit '<');
                             go!(self: emit c);
-                            go!(self: to ScriptDataEscapeStart DoubleEscaped);
+                            return go!(self: to ScriptDataEscapeStart DoubleEscaped);
                         }
                         None => {
                             go!(self: emit '<');
                             self.reconsume = true;
-                            go!(self: to RawData ScriptDataEscaped Escaped);
+                            return go!(self: to RawData ScriptDataEscaped Escaped);
                         }
                     },
                 }
@@ -901,11 +901,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     '/' => {
                         go!(self: clear_temp);
                         go!(self: emit '/');
-                        go!(self: to ScriptDataDoubleEscapeEnd);
+                        return go!(self: to ScriptDataDoubleEscapeEnd);
                     }
                     _ => {
                         self.reconsume = true;
-                        go!(self: to RawData ScriptDataEscaped DoubleEscaped);
+                        return go!(self: to RawData ScriptDataEscaped DoubleEscaped);
                     }
                 }
             },
@@ -916,17 +916,17 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '/' => {
                         go!(self: clear_temp);
-                        go!(self: to RawEndTagOpen kind);
+                        return go!(self: to RawEndTagOpen kind);
                     }
                     '!' if kind == ScriptData => {
                         go!(self: emit '<');
                         go!(self: emit '!');
-                        go!(self: to ScriptDataEscapeStart Escaped);
+                        return go!(self: to ScriptDataEscapeStart Escaped);
                     }
                     _ => {
                         go!(self: emit '<');
                         self.reconsume = true;
-                        go!(self: to RawData kind);
+                        return go!(self: to RawData kind);
                     }
                 }
             },
@@ -938,13 +938,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     Some(cl) => {
                         go!(self: create_tag EndTag cl);
                         go!(self: push_temp c);
-                        go!(self: to RawEndTagName kind);
+                        return go!(self: to RawEndTagName kind);
                     }
                     None => {
                         go!(self: emit '<');
                         go!(self: emit '/');
                         self.reconsume = true;
-                        go!(self: to RawData kind);
+                        return go!(self: to RawData kind);
                     }
                 }
             },
@@ -955,13 +955,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 if self.have_appropriate_end_tag() {
                     match c {
                         '\t' | '\n' | '\x0C' | ' ' => {
-                            go!(self: to BeforeAttributeName);
+                            return go!(self: to BeforeAttributeName);
                         }
                         '/' => {
-                            go!(self: to SelfClosingStartTag);
+                            return go!(self: to SelfClosingStartTag);
                         }
                         '>' => {
-                            go!(self: emit_tag Data);
+                            return go!(self: emit_tag Data);
                         }
                         _ => (),
                     }
@@ -978,7 +978,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: emit '/');
                         go!(self: emit_temp);
                         self.reconsume = true;
-                        go!(self: to RawData kind);
+                        return go!(self: to RawData kind);
                     }
                 }
             },
@@ -995,7 +995,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         };
                         {
                             go!(self: emit c);
-                            go!(self: to RawData ScriptDataEscaped esc);
+                            return go!(self: to RawData ScriptDataEscaped esc);
                         };
                     }
                     _ => match lower_ascii_letter(c) {
@@ -1005,7 +1005,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         }
                         None => {
                             self.reconsume = true;
-                            go!(self: to RawData ScriptDataEscaped Escaped);
+                            return go!(self: to RawData ScriptDataEscaped Escaped);
                         }
                     },
                 }
@@ -1016,11 +1016,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '-' => {
                         go!(self: emit '-');
-                        go!(self: to ScriptDataEscapeStartDash);
+                        return go!(self: to ScriptDataEscapeStartDash);
                     }
                     _ => {
                         self.reconsume = true;
-                        go!(self: to RawData ScriptData);
+                        return go!(self: to RawData ScriptData);
                     }
                 }
             },
@@ -1030,11 +1030,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '-' => {
                         go!(self: emit '-');
-                        go!(self: to ScriptDataEscapedDashDash Escaped);
+                        return go!(self: to ScriptDataEscapedDashDash Escaped);
                     }
                     _ => {
                         self.reconsume = true;
-                        go!(self: to RawData ScriptData);
+                        return go!(self: to RawData ScriptData);
                     }
                 }
             },
@@ -1044,7 +1044,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '-' => {
                         go!(self: emit '-');
-                        go!(self: to ScriptDataEscapedDashDash kind);
+                        return go!(self: to ScriptDataEscapedDashDash kind);
                     }
                     '<' => {
                         if kind == DoubleEscaped {
@@ -1053,17 +1053,17 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                             };
                         }
                         {
-                            go!(self: to RawLessThanSign ScriptDataEscaped kind);
+                            return go!(self: to RawLessThanSign ScriptDataEscaped kind);
                         };
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: emit '\u{fffd}');
-                        go!(self: to RawData ScriptDataEscaped kind);
+                        return go!(self: to RawData ScriptDataEscaped kind);
                     }
                     c => {
                         go!(self: emit c);
-                        go!(self: to RawData ScriptDataEscaped kind);
+                        return go!(self: to RawData ScriptDataEscaped kind);
                     }
                 }
             },
@@ -1081,21 +1081,21 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                             };
                         }
                         {
-                            go!(self: to RawLessThanSign ScriptDataEscaped kind);
+                            return go!(self: to RawLessThanSign ScriptDataEscaped kind);
                         };
                     }
                     '>' => {
                         go!(self: emit '>');
-                        go!(self: to RawData ScriptData);
+                        return go!(self: to RawData ScriptData);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: emit '\u{fffd}');
-                        go!(self: to RawData ScriptDataEscaped kind);
+                        return go!(self: to RawData ScriptDataEscaped kind);
                     }
                     c => {
                         go!(self: emit c);
-                        go!(self: to RawData ScriptDataEscaped kind);
+                        return go!(self: to RawData ScriptDataEscaped kind);
                     }
                 }
             },
@@ -1112,7 +1112,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         };
                         {
                             go!(self: emit c);
-                            go!(self: to RawData ScriptDataEscaped esc);
+                            return go!(self: to RawData ScriptDataEscaped esc);
                         };
                     }
                     _ => match lower_ascii_letter(c) {
@@ -1122,7 +1122,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         }
                         None => {
                             self.reconsume = true;
-                            go!(self: to RawData ScriptDataEscaped DoubleEscaped);
+                            return go!(self: to RawData ScriptDataEscaped DoubleEscaped);
                         }
                     },
                 }
@@ -1133,27 +1133,27 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => (),
                     '/' => {
-                        go!(self: to SelfClosingStartTag);
+                        return go!(self: to SelfClosingStartTag);
                     }
                     '>' => {
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: create_attr '\u{fffd}');
-                        go!(self: to AttributeName);
+                        return go!(self: to AttributeName);
                     }
                     c => match lower_ascii_letter(c) {
                         Some(cl) => {
                             go!(self: create_attr cl);
-                            go!(self: to AttributeName);
+                            return go!(self: to AttributeName);
                         }
                         None => {
                             go_match!(self: c,
                             '"' , '\'' , '<' , '=' => error);
                             {
                                 go!(self: create_attr c);
-                                go!(self: to AttributeName);
+                                return go!(self: to AttributeName);
                             };
                         }
                     },
@@ -1164,16 +1164,16 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AttributeName => loop {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
-                        go!(self: to AfterAttributeName);
+                        return go!(self: to AfterAttributeName);
                     }
                     '/' => {
-                        go!(self: to SelfClosingStartTag);
+                        return go!(self: to SelfClosingStartTag);
                     }
                     '=' => {
-                        go!(self: to BeforeAttributeValue);
+                        return go!(self: to BeforeAttributeValue);
                     }
                     '>' => {
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     '\0' => {
                         go!(self: error);
@@ -1199,30 +1199,30 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => (),
                     '/' => {
-                        go!(self: to SelfClosingStartTag);
+                        return go!(self: to SelfClosingStartTag);
                     }
                     '=' => {
-                        go!(self: to BeforeAttributeValue);
+                        return go!(self: to BeforeAttributeValue);
                     }
                     '>' => {
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: create_attr '\u{fffd}');
-                        go!(self: to AttributeName);
+                        return go!(self: to AttributeName);
                     }
                     c => match lower_ascii_letter(c) {
                         Some(cl) => {
                             go!(self: create_attr cl);
-                            go!(self: to AttributeName);
+                            return go!(self: to AttributeName);
                         }
                         None => {
                             go_match!(self: c,
                             '"' , '\'' , '<' => error);
                             {
                                 go!(self: create_attr c);
-                                go!(self: to AttributeName);
+                                return go!(self: to AttributeName);
                             };
                         }
                     },
@@ -1239,25 +1239,25 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     }
                     '"' => {
                         go!(self: discard_char input);
-                        go!(self: to AttributeValue DoubleQuoted);
+                        return go!(self: to AttributeValue DoubleQuoted);
                     }
                     '\'' => {
                         go!(self: discard_char input);
-                        go!(self: to AttributeValue SingleQuoted);
+                        return go!(self: to AttributeValue SingleQuoted);
                     }
                     '\0' => {
                         go!(self: discard_char input);
                         go!(self: error);
                         go!(self: push_value '\u{fffd}');
-                        go!(self: to AttributeValue Unquoted);
+                        return go!(self: to AttributeValue Unquoted);
                     }
                     '>' => {
                         go!(self: discard_char input);
                         go!(self: error);
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     _ => {
-                        go!(self: to AttributeValue Unquoted);
+                        return go!(self: to AttributeValue Unquoted);
                     }
                 }
             },
@@ -1266,10 +1266,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AttributeValue(DoubleQuoted) => loop {
                 match pop_except_from!(self, input, small_char_set!('\r' '"' '&' '\0' '\n')) {
                     FromSet('"') => {
-                        go!(self: to AfterAttributeValueQuoted);
+                        return go!(self: to AfterAttributeValueQuoted);
                     }
                     FromSet('&') => {
-                        go!(self: consume_char_ref '"');
+                        return go!(self: consume_char_ref '"');
                     }
                     FromSet('\0') => {
                         go!(self: error);
@@ -1288,10 +1288,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AttributeValue(SingleQuoted) => loop {
                 match pop_except_from!(self, input, small_char_set!('\r' '\'' '&' '\0' '\n')) {
                     FromSet('\'') => {
-                        go!(self: to AfterAttributeValueQuoted);
+                        return go!(self: to AfterAttributeValueQuoted);
                     }
                     FromSet('&') => {
-                        go!(self: consume_char_ref '\'');
+                        return go!(self: consume_char_ref '\'');
                     }
                     FromSet('\0') => {
                         go!(self: error);
@@ -1314,13 +1314,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     small_char_set!('\r' '\t' '\n' '\x0C' ' ' '&' '>' '\0')
                 ) {
                     FromSet('\t') | FromSet('\n') | FromSet('\x0C') | FromSet(' ') => {
-                        go!(self: to BeforeAttributeName);
+                        return go!(self: to BeforeAttributeName);
                     }
                     FromSet('&') => {
-                        go!(self: consume_char_ref '>');
+                        return go!(self: consume_char_ref '>');
                     }
                     FromSet('>') => {
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     FromSet('\0') => {
                         go!(self: error);
@@ -1343,18 +1343,18 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AfterAttributeValueQuoted => loop {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
-                        go!(self: to BeforeAttributeName);
+                        return go!(self: to BeforeAttributeName);
                     }
                     '/' => {
-                        go!(self: to SelfClosingStartTag);
+                        return go!(self: to SelfClosingStartTag);
                     }
                     '>' => {
-                        go!(self: emit_tag Data);
+                        return go!(self: emit_tag Data);
                     }
                     _ => {
                         go!(self: error);
                         self.reconsume = true;
-                        go!(self: to BeforeAttributeName);
+                        return go!(self: to BeforeAttributeName);
                     }
                 }
             },
@@ -1365,13 +1365,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     '>' => {
                         self.current_tag_self_closing = true;
                         {
-                            go!(self: emit_tag Data);
+                            return go!(self: emit_tag Data);
                         };
                     }
                     _ => {
                         go!(self: error);
                         self.reconsume = true;
-                        go!(self: to BeforeAttributeName);
+                        return go!(self: to BeforeAttributeName);
                     }
                 }
             },
@@ -1380,21 +1380,21 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::CommentStart => loop {
                 match get_char!(self, input) {
                     '-' => {
-                        go!(self: to CommentStartDash);
+                        return go!(self: to CommentStartDash);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: push_comment '\u{fffd}');
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                     '>' => {
                         go!(self: error);
                         go!(self: emit_comment);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     c => {
                         go!(self: push_comment c);
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                 }
             },
@@ -1403,22 +1403,22 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::CommentStartDash => loop {
                 match get_char!(self, input) {
                     '-' => {
-                        go!(self: to CommentEnd);
+                        return go!(self: to CommentEnd);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: append_comment "-\u{fffd}");
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                     '>' => {
                         go!(self: error);
                         go!(self: emit_comment);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     c => {
                         go!(self: push_comment '-');
                         go!(self: push_comment c);
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                 }
             },
@@ -1427,7 +1427,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::Comment => loop {
                 match get_char!(self, input) {
                     '-' => {
-                        go!(self: to CommentEndDash);
+                        return go!(self: to CommentEndDash);
                     }
                     '\0' => {
                         go!(self: error);
@@ -1443,17 +1443,17 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::CommentEndDash => loop {
                 match get_char!(self, input) {
                     '-' => {
-                        go!(self: to CommentEnd);
+                        return go!(self: to CommentEnd);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: append_comment "-\u{fffd}");
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                     c => {
                         go!(self: push_comment '-');
                         go!(self: push_comment c);
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                 }
             },
@@ -1463,16 +1463,16 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '>' => {
                         go!(self: emit_comment);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: append_comment "--\u{fffd}");
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                     '!' => {
                         go!(self: error);
-                        go!(self: to CommentEndBang);
+                        return go!(self: to CommentEndBang);
                     }
                     '-' => {
                         go!(self: error);
@@ -1482,7 +1482,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: error);
                         go!(self: append_comment "--");
                         go!(self: push_comment c);
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                 }
             },
@@ -1492,21 +1492,21 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '-' => {
                         go!(self: append_comment "--!");
-                        go!(self: to CommentEndDash);
+                        return go!(self: to CommentEndDash);
                     }
                     '>' => {
                         go!(self: emit_comment);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '\0' => {
                         go!(self: error);
                         go!(self: append_comment "--!\u{fffd}");
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                     c => {
                         go!(self: append_comment "--!");
                         go!(self: push_comment c);
-                        go!(self: to Comment);
+                        return go!(self: to Comment);
                     }
                 }
             },
@@ -1515,12 +1515,12 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::Doctype => loop {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
-                        go!(self: to BeforeDoctypeName);
+                        return go!(self: to BeforeDoctypeName);
                     }
                     _ => {
                         go!(self: error);
                         self.reconsume = true;
-                        go!(self: to BeforeDoctypeName);
+                        return go!(self: to BeforeDoctypeName);
                     }
                 }
             },
@@ -1533,19 +1533,19 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: error);
                         go!(self: create_doctype);
                         go!(self: push_doctype_name '\u{fffd}');
-                        go!(self: to DoctypeName);
+                        return go!(self: to DoctypeName);
                     }
                     '>' => {
                         go!(self: error);
                         go!(self: create_doctype);
                         go!(self: force_quirks);
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     c => {
                         go!(self: create_doctype);
                         go!(self: push_doctype_name (c.to_ascii_lowercase()));
-                        go!(self: to DoctypeName);
+                        return go!(self: to DoctypeName);
                     }
                 }
             },
@@ -1555,11 +1555,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
                         go!(self: clear_temp);
-                        go!(self: to AfterDoctypeName);
+                        return go!(self: to AfterDoctypeName);
                     }
                     '>' => {
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '\0' => {
                         go!(self: error);
@@ -1575,23 +1575,23 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AfterDoctypeName => loop {
                 if eat!(self, input, "public") {
                     {
-                        go!(self: to AfterDoctypeKeyword Public);
+                        return go!(self: to AfterDoctypeKeyword Public);
                     };
                 } else if eat!(self, input, "system") {
                     {
-                        go!(self: to AfterDoctypeKeyword System);
+                        return go!(self: to AfterDoctypeKeyword System);
                     };
                 } else {
                     match get_char!(self, input) {
                         '\t' | '\n' | '\x0C' | ' ' => (),
                         '>' => {
                             go!(self: emit_doctype);
-                            go!(self: to Data);
+                            return go!(self: to Data);
                         }
                         _ => {
                             go!(self: error);
                             go!(self: force_quirks);
-                            go!(self: to BogusDoctype);
+                            return go!(self: to BogusDoctype);
                         }
                     }
                 }
@@ -1601,28 +1601,28 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AfterDoctypeKeyword(kind) => loop {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
-                        go!(self: to BeforeDoctypeIdentifier kind);
+                        return go!(self: to BeforeDoctypeIdentifier kind);
                     }
                     '"' => {
                         go!(self: error);
                         go!(self: clear_doctype_id kind);
-                        go!(self: to DoctypeIdentifierDoubleQuoted kind);
+                        return go!(self: to DoctypeIdentifierDoubleQuoted kind);
                     }
                     '\'' => {
                         go!(self: error);
                         go!(self: clear_doctype_id kind);
-                        go!(self: to DoctypeIdentifierSingleQuoted kind);
+                        return go!(self: to DoctypeIdentifierSingleQuoted kind);
                     }
                     '>' => {
                         go!(self: error);
                         go!(self: force_quirks);
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     _ => {
                         go!(self: error);
                         go!(self: force_quirks);
-                        go!(self: to BogusDoctype);
+                        return go!(self: to BogusDoctype);
                     }
                 }
             },
@@ -1633,22 +1633,22 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     '\t' | '\n' | '\x0C' | ' ' => (),
                     '"' => {
                         go!(self: clear_doctype_id kind);
-                        go!(self: to DoctypeIdentifierDoubleQuoted kind);
+                        return go!(self: to DoctypeIdentifierDoubleQuoted kind);
                     }
                     '\'' => {
                         go!(self: clear_doctype_id kind);
-                        go!(self: to DoctypeIdentifierSingleQuoted kind);
+                        return go!(self: to DoctypeIdentifierSingleQuoted kind);
                     }
                     '>' => {
                         go!(self: error);
                         go!(self: force_quirks);
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     _ => {
                         go!(self: error);
                         go!(self: force_quirks);
-                        go!(self: to BogusDoctype);
+                        return go!(self: to BogusDoctype);
                     }
                 }
             },
@@ -1657,7 +1657,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::DoctypeIdentifierDoubleQuoted(kind) => loop {
                 match get_char!(self, input) {
                     '"' => {
-                        go!(self: to AfterDoctypeIdentifier kind);
+                        return go!(self: to AfterDoctypeIdentifier kind);
                     }
                     '\0' => {
                         go!(self: error);
@@ -1667,7 +1667,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: error);
                         go!(self: force_quirks);
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     c => {
                         go!(self: push_doctype_id kind c);
@@ -1679,7 +1679,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::DoctypeIdentifierSingleQuoted(kind) => loop {
                 match get_char!(self, input) {
                     '\'' => {
-                        go!(self: to AfterDoctypeIdentifier kind);
+                        return go!(self: to AfterDoctypeIdentifier kind);
                     }
                     '\0' => {
                         go!(self: error);
@@ -1689,7 +1689,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         go!(self: error);
                         go!(self: force_quirks);
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     c => {
                         go!(self: push_doctype_id kind c);
@@ -1701,26 +1701,26 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::AfterDoctypeIdentifier(Public) => loop {
                 match get_char!(self, input) {
                     '\t' | '\n' | '\x0C' | ' ' => {
-                        go!(self: to BetweenDoctypePublicAndSystemIdentifiers);
+                        return go!(self: to BetweenDoctypePublicAndSystemIdentifiers);
                     }
                     '>' => {
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '"' => {
                         go!(self: error);
                         go!(self: clear_doctype_id System);
-                        go!(self: to DoctypeIdentifierDoubleQuoted System);
+                        return go!(self: to DoctypeIdentifierDoubleQuoted System);
                     }
                     '\'' => {
                         go!(self: error);
                         go!(self: clear_doctype_id System);
-                        go!(self: to DoctypeIdentifierSingleQuoted System);
+                        return go!(self: to DoctypeIdentifierSingleQuoted System);
                     }
                     _ => {
                         go!(self: error);
                         go!(self: force_quirks);
-                        go!(self: to BogusDoctype);
+                        return go!(self: to BogusDoctype);
                     }
                 }
             },
@@ -1731,11 +1731,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     '\t' | '\n' | '\x0C' | ' ' => (),
                     '>' => {
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     _ => {
                         go!(self: error);
-                        go!(self: to BogusDoctype);
+                        return go!(self: to BogusDoctype);
                     }
                 }
             },
@@ -1746,20 +1746,20 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     '\t' | '\n' | '\x0C' | ' ' => (),
                     '>' => {
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '"' => {
                         go!(self: clear_doctype_id System);
-                        go!(self: to DoctypeIdentifierDoubleQuoted System);
+                        return go!(self: to DoctypeIdentifierDoubleQuoted System);
                     }
                     '\'' => {
                         go!(self: clear_doctype_id System);
-                        go!(self: to DoctypeIdentifierSingleQuoted System);
+                        return go!(self: to DoctypeIdentifierSingleQuoted System);
                     }
                     _ => {
                         go!(self: error);
                         go!(self: force_quirks);
-                        go!(self: to BogusDoctype);
+                        return go!(self: to BogusDoctype);
                     }
                 }
             },
@@ -1769,7 +1769,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '>' => {
                         go!(self: emit_doctype);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     _ => (),
                 }
@@ -1780,7 +1780,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 match get_char!(self, input) {
                     '>' => {
                         go!(self: emit_comment);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     '\0' => {
                         go!(self: push_comment '\u{fffd}');
@@ -1796,11 +1796,11 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 if eat_exact!(self, input, "--") {
                     {
                         go!(self: clear_comment);
-                        go!(self: to CommentStart);
+                        return go!(self: to CommentStart);
                     };
                 } else if eat!(self, input, "doctype") {
                     {
-                        go!(self: to Doctype);
+                        return go!(self: to Doctype);
                     };
                 } else {
                     if self
@@ -1810,13 +1810,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                         if eat_exact!(self, input, "[CDATA[") {
                             {
                                 go!(self: clear_temp);
-                                go!(self: to CdataSection);
+                                return go!(self: to CdataSection);
                             };
                         }
                     }
                     {
                         go!(self: error);
-                        go!(self: to BogusComment);
+                        return go!(self: to BogusComment);
                     };
                 }
             },
@@ -1825,7 +1825,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             states::CdataSection => loop {
                 match get_char!(self, input) {
                     ']' => {
-                        go!(self: to CdataSectionBracket);
+                        return go!(self: to CdataSectionBracket);
                     }
                     '\0' => {
                         go!(self: emit_temp);
@@ -1840,12 +1840,12 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             //ยง cdata-section-bracket
             states::CdataSectionBracket => match get_char!(self, input) {
                 ']' => {
-                    go!(self: to CdataSectionEnd);
+                    return go!(self: to CdataSectionEnd);
                 }
                 _ => {
                     go!(self: push_temp ']');
                     self.reconsume = true;
-                    go!(self: to CdataSection);
+                    return go!(self: to CdataSection);
                 }
             },
 
@@ -1857,13 +1857,13 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     }
                     '>' => {
                         go!(self: emit_temp);
-                        go!(self: to Data);
+                        return go!(self: to Data);
                     }
                     _ => {
                         go!(self: push_temp ']');
                         go!(self: push_temp ']');
                         self.reconsume = true;
-                        go!(self: to CdataSection);
+                        return go!(self: to CdataSection);
                     }
                 }
             },
@@ -1997,54 +1997,54 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             | states::ScriptDataEscapedDash(_)
             | states::ScriptDataEscapedDashDash(_) => {
                 go!(self: error_eof);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::TagOpen => {
                 go!(self: error_eof);
                 go!(self: emit '<');
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::EndTagOpen => {
                 go!(self: error_eof);
                 go!(self: emit '<');
                 go!(self: emit '/');
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::RawLessThanSign(ScriptDataEscaped(DoubleEscaped)) => {
-                go!(self: to RawData ScriptDataEscaped DoubleEscaped);
+                return go!(self: to RawData ScriptDataEscaped DoubleEscaped);
             }
 
             states::RawLessThanSign(kind) => {
                 go!(self: emit '<');
-                go!(self: to RawData kind);
+                return go!(self: to RawData kind);
             }
 
             states::RawEndTagOpen(kind) => {
                 go!(self: emit '<');
                 go!(self: emit '/');
-                go!(self: to RawData kind);
+                return go!(self: to RawData kind);
             }
 
             states::RawEndTagName(kind) => {
                 go!(self: emit '<');
                 go!(self: emit '/');
                 go!(self: emit_temp);
-                go!(self: to RawData kind);
+                return go!(self: to RawData kind);
             }
 
             states::ScriptDataEscapeStart(kind) => {
-                go!(self: to RawData ScriptDataEscaped kind);
+                return go!(self: to RawData ScriptDataEscaped kind);
             }
 
             states::ScriptDataEscapeStartDash => {
-                go!(self: to RawData ScriptData);
+                return go!(self: to RawData ScriptData);
             }
 
             states::ScriptDataDoubleEscapeEnd => {
-                go!(self: to RawData ScriptDataEscaped DoubleEscaped);
+                return go!(self: to RawData ScriptDataEscaped DoubleEscaped);
             }
 
             states::CommentStart
@@ -2055,7 +2055,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
             | states::CommentEndBang => {
                 go!(self: error_eof);
                 go!(self: emit_comment);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::Doctype | states::BeforeDoctypeName => {
@@ -2063,7 +2063,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 go!(self: create_doctype);
                 go!(self: force_quirks);
                 go!(self: emit_doctype);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::DoctypeName
@@ -2077,39 +2077,39 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 go!(self: error_eof);
                 go!(self: force_quirks);
                 go!(self: emit_doctype);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::BogusDoctype => {
                 go!(self: emit_doctype);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::BogusComment => {
                 go!(self: emit_comment);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::MarkupDeclarationOpen => {
                 go!(self: error);
-                go!(self: to BogusComment);
+                return go!(self: to BogusComment);
             }
 
             states::CdataSection => {
                 go!(self: emit_temp);
                 go!(self: error_eof);
-                go!(self: to Data);
+                return go!(self: to Data);
             }
 
             states::CdataSectionBracket => {
                 go!(self: push_temp ']');
-                go!(self: to CdataSection);
+                return go!(self: to CdataSection);
             }
 
             states::CdataSectionEnd => {
                 go!(self: push_temp ']');
                 go!(self: push_temp ']');
-                go!(self: to CdataSection);
+                return go!(self: to CdataSection);
             }
         }
     }
-- 
cgit v1.2.3