/*
 * Copyright (c) 2023 fred
 * Licensed under the Apache License,Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
import { CharacterReader } from './CharacterReader';
import { Tokeniser } from './Tokeniser';
import hilog from '@ohos.hilog';
import constants from '../util/Constants'
import { EOF } from './token/EOF';
import { CDataCharacter } from './token/CDataCharacter';
import logUtil from '../util/LogUtil';
import wantAgent from '@ohos.wantAgent';

/**
 * Base of the HTML tokeniser's states (concrete states live in TokeniserStates
 * below). A state reads characters from the CharacterReader, emits tokens via
 * the Tokeniser, and transitions the Tokeniser to its next state. The static
 * helpers here factor out bodies shared by several states.
 */
export abstract class TokeniserState {
  // Delimiters that end an attribute name; passed to consumeToAnySorted (see
  // AttributeName), so this list must stay in ascending char-code order.
  static readonly attributeNameCharsSorted = ['\t', '\n', '\f', '\r', ' ', '"', '\'', '/', '<', '=', '>']
  // Delimiters that end an unquoted attribute value; also used with
  // consumeToAnySorted (AttributeValue_unquoted), so keep sorted too.
  static readonly attributeValueUnquoted = [constants.nullChar, '\t', '\n', '\f', '\r', ' ', '"', '&', '\'', '<', '=', '>', '`']

  /**
   * Handles RawtextEndTagName, ScriptDataEndTagName, and ScriptDataEscapedEndTagName. Same body impl, just
   * different else exit transitions.
   */
  static handleDataEndTag(t: Tokeniser, r: CharacterReader, elseTransition: TokeniserState) {
    if (r.matchesLetter()) {
      // still reading the tag name: buffer it on both the pending tag and the temp buffer
      let name = r.consumeLetterSequence()
      t.tagPending.appendTagName(name)
      t.dataBuffer.push(name)
      return
    }

    let needsExitTransition = false
    if (t.isAppropriateEndTagToken() && !r.isEmpty()) {
      let c = r.consume();
      logUtil.i("tokeniserState",`handleDataEndTag:${c} `)
      switch (c) {
        case '\t':
        case '\n':
        case '\r':
        case '\f':
        case ' ':
          t.transition(TokeniserStates.BeforeAttributeName)
          break
        case '/':
          t.transition(TokeniserStates.SelfClosingStartTag)

          break
        case '>':
          t.emitTagPending()
          t.transition(TokeniserStates.Data)
          break
        default:
          // not a tag-terminating char after all: keep it and fall back to text
          t.dataBuffer.push(c)
          needsExitTransition = true
      }
    } else {
      needsExitTransition = true
    }
    if (needsExitTransition) {
      // re-emit the buffered "</name" as literal text and resume elseTransition
      t.emit("</")
      t.emit(t.dataBuffer)
      t.transition(elseTransition)
    }
  }

  // Shared body for the raw-text-style states (Rawtext, ScriptData): emit raw
  // character data until '<' (go to `advance`), NUL (replace) or EOF.
  static readRawData(t: Tokeniser, r: CharacterReader, current: TokeniserState, advance: TokeniserState) {
    let currentData = r.current()
    logUtil.i('tokeniserState',`readRawData:${currentData}`)
    switch (currentData) {
      case '<':
        t.advanceTransition(advance);
        break;
      case constants.nullChar:
        t.error(current);
        r.advance();
        t.emit(constants.replacementChar);
        break;
      case constants.eof:
        t.emit(new EOF());
        break;
      default:
        let data = r.consumeRawData();
        t.emit(data);
        break;
    }
  }

  // Shared body for the character-reference states: decode a reference (or
  // emit a literal '&' if none parses), then go back to `advance`.
  static readCharRef(t: Tokeniser, advance: TokeniserState) {
    let c = t.consumeCharacterReference(null, false)
    logUtil.i("tokeniserState",`readCharRef:${c}`)
    if (c == null) {
      t.emit('&')
    } else {
      t.emit(c)
    }
    t.transition(advance)
  }

  // Shared body for the end-tag-open states: a letter starts an end tag and
  // goes to `a`; anything else re-emits "</" as text and goes to `b`.
  static readEndTag(t: Tokeniser, r: CharacterReader, a: TokeniserState, b: TokeniserState) {
    if (r.matchesAsciiAlpha()) {
      t.createTagPending(false)
      t.transition(a)
    } else {
      t.emit("</")
      t.transition(b)
    }
  }

  // Shared body for ScriptDataDoubleEscapeStart / ScriptDataDoubleEscapeEnd.
  static handleDataDoubleEscapeTag(t: Tokeniser, r: CharacterReader, primary: TokeniserState, fallback: TokeniserState) {
    if (r.matchesLetter()) {
      let name = r.consumeLetterSequence()
      t.dataBuffer.push(name)
      t.emit(name)
      return
    }

    let c = r.consume();
    logUtil.i("tokeniserState",`handleDataDoubleEscapeTag:${c} `)
    switch (c) {
      case '\t':
      case '\n':
      case '\r':
      case '\f':
      case ' ':
      case '/':
      case '>':
        // NOTE(review): assumes dataBuffer.toString() concatenates its chunks
        // with no separator — a plain Array's toString() inserts commas, which
        // would break this comparison when the name arrives in multiple
        // chunks. Confirm against Tokeniser's dataBuffer implementation.
        if (t.dataBuffer.toString() === 'script') {
          t.transition(primary)
        } else {
          t.transition(fallback)
        }
        t.emit(c)
        break
      default:
        r.unconsume()
        t.transition(fallback)
    }
  }

  /** Reads input from r, emitting tokens / switching states on t. */
  abstract read(t: Tokeniser, r: CharacterReader)
}
//
export const TokeniserStates: { [state: string]: TokeniserState } = {
  Data: {
    // Default data state: emit character data until a character reference
    // ('&') or a tag opener ('<') is found; NUL and EOF are special-cased.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.current()
      logUtil.i("tokeniserState",`Data :${ch}`)
      if (ch === '&') {
        t.advanceTransition(TokeniserStates.CharacterReferenceInData)
      } else if (ch === '<') {
        t.advanceTransition(TokeniserStates.TagOpen)
      } else if (ch === constants.nullChar) {
        // NOT the replacement character (oddly?) — the NUL itself is emitted
        t.error(this)
        t.emit(r.consume())
      } else if (ch === constants.eof) {
        t.emit(new EOF())
      } else {
        // plain run of character data
        t.emit(r.consumeData())
      }
    }
  },
  CharacterReferenceInData: {
    // Decode a character reference seen in Data, then return there.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`CharacterReferenceInData`)
      const returnState = TokeniserStates.Data
      TokeniserState.readCharRef(t, returnState)
    }
  },
  Rcdata: {
    // RCDATA (title, textarea, etc): text plus character references; '<' may
    // open an end tag for the current element.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.current()
      logUtil.i("tokeniserState",`Rcdata :${ch}`)
      if (ch === '&') {
        t.advanceTransition(TokeniserStates.CharacterReferenceInRcdata)
      } else if (ch === '<') {
        t.advanceTransition(TokeniserStates.RcdataLessthanSign)
      } else if (ch === constants.nullChar) {
        t.error(this)
        r.advance()
        t.emit(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.emit(new EOF())
      } else {
        t.emit(r.consumeData())
      }
    }
  },
  CharacterReferenceInRcdata: {
    // Decode a character reference seen in Rcdata, then return there.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`CharacterReferenceInRcdata`)
      const returnState = TokeniserStates.Rcdata
      TokeniserState.readCharRef(t, returnState)
    }
  },
  Rawtext: {
    // Raw text (style, xmp, etc): no character references; only '<' can lead
    // anywhere else.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`Rawtext`)
      const onLessThan = TokeniserStates.RawtextLessthanSign
      TokeniserState.readRawData(t, r, this, onLessThan)
    }
  },
  ScriptData: {
    // Plain script data: raw text until '<'.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptData`)
      const onLessThan = TokeniserStates.ScriptDataLessthanSign
      TokeniserState.readRawData(t, r, this, onLessThan)
    }
  },
  PLAINTEXT: {
    // PLAINTEXT: everything to EOF is character data; only NUL needs mapping.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.current()
      logUtil.i("tokeniserState",`PLAINTEXT :${ch}`)
      if (ch === constants.nullChar) {
        t.error(this)
        r.advance()
        t.emit(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.emit(new EOF())
      } else {
        // NOTE(review): consumeTo is given a char *code* here, while elsewhere
        // it gets a string ('>') — presumably an overload; verify in
        // CharacterReader.
        t.emit(r.consumeTo(constants.nullChar.charCodeAt(0)))
      }
    }
  },
  TagOpen: {
    // '<' seen in data: decide between markup declaration ('!'), end tag
    // ('/'), bogus comment ('?'), or a start tag name (letter).
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.current()
      logUtil.i("tokeniserState",`TagOpen :${ch}`)
      if (ch === '!') {
        t.advanceTransition(TokeniserStates.MarkupDeclarationOpen)
      } else if (ch === '/') {
        t.advanceTransition(TokeniserStates.EndTagOpen)
      } else if (ch === '?') {
        t.createBogusCommentPending()
        t.transition(TokeniserStates.BogusComment)
      } else if (r.matchesAsciiAlpha()) {
        t.createTagPending(true)
        t.transition(TokeniserStates.TagName)
      } else {
        t.error(this)
        t.emit('<') // char that got us here
        t.transition(TokeniserStates.Data)
      }
    }
  },
  EndTagOpen: {
    // "</" seen: a letter begins an end tag; otherwise error recovery.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`EndTagOpen`)
      if (r.isEmpty()) {
        logUtil.i("tokeniserState",`EndTagOpen1`)
        t.eofError(this)
        t.emit('</')
        t.transition(TokeniserStates.Data)
        return
      }
      if (r.matchesAsciiAlpha()) {
        logUtil.i("tokeniserState",`EndTagOpen2`)
        t.createTagPending(false)
        t.transition(TokeniserStates.TagName)
        return
      }
      if (r.matches('>')) {
        logUtil.i("tokeniserState",`EndTagOpen3`)
        t.error(this)
        t.advanceTransition(TokeniserStates.Data)
        return
      }
      // anything else becomes a bogus comment
      logUtil.i("tokeniserState",`EndTagOpen4`)
      t.error(this)
      t.createBogusCommentPending()
      // push the / back on that got us here
      t.commentPending.append('/');
      t.transition(TokeniserStates.BogusComment)
    }
  },
  TagName: {
    // from < or </ in data, will have start or end tag pending
    read(t: Tokeniser, r: CharacterReader) {
      // previous TagOpen state did NOT consume, will have a letter char in current
      let tagName = r.consumeTagName()
      logUtil.i("tokeniserState",`TagName :${tagName}`)
      t.tagPending.appendTagName(tagName)
      let c = r.consume()
      switch (c) {
        case '\t':
        case '\n':
        case '\r':
        case '\f':
        case ' ':
          t.transition(TokeniserStates.BeforeAttributeName)
          break
        case '/':
          t.transition(TokeniserStates.SelfClosingStartTag)
          break
        case '<': // NOTE: out of spec, but clear author intent
          r.unconsume()
          t.error(this)
      // intended fall through to next '>' case — deliberately no break here
        case '>':
          t.emitTagPending()
          t.transition(TokeniserStates.Data)
          break
        case constants.nullChar: // replacement
          // NUL inside a tag name becomes the replacement string
          t.tagPending.appendTagName(constants.replacementStr)
          break
        case constants.eof: // should emit pending tag?
          t.eofError(this)
          t.transition(TokeniserStates.Data)
          break
        default: // buffer underrun
          t.tagPending.appendTagName(c)
      }
    }
  },
  RcdataLessthanSign: {
    // from < in rcdata
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`RcdataLessthanSign`)
      if (r.matches('/')) {
        t.createTempBuffer()
        t.advanceTransition(TokeniserStates.RCDATAEndTagOpen)
      } else if (r.readFully() && r.matchesAsciiAlpha() && t.appropriateEndTagName() !== undefined &&
      t.appropriateEndTagName() !== null && !r.containsIgnoreCase(t.appropriateEndTagSeq())) {
        // NOTE(review): the readFully() guard — presumably it ensures the rest
        // of the input is buffered before containsIgnoreCase scans ahead;
        // confirm its semantics against CharacterReader.
        // diverge from spec: got a start tag, but there's no appropriate end tag (</title>), so rather than
        // consuming to EOF; break out here
        t.tagPending = t.createTagPending(false).setName(t.appropriateEndTagName())
        t.emitTagPending()
        // straight into TagOpen, as we came from < and looks like we're on a start tag
        t.transition(TokeniserStates.TagOpen)
      } else {
        t.emit("<")
        t.transition(TokeniserStates.Rcdata)
      }
    }
  },
  RCDATAEndTagOpen: {
    // "</" inside RCDATA: a letter starts a candidate end tag; anything else
    // is literal text.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`RCDATAEndTagOpen`)
      if (!r.matchesAsciiAlpha()) {
        t.emit('</')
        t.transition(TokeniserStates.Rcdata)
        return
      }
      t.createTagPending(false)
      const first = r.current()
      t.tagPending.appendTagName(first)
      t.dataBuffer.push(first)
      t.advanceTransition(TokeniserStates.RCDATAEndTagName)
    }
  },
  RCDATAEndTagName: {
    /**
     * Reading the name of a candidate </xxx> end tag inside RCDATA. If the
     * name matches the open element (an "appropriate" end tag token) the tag
     * is processed; otherwise everything buffered is re-emitted as text.
     */
    read(t: Tokeniser, r: CharacterReader) {
      if (r.matchesAsciiAlpha()) {
        let name = r.consumeLetterSequence();
        t.tagPending.appendTagName(name);
        t.dataBuffer.push(name);
        return;
      }
      // Shared fallback, previously duplicated in every switch arm: flush the
      // buffered "</name" back out as text and resume RCDATA.
      const anythingElse = () => {
        t.emit('</')
        t.emit(t.dataBuffer)
        r.unconsume()
        t.transition(TokeniserStates.Rcdata)
      }
      let c = r.consume();
      logUtil.i("tokeniserState",`RCDATAEndTagName :${c}`)
      switch (c) {
        case '\t':
        case '\n':
        case '\r':
        case '\f':
        case ' ':
          if (t.isAppropriateEndTagToken()) {
            t.transition(TokeniserStates.BeforeAttributeName)
          } else {
            anythingElse()
          }
          break
        case '/':
          if (t.isAppropriateEndTagToken()) {
            t.transition(TokeniserStates.SelfClosingStartTag)
          } else {
            anythingElse()
          }
          break;
        case '>':
          if (t.isAppropriateEndTagToken()) {
            t.emitTagPending();
            t.transition(TokeniserStates.Data);
          } else {
            anythingElse()
          }
          break;
        default:
          anythingElse()
      }
    }
  },
  RawtextLessthanSign: {
    // '<' inside raw text: only "</" can start an end tag; otherwise literal.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`RawtextLessthanSign`)
      if (!r.matches('/')) {
        t.emit('<')
        t.transition(TokeniserStates.Rawtext)
        return
      }
      t.createTempBuffer()
      t.advanceTransition(TokeniserStates.RawtextEndTagOpen)
    }
  },
  RawtextEndTagOpen: {
    // Delegate: letter -> RawtextEndTagName, anything else -> back to Rawtext.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`RawtextEndTagOpen`)
      const onLetter = TokeniserStates.RawtextEndTagName
      TokeniserState.readEndTag(t, r, onLetter, TokeniserStates.Rawtext)
    }
  },
  RawtextEndTagName: {
    // Shared end-tag-name handling; on mismatch fall back to Rawtext.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`RawtextEndTagName `)
      const fallback = TokeniserStates.Rawtext
      TokeniserState.handleDataEndTag(t, r, fallback)
    }
  },
  ScriptDataLessthanSign: {
    // '<' in script data: may be an end tag ("</"), an escape opener ("<!"),
    // EOF, or just a literal '<'.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`ScriptDataLessthanSign :${ch}`)
      if (ch === '/') {
        t.createTempBuffer()
        t.transition(TokeniserStates.ScriptDataEndTagOpen)
      } else if (ch === '!') {
        t.emit('<!')
        t.transition(TokeniserStates.ScriptDataEscapeStart)
      } else if (ch === constants.eof) {
        t.emit('<')
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        t.emit('<')
        r.unconsume()
        t.transition(TokeniserStates.ScriptData)
      }
    }
  },
  ScriptDataEndTagOpen: {
    // Delegate: letter -> ScriptDataEndTagName, else back to ScriptData.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEndTagOpen `)
      const onLetter = TokeniserStates.ScriptDataEndTagName
      TokeniserState.readEndTag(t, r, onLetter, TokeniserStates.ScriptData)
    }
  },
  ScriptDataEndTagName: {
    // Shared end-tag-name handling; on mismatch fall back to ScriptData.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEndTagName `)
      const fallback = TokeniserStates.ScriptData
      TokeniserState.handleDataEndTag(t, r, fallback)
    }
  },
  ScriptDataEscapeStart: {
    // After "<!": a '-' moves toward the escaped state, otherwise plain script.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEscapeStart `)
      if (!r.matches('-')) {
        t.transition(TokeniserStates.ScriptData)
        return
      }
      t.emit('-')
      t.advanceTransition(TokeniserStates.ScriptDataEscapeStartDash)
    }
  },
  ScriptDataEscapeStartDash: {
    // After "<!-": a second '-' completes the escape opener.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEscapeStartDash `)
      if (!r.matches('-')) {
        t.transition(TokeniserStates.ScriptData)
        return
      }
      t.emit('-')
      t.advanceTransition(TokeniserStates.ScriptDataEscapedDashDash)
    }
  },
  ScriptDataEscaped: {
    // Inside a "<!-- ... -->" escape within script data.
    read(t: Tokeniser, r: CharacterReader) {
      if (r.isEmpty()) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
        return
      }
      const ch = r.current()
      logUtil.i("tokeniserState",`ScriptDataEscaped :${ch}`)
      if (ch === '-') {
        t.emit('-')
        t.advanceTransition(TokeniserStates.ScriptDataEscapedDash)
      } else if (ch === '<') {
        t.advanceTransition(TokeniserStates.ScriptDataEscapedLessthanSign)
      } else if (ch === constants.nullChar) {
        t.error(this)
        r.advance()
        t.emit(constants.replacementChar)
      } else {
        t.emit(r.consumeToAny('-', '<', constants.nullChar))
      }
    }
  },
  ScriptDataEscapedDash: {
    // Saw one '-' in escaped script data.
    read(t: Tokeniser, r: CharacterReader) {
      if (r.isEmpty()) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
        return
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`ScriptDataEscapedDash :${ch}`)
      if (ch === '-') {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataEscapedDashDash)
      } else if (ch === '<') {
        t.transition(TokeniserStates.ScriptDataEscapedLessthanSign)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.emit(constants.replacementChar)
        t.transition(TokeniserStates.ScriptDataEscaped)
      } else {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataEscaped)
      }
    }
  },
  ScriptDataEscapedDashDash: {
    // Saw "--" in escaped script data; '>' ends the escape.
    read(t: Tokeniser, r: CharacterReader) {
      if (r.isEmpty()) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
        return
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`ScriptDataEscapedDashDash :${ch}`)
      if (ch === '-') {
        // still in the dash run; stay in this state
        t.emit(ch)
      } else if (ch === '<') {
        t.transition(TokeniserStates.ScriptDataEscapedLessthanSign)
      } else if (ch === '>') {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptData)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.emit(constants.replacementChar)
        t.transition(TokeniserStates.ScriptDataEscaped)
      } else {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataEscaped)
      }
    }
  },
  ScriptDataEscapedLessthanSign: {
    // '<' in escaped script data: may start a double-escape ("<script") or an
    // end tag ("</"); otherwise it's a literal '<'.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEscapedLessthanSign `)
      if (r.matches('/')) {
        t.createTempBuffer()
        t.advanceTransition(TokeniserStates.ScriptDataEscapedEndTagOpen)
      } else if (r.matchesAsciiAlpha()) {
        t.createTempBuffer()
        const first = r.current()
        t.dataBuffer.push(first)
        t.emit('<')
        t.emit(first)
        t.advanceTransition(TokeniserStates.ScriptDataDoubleEscapeStart)
      } else {
        t.emit('<')
        t.transition(TokeniserStates.ScriptDataEscaped)
      }
    }
  },
  ScriptDataEscapedEndTagOpen: {
    // "</" in escaped script data: a letter begins a candidate end tag name.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEscapedEndTagOpen `)
      if (!r.matchesAsciiAlpha()) {
        t.emit('</')
        t.transition(TokeniserStates.ScriptDataEscaped)
        return
      }
      t.createTagPending(false)
      const first = r.current()
      t.tagPending.appendTagName(first)
      t.dataBuffer.push(first)
      t.advanceTransition(TokeniserStates.ScriptDataEscapedEndTagName)
    }
  },
  ScriptDataEscapedEndTagName: {
    // Shared end-tag-name handling; on mismatch fall back to ScriptDataEscaped.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataEscapedEndTagName `)
      const fallback = TokeniserStates.ScriptDataEscaped
      TokeniserState.handleDataEndTag(t, r, fallback)
    }
  },
  ScriptDataDoubleEscapeStart: {
    // "<x" in escaped script data: if the buffered name is "script", enter the
    // double-escaped state; otherwise stay escaped.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataDoubleEscapeStart `)
      const primary = TokeniserStates.ScriptDataDoubleEscaped
      TokeniserState.handleDataDoubleEscapeTag(t, r, primary, TokeniserStates.ScriptDataEscaped)
    }
  },
  ScriptDataDoubleEscaped: {
    // Double-escaped script data ("script inside a comment inside script").
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.current()
      logUtil.i("tokeniserState", `ScriptDataDoubleEscaped :${ch}`)
      if (ch === '-') {
        t.emit(ch)
        t.advanceTransition(TokeniserStates.ScriptDataDoubleEscapedDash)
      } else if (ch === '<') {
        t.emit(ch)
        t.advanceTransition(TokeniserStates.ScriptDataDoubleEscapedLessthanSign)
      } else if (ch === constants.nullChar) {
        t.error(this)
        r.advance()
        t.emit(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        t.emit(r.consumeToAny('-', '<', constants.nullChar))
      }
    }
  },
  ScriptDataDoubleEscapedDash: {
    // Saw one '-' in double-escaped data.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`ScriptDataDoubleEscapedDash :${ch}`)
      if (ch === '-') {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataDoubleEscapedDashDash)
      } else if (ch === '<') {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataDoubleEscapedLessthanSign)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.emit(constants.replacementChar)
        t.transition(TokeniserStates.ScriptDataDoubleEscaped)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataDoubleEscaped)
      }
    }
  },
  ScriptDataDoubleEscapedDashDash: {
    // Saw "--" in double-escaped data; '>' returns to plain script data.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`ScriptDataDoubleEscapedDashDash :${ch}`)
      if (ch === '-') {
        // still in the dash run; stay in this state
        t.emit(ch)
      } else if (ch === '<') {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataDoubleEscapedLessthanSign)
      } else if (ch === '>') {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptData)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.emit(constants.replacementChar)
        t.transition(TokeniserStates.ScriptDataDoubleEscaped)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        t.emit(ch)
        t.transition(TokeniserStates.ScriptDataDoubleEscaped)
      }
    }
  },
  ScriptDataDoubleEscapedLessthanSign: {
    // Only "</" matters here; it may end the double-escaped run.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataDoubleEscapedLessthanSign`)
      if (!r.matches('/')) {
        t.transition(TokeniserStates.ScriptDataDoubleEscaped)
        return
      }
      t.emit('/')
      t.createTempBuffer()
      t.advanceTransition(TokeniserStates.ScriptDataDoubleEscapeEnd)
    }
  },
  ScriptDataDoubleEscapeEnd: {
    // "</x" in double-escaped data: "script" drops back to the escaped state.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`ScriptDataDoubleEscapeEnd `)
      const primary = TokeniserStates.ScriptDataEscaped
      TokeniserState.handleDataDoubleEscapeTag(t, r, primary, TokeniserStates.ScriptDataDoubleEscaped);
    }
  },
  BeforeAttributeName: {
    // from tagname <xxx
    read(t: Tokeniser, r: CharacterReader) {
      let c = r.consume()
      logUtil.i("tokeniserState",`BeforeAttributeName :${c}`)
      switch (c) {
        case '\t':
        case '\n':
        case '\r':
        case '\f':
        case ' ':
          break // ignore whitespace
        case '/':
          t.transition(TokeniserStates.SelfClosingStartTag)
          break
        case '<': // NOTE: out of spec, but clear (spec has this as a part of the attribute name)
          r.unconsume()
          t.error(this)
      // intended fall through as if '>' — deliberately no break here
        case '>':
          t.emitTagPending()
          t.transition(TokeniserStates.Data)
          break
        case constants.nullChar:
          // NUL starts a (bogus) attribute name; re-read it in AttributeName
          r.unconsume()
          t.error(this)
          t.tagPending.newAttribute()
          t.transition(TokeniserStates.AttributeName)
          break
        case constants.eof:
          t.eofError(this)
          t.transition(TokeniserStates.Data)
          break
        case '"':
        case '\'':
        case '=':
          // invalid name-start chars: flag the error but keep them as the name
          t.error(this)
          t.tagPending.newAttribute()
          t.tagPending.appendAttributeName(c)
          t.transition(TokeniserStates.AttributeName)
          break
        default: // A-Z, anything else
          t.tagPending.newAttribute()
          r.unconsume()
          t.transition(TokeniserStates.AttributeName)
      }
    }
  },
  AttributeName: {
    // Accumulating an attribute name; delimiters are bulk-consumed via
    // attributeNameCharsSorted (spec deviation: nulls handled in one hit).
    read(t: Tokeniser, r: CharacterReader) {
      const name = r.consumeToAnySorted(TokeniserState.attributeNameCharsSorted);
      t.tagPending.appendAttributeName(name)
      const ch = r.consume()
      logUtil.i("tokeniserState",`AttributeName :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.AfterAttributeName)
      } else if (ch === '/') {
        t.transition(TokeniserStates.SelfClosingStartTag)
      } else if (ch === '=') {
        t.transition(TokeniserStates.BeforeAttributeValue)
      } else if (ch === '>') {
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else if (ch === '"' || ch === '\'' || ch === '<') {
        // invalid in a name: error, but recover by appending
        t.error(this)
        t.tagPending.appendAttributeName(ch)
      } else {
        // buffer underrun: keep the char and stay in this state
        t.tagPending.appendAttributeName(ch)
      }
    }
  },
  AfterAttributeName: {
    // Between an attribute name and whatever follows ('=', '>', another
    // attribute, ...).
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`AfterAttributeName :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === '/') {
        t.transition(TokeniserStates.SelfClosingStartTag)
      } else if (ch === '=') {
        t.transition(TokeniserStates.BeforeAttributeValue)
      } else if (ch === '>') {
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.nullChar) {
        // NUL extends the current name with the replacement char
        t.error(this)
        t.tagPending.appendAttributeName(constants.replacementChar)
        t.transition(TokeniserStates.AttributeName)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else if (ch === '"' || ch === '\'' || ch === '<') {
        // invalid, but starts a fresh attribute name
        t.error(this)
        t.tagPending.newAttribute()
        t.tagPending.appendAttributeName(ch)
        t.transition(TokeniserStates.AttributeName)
      } else {
        // A-Z, anything else: start a fresh attribute and re-read the char
        t.tagPending.newAttribute()
        r.unconsume()
        t.transition(TokeniserStates.AttributeName)
      }
    }
  },
  BeforeAttributeValue: {
    // After "name=": decide the quoting style of the value.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`BeforeAttributeValue :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === '"') {
        t.transition(TokeniserStates.AttributeValue_doubleQuoted)
      } else if (ch === '&') {
        // the '&' belongs to the (unquoted) value; re-read it there
        r.unconsume()
        t.transition(TokeniserStates.AttributeValue_unquoted)
      } else if (ch === '\'') {
        t.transition(TokeniserStates.AttributeValue_singleQuoted)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.tagPending.appendAttributeValue(constants.replacementChar)
        t.transition(TokeniserStates.AttributeValue_unquoted)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === '>') {
        t.error(this)
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === '<' || ch === '=' || ch === '`') {
        // invalid value-start chars: error, but keep them in the value
        t.error(this)
        t.tagPending.appendAttributeValue(ch)
        t.transition(TokeniserStates.AttributeValue_unquoted)
      } else {
        r.unconsume()
        t.transition(TokeniserStates.AttributeValue_unquoted)
      }
    }
  },
  AttributeValue_doubleQuoted: {
    // Double-quoted value; character references are active inside it.
    read(t: Tokeniser, r: CharacterReader) {
      const value = r.consumeAttributeQuoted(false)
      if (value.length > 0) {
        t.tagPending.appendAttributeValue(value)
      } else {
        t.tagPending.setEmptyAttributeValue()
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`AttributeValue_doubleQuoted :${ch}`)
      if (ch === '"') {
        t.transition(TokeniserStates.AfterAttributeValue_quoted)
      } else if (ch === '&') {
        const ref = t.consumeCharacterReference('"', true)
        // an unparseable reference stays a literal ampersand
        t.tagPending.appendAttributeValue(ref != null ? ref : '&')
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.tagPending.appendAttributeValue(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        // hit end of buffer in first read, still in attribute
        t.tagPending.appendAttributeValue(ch)
      }
    }
  },
  AttributeValue_singleQuoted: {
    // Single-quoted value; character references are active inside it.
    read(t: Tokeniser, r: CharacterReader) {
      const value = r.consumeAttributeQuoted(true)
      if (value.length > 0) {
        t.tagPending.appendAttributeValue(value)
      } else {
        t.tagPending.setEmptyAttributeValue()
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`AttributeValue_singleQuoted :${ch}`)
      if (ch === '\'') {
        t.transition(TokeniserStates.AfterAttributeValue_quoted)
      } else if (ch === '&') {
        const ref = t.consumeCharacterReference('\'', true)
        // an unparseable reference stays a literal ampersand
        t.tagPending.appendAttributeValue(ref != null ? ref : '&')
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.tagPending.appendAttributeValue(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        // hit end of buffer in first read, still in attribute
        t.tagPending.appendAttributeValue(ch)
      }
    }
  },
  AttributeValue_unquoted: {
    // Unquoted value: bulk-consume to any delimiter, then dispatch on it.
    read(t: Tokeniser, r: CharacterReader) {
      const value = r.consumeToAnySorted(TokeniserState.attributeValueUnquoted)
      if (value.length > 0) {
        t.tagPending.appendAttributeValue(value)
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`AttributeValue_unquoted :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.BeforeAttributeName)
      } else if (ch === '&') {
        const ref = t.consumeCharacterReference('>', true)
        // an unparseable reference stays a literal ampersand
        t.tagPending.appendAttributeValue(ref != null ? ref : '&')
      } else if (ch === '>') {
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.tagPending.appendAttributeValue(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else if (ch === '"' || ch === '\'' || ch === '<' || ch === '=' || ch === '`') {
        // invalid inside an unquoted value: error, but keep the char
        t.error(this)
        t.tagPending.appendAttributeValue(ch)
      } else {
        // hit end of buffer in first read, still in attribute
        t.tagPending.appendAttributeValue(ch)
      }
    }
  },
  AfterAttributeValue_quoted: {
    // Just after a closing quote; expect whitespace, '/', or '>'.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`AfterAttributeValue_quoted :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.BeforeAttributeName)
      } else if (ch === '/') {
        t.transition(TokeniserStates.SelfClosingStartTag)
      } else if (ch === '>') {
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        // missing whitespace between attributes: rewind, report, retry as attribute name
        r.unconsume()
        t.error(this)
        t.transition(TokeniserStates.BeforeAttributeName)
      }
    }
  },
  SelfClosingStartTag: {
    // Saw '/' inside a tag; '>' makes it self-closing, anything else is an error.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`SelfClosingStartTag :${ch}`)
      if (ch === '>') {
        t.tagPending.selfClosing = true
        t.emitTagPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.transition(TokeniserStates.Data)
      } else {
        // not a valid self-close; rewind and reparse as an attribute
        r.unconsume()
        t.error(this)
        t.transition(TokeniserStates.BeforeAttributeName)
      }
    }
  },
  BogusComment: {
    // Accumulates everything up to '>' (or EOF) into a bogus comment.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState", `BogusComment `)
      // todo: handle bogus comment starting from eof. when does that trigger?
      t.commentPending.append(r.consumeTo('>'))
      // todo: replace nullChar with replaceChar
      const next = r.current()
      if (next !== '>' && next !== constants.eof) {
        return // buffer underrun; keep accumulating on the next read
      }
      r.consume()
      t.emitCommentPending()
      t.transition(TokeniserStates.Data)
    }
  },
  MarkupDeclarationOpen: {
    // After "<!": dispatch to comment, doctype, or CDATA handling.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`MarkupDeclarationOpen `)
      if (r.matchConsume('--')) {
        t.createCommentPending()
        t.transition(TokeniserStates.CommentStart)
        return
      }
      if (r.matchConsumeIgnoreCase('DOCTYPE')) {
        t.transition(TokeniserStates.Doctype)
        return
      }
      if (r.matchConsume('[CDATA[')) {
        // todo: should actually check current namespace, and only non-html allows cdata. until namespace
        // is implemented properly, keep handling as cdata
        //} else if (!t.currentNodeInHtmlNS() && r.matchConsume("[CDATA[")) {
        t.createTempBuffer()
        t.transition(TokeniserStates.CdataSection)
        return
      }
      // unrecognised declaration: treat the rest as a bogus comment
      t.error(this)
      t.createBogusCommentPending()
      t.transition(TokeniserStates.BogusComment)
    }
  },
  CommentStart: {
    // Just after "<!--"; handles abrupt closings and the second dash.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`CommentStart :${ch}`)
      if (ch === '-') {
        t.transition(TokeniserStates.CommentStartDash)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.commentPending.append(constants.replacementChar)
        t.transition(TokeniserStates.Comment)
      } else if (ch === '>') {
        // abrupt close "<!-->"
        t.error(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else {
        // ordinary char: let the Comment state bulk-read it
        r.unconsume()
        t.transition(TokeniserStates.Comment)
      }
    }
  },
  CommentStartDash: {
    // Saw "<!--" then '-'; a second '-' heads toward the comment end.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState", `CommentStartDash :${ch}`)
      if (ch === '-') {
        t.transition(TokeniserStates.CommentEnd)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.commentPending.append(constants.replacementChar)
        t.transition(TokeniserStates.Comment)
      } else if (ch === '>') {
        // abrupt close "<!--->"
        t.error(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else {
        t.commentPending.append(ch)
        t.transition(TokeniserStates.Comment)
      }
    }
  },
  Comment: {
    // Inside comment body text; scans for the first '-' of a possible "-->".
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.current()
      logUtil.i("tokeniserState",`Comment :${ch}`)
      if (ch === '-') {
        t.advanceTransition(TokeniserStates.CommentEndDash)
      } else if (ch === constants.nullChar) {
        t.error(this)
        r.advance()
        t.commentPending.append(constants.replacementChar)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else {
        // bulk-read comment text up to the next interesting char
        t.commentPending.append(r.consumeToAny('-', constants.nullChar))
      }
    }
  },
  CommentEndDash: {
    // Saw one '-' inside a comment; a second means we may be closing.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`CommentEndDash :${ch}`)
      if (ch === '-') {
        t.transition(TokeniserStates.CommentEnd)
      } else if (ch === constants.nullChar) {
        t.error(this)
        // the lone '-' was real comment text; keep it, plus the replacement
        t.commentPending.append('-').append(constants.replacementChar)
        t.transition(TokeniserStates.Comment)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else {
        t.commentPending.append('-').append(ch)
        t.transition(TokeniserStates.Comment)
      }
    }
  },
  CommentEnd: {
    // Saw "--" inside a comment; '>' closes it, anything else resumes the body.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`CommentEnd :${ch}`)
      if (ch === '>') {
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.nullChar) {
        t.error(this)
        // the "--" was comment text after all
        t.commentPending.append("--").append(constants.replacementChar)
        t.transition(TokeniserStates.Comment)
      } else if (ch === '!') {
        t.transition(TokeniserStates.CommentEndBang)
      } else if (ch === '-') {
        // extra dash: keep one and stay here waiting for '>'
        t.commentPending.append('-')
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else {
        t.commentPending.append("--").append(ch)
        t.transition(TokeniserStates.Comment)
      }
    }
  },
  CommentEndBang: {
    // Saw "--!" inside a comment; handles the non-standard "--!>" close.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`CommentEndBang :${ch}`)
      if (ch === '-') {
        t.commentPending.append("--!")
        t.transition(TokeniserStates.CommentEndDash)
      } else if (ch === '>') {
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.commentPending.append("--!").append(constants.replacementChar)
        t.transition(TokeniserStates.Comment)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.emitCommentPending()
        t.transition(TokeniserStates.Data)
      } else {
        t.commentPending.append("--!").append(ch)
        t.transition(TokeniserStates.Comment)
      }
    }
  },
  Doctype: {
    // Just after "<!DOCTYPE"; expects whitespace before the doctype name.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`Doctype :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.BeforeDoctypeName)
      } else if (ch === '>' || ch === constants.eof) {
        // invalid <!DOCTYPE> or truncated input: emit a force-quirks doctype.
        // (the EOF case additionally reports an eof error, mirroring the
        // original switch fall-through from eof into the '>' arm)
        if (ch === constants.eof) {
          t.eofError(this)
        }
        t.error(this)
        t.createDoctypePending()
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.error(this)
        t.transition(TokeniserStates.BeforeDoctypeName)
      }
    }
  },
  BeforeDoctypeName: {
    // Skips whitespace before the doctype name, then begins collecting it.
    read(t: Tokeniser, r: CharacterReader) {
      if (r.matchesAsciiAlpha()) {
        t.createDoctypePending()
        t.transition(TokeniserStates.DoctypeName)
        return
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`BeforeDoctypeName :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.createDoctypePending()
        t.doctypePending.name.push(constants.replacementStr)
        t.transition(TokeniserStates.DoctypeName)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.createDoctypePending()
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.createDoctypePending()
        t.doctypePending.name.push(ch)
        t.transition(TokeniserStates.DoctypeName)
      }
    }
  },
  DoctypeName: {
    // Collects the doctype name; '>' emits, whitespace moves past the name.
    read(t: Tokeniser, r: CharacterReader) {
      if (r.matchesLetter()) {
        // bulk-read a run of letters into the name
        t.doctypePending.name.push(r.consumeLetterSequence())
        return
      }
      const ch = r.consume()
      logUtil.i("tokeniserState",`DoctypeName :${ch}`)
      if (ch === '>') {
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.AfterDoctypeName)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.doctypePending.name.push(constants.replacementStr)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.doctypePending.name.push(ch)
      }
    }
  },
  AfterDoctypeName: {
    // After the doctype name: look for PUBLIC/SYSTEM keywords or the close.
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`AfterDoctypeName`)
      if (r.isEmpty()) {
        // EOF: emit what we have, in quirks mode
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
        return
      }
      if (r.matchesAny('\t', '\n', '\r', '\f', ' ')) {
        r.advance() // ignore whitespace
        return
      }
      if (r.matches('>')) {
        t.emitDoctypePending()
        t.advanceTransition(TokeniserStates.Data)
        return
      }
      if (r.matchConsumeIgnoreCase(constants.PUBLIC_KEY)) {
        t.doctypePending.pubSysKey = constants.PUBLIC_KEY
        t.transition(TokeniserStates.AfterDoctypePublicKeyword)
        return
      }
      if (r.matchConsumeIgnoreCase(constants.SYSTEM_KEY)) {
        t.doctypePending.pubSysKey = constants.SYSTEM_KEY
        t.transition(TokeniserStates.AfterDoctypeSystemKeyword)
        return
      }
      // anything else makes the doctype bogus
      t.error(this)
      t.doctypePending.forceQuirks = true
      t.advanceTransition(TokeniserStates.BogusDoctype)
    }
  },
  AfterDoctypePublicKeyword: {
    // Just after "PUBLIC"; expects whitespace before the quoted identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`AfterDoctypePublicKeyword :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.BeforeDoctypePublicIdentifier)
      } else if (ch === '"') {
        // missing whitespace: error, but start the (empty) public id anyway
        t.error(this)
        t.transition(TokeniserStates.DoctypePublicIdentifier_doubleQuoted)
      } else if (ch === '\'') {
        t.error(this)
        t.transition(TokeniserStates.DoctypePublicIdentifier_singleQuoted)
      } else if (ch === '>') {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.transition(TokeniserStates.BogusDoctype)
      }
    }
  },
  BeforeDoctypePublicIdentifier: {
    // Skips whitespace, then opens the quoted public identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`BeforeDoctypePublicIdentifier :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === '"') {
        // public id starts empty
        t.transition(TokeniserStates.DoctypePublicIdentifier_doubleQuoted)
      } else if (ch === '\'') {
        t.transition(TokeniserStates.DoctypePublicIdentifier_singleQuoted)
      } else if (ch === '>') {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.transition(TokeniserStates.BogusDoctype)
      }
    }
  },
  DoctypePublicIdentifier_doubleQuoted: {
    // Inside a double-quoted public identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`DoctypePublicIdentifier_doubleQuoted :${ch}`)
      if (ch === '"') {
        t.transition(TokeniserStates.AfterDoctypePublicIdentifier)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.doctypePending.publicIdentifier.push(constants.replacementStr)
      } else if (ch === '>') {
        // premature close inside the identifier
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.doctypePending.publicIdentifier.push(ch)
      }
    }
  },
  DoctypePublicIdentifier_singleQuoted: {
    // Inside a single-quoted public identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`DoctypePublicIdentifier_singleQuoted :${ch}`)
      if (ch === '\'') {
        t.transition(TokeniserStates.AfterDoctypePublicIdentifier)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.doctypePending.publicIdentifier.push(constants.replacementStr)
      } else if (ch === '>') {
        // premature close inside the identifier
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.doctypePending.publicIdentifier.push(ch)
      }
    }
  },
  AfterDoctypePublicIdentifier: {
    // After the public id: a system id may follow, or the doctype may close.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`AfterDoctypePublicIdentifier :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.BetweenDoctypePublicAndSystemIdentifiers)
      } else if (ch === '>') {
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === '"') {
        // missing whitespace: error, system id starts empty
        t.error(this)
        t.transition(TokeniserStates.DoctypeSystemIdentifier_doubleQuoted)
      } else if (ch === '\'') {
        t.error(this)
        t.transition(TokeniserStates.DoctypeSystemIdentifier_singleQuoted)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.transition(TokeniserStates.BogusDoctype)
      }
    }
  },
  BetweenDoctypePublicAndSystemIdentifiers: {
    // Whitespace gap between the public and system identifiers.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`BetweenDoctypePublicAndSystemIdentifiers :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === '>') {
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === '"') {
        // system id starts empty
        t.error(this)
        t.transition(TokeniserStates.DoctypeSystemIdentifier_doubleQuoted)
      } else if (ch === '\'') {
        t.error(this)
        t.transition(TokeniserStates.DoctypeSystemIdentifier_singleQuoted)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.transition(TokeniserStates.BogusDoctype)
      }
    }
  },
  AfterDoctypeSystemKeyword: {
    // Just after "SYSTEM"; expects whitespace before the quoted identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`AfterDoctypeSystemKeyword :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        t.transition(TokeniserStates.BeforeDoctypeSystemIdentifier)
      } else if (ch === '>') {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === '"') {
        // missing whitespace: error, system id starts empty
        t.error(this)
        t.transition(TokeniserStates.DoctypeSystemIdentifier_doubleQuoted)
      } else if (ch === '\'') {
        t.error(this)
        t.transition(TokeniserStates.DoctypeSystemIdentifier_singleQuoted)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        // note: emits without a state transition, matching the original port
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
      }
    }
  },
  BeforeDoctypeSystemIdentifier: {
    // Skips whitespace, then opens the quoted system identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`BeforeDoctypeSystemIdentifier :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === '"') {
        // system id starts empty
        t.transition(TokeniserStates.DoctypeSystemIdentifier_doubleQuoted)
      } else if (ch === '\'') {
        t.transition(TokeniserStates.DoctypeSystemIdentifier_singleQuoted)
      } else if (ch === '>') {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.transition(TokeniserStates.BogusDoctype)
      }
    }
  },
  DoctypeSystemIdentifier_doubleQuoted: {
    // Inside a double-quoted system identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`DoctypeSystemIdentifier_doubleQuoted :${ch}`)
      if (ch === '"') {
        t.transition(TokeniserStates.AfterDoctypeSystemIdentifier)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.doctypePending.systemIdentifier.push(constants.replacementStr)
      } else if (ch === '>') {
        // premature close inside the identifier
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.doctypePending.systemIdentifier.push(ch)
      }
    }
  },
  DoctypeSystemIdentifier_singleQuoted: {
    // Inside a single-quoted system identifier.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`DoctypeSystemIdentifier_singleQuoted :${ch}`)
      if (ch === '\'') {
        t.transition(TokeniserStates.AfterDoctypeSystemIdentifier)
      } else if (ch === constants.nullChar) {
        t.error(this)
        t.doctypePending.systemIdentifier.push(constants.replacementStr)
      } else if (ch === '>') {
        // premature close inside the identifier
        t.error(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        t.doctypePending.systemIdentifier.push(ch)
      }
    }
  },
  AfterDoctypeSystemIdentifier: {
    // After the system id: only whitespace or '>' are valid.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`AfterDoctypeSystemIdentifier :${ch}`)
      if (ch === '\t' || ch === '\n' || ch === '\r' || ch === '\f' || ch === ' ') {
        // ignore whitespace
      } else if (ch === '>') {
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else if (ch === constants.eof) {
        t.eofError(this)
        t.doctypePending.forceQuirks = true
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      } else {
        // NOT force quirks here, per the tokenisation algorithm
        t.error(this)
        t.transition(TokeniserStates.BogusDoctype)
      }
    }
  },
  BogusDoctype: {
    // Discards characters until '>' or EOF, then emits the pending doctype.
    read(t: Tokeniser, r: CharacterReader) {
      const ch = r.consume()
      logUtil.i("tokeniserState",`BogusDoctype :${ch}`)
      if (ch === '>' || ch === constants.eof) {
        t.emitDoctypePending()
        t.transition(TokeniserStates.Data)
      }
      // any other char is ignored
    }
  },
  CdataSection: {
    // Accumulates CDATA content until "]]>" (may span several buffer reads).
    read(t: Tokeniser, r: CharacterReader) {
      logUtil.i("tokeniserState",`CdataSection`)
      t.dataBuffer.push(r.consumeTo("]]>"))
      const closed = r.matchConsume("]]>")
      if (closed || r.isEmpty()) {
        t.emit(new CDataCharacter(t.dataBuffer.join('')))
        t.transition(TokeniserStates.Data)
      }
      // otherwise, buffer underrun, stay in data section
    }
  }

}