'use strict';

var Tokenizer = require('../tokenization/tokenizer'),
    OpenElementStack = require('./open_element_stack'),
    FormattingElementList = require('./formatting_element_list'),
    LocationInfoMixin = require('./location_info_mixin'),
    DefaultTreeAdapter = require('../tree_adapters/default'),
    Doctype = require('../common/doctype'),
    ForeignContent = require('../common/foreign_content'),
    Utils = require('../common/utils'),
    UNICODE = require('../common/unicode'),
    HTML = require('../common/html');

//Aliases
var $ = HTML.TAG_NAMES,
    NS = HTML.NAMESPACES,
    ATTRS = HTML.ATTRS;

//Default options
var DEFAULT_OPTIONS = {
    decodeHtmlEntities: true,
    locationInfo: false
};

//Misc constants
var SEARCHABLE_INDEX_DEFAULT_PROMPT = 'This is a searchable index. Enter search keywords: ',
    SEARCHABLE_INDEX_INPUT_NAME = 'isindex',
    HIDDEN_INPUT_TYPE = 'hidden';

//Adoption agency loops iteration count
var AA_OUTER_LOOP_ITER = 8,
    AA_INNER_LOOP_ITER = 3;

//Insertion modes
var INITIAL_MODE = 'INITIAL_MODE',
    BEFORE_HTML_MODE = 'BEFORE_HTML_MODE',
    BEFORE_HEAD_MODE = 'BEFORE_HEAD_MODE',
    IN_HEAD_MODE = 'IN_HEAD_MODE',
    AFTER_HEAD_MODE = 'AFTER_HEAD_MODE',
    IN_BODY_MODE = 'IN_BODY_MODE',
    TEXT_MODE = 'TEXT_MODE',
    IN_TABLE_MODE = 'IN_TABLE_MODE',
    IN_TABLE_TEXT_MODE = 'IN_TABLE_TEXT_MODE',
    IN_CAPTION_MODE = 'IN_CAPTION_MODE',
    IN_COLUMN_GROUP_MODE = 'IN_COLUMN_GROUP_MODE',
    IN_TABLE_BODY_MODE = 'IN_TABLE_BODY_MODE',
    IN_ROW_MODE = 'IN_ROW_MODE',
    IN_CELL_MODE = 'IN_CELL_MODE',
    IN_SELECT_MODE = 'IN_SELECT_MODE',
    IN_SELECT_IN_TABLE_MODE = 'IN_SELECT_IN_TABLE_MODE',
    IN_TEMPLATE_MODE = 'IN_TEMPLATE_MODE',
    AFTER_BODY_MODE = 'AFTER_BODY_MODE',
    IN_FRAMESET_MODE = 'IN_FRAMESET_MODE',
    AFTER_FRAMESET_MODE = 'AFTER_FRAMESET_MODE',
    AFTER_AFTER_BODY_MODE = 'AFTER_AFTER_BODY_MODE',
    AFTER_AFTER_FRAMESET_MODE = 'AFTER_AFTER_FRAMESET_MODE';

//Insertion mode reset map
var INSERTION_MODE_RESET_MAP = {};

INSERTION_MODE_RESET_MAP[$.TR] = IN_ROW_MODE;

INSERTION_MODE_RESET_MAP[$.TBODY] =
INSERTION_MODE_RESET_MAP[$.THEAD] =
INSERTION_MODE_RESET_MAP[$.TFOOT] = IN_TABLE_BODY_MODE;

INSERTION_MODE_RESET_MAP[$.CAPTION] = IN_CAPTION_MODE;
INSERTION_MODE_RESET_MAP[$.COLGROUP] = IN_COLUMN_GROUP_MODE;
INSERTION_MODE_RESET_MAP[$.TABLE] = IN_TABLE_MODE;
INSERTION_MODE_RESET_MAP[$.BODY] = IN_BODY_MODE;
INSERTION_MODE_RESET_MAP[$.FRAMESET] = IN_FRAMESET_MODE;

//Template insertion mode switch map
var TEMPLATE_INSERTION_MODE_SWITCH_MAP = {};

TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.CAPTION] =
TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.COLGROUP] =
TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.TBODY] =
TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.TFOOT] =
TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.THEAD] = IN_TABLE_MODE;

TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.COL] = IN_COLUMN_GROUP_MODE;
TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.TR] = IN_TABLE_BODY_MODE;

TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.TD] =
TEMPLATE_INSERTION_MODE_SWITCH_MAP[$.TH] = IN_ROW_MODE;

//Token handlers map for insertion modes
var _ = {};

_[INITIAL_MODE] = {};
_[INITIAL_MODE][Tokenizer.CHARACTER_TOKEN] =
_[INITIAL_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenInInitialMode;
_[INITIAL_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = ignoreToken;
_[INITIAL_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[INITIAL_MODE][Tokenizer.DOCTYPE_TOKEN] = doctypeInInitialMode;
_[INITIAL_MODE][Tokenizer.START_TAG_TOKEN] =
_[INITIAL_MODE][Tokenizer.END_TAG_TOKEN] =
_[INITIAL_MODE][Tokenizer.EOF_TOKEN] = tokenInInitialMode;

_[BEFORE_HTML_MODE] = {};
_[BEFORE_HTML_MODE][Tokenizer.CHARACTER_TOKEN] =
_[BEFORE_HTML_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenBeforeHtml;
_[BEFORE_HTML_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = ignoreToken;
_[BEFORE_HTML_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[BEFORE_HTML_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[BEFORE_HTML_MODE][Tokenizer.START_TAG_TOKEN] = startTagBeforeHtml;
_[BEFORE_HTML_MODE][Tokenizer.END_TAG_TOKEN] = endTagBeforeHtml;
_[BEFORE_HTML_MODE][Tokenizer.EOF_TOKEN] = tokenBeforeHtml;

_[BEFORE_HEAD_MODE] = {};
_[BEFORE_HEAD_MODE][Tokenizer.CHARACTER_TOKEN] =
_[BEFORE_HEAD_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenBeforeHead;
_[BEFORE_HEAD_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = ignoreToken;
_[BEFORE_HEAD_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[BEFORE_HEAD_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[BEFORE_HEAD_MODE][Tokenizer.START_TAG_TOKEN] = startTagBeforeHead;
_[BEFORE_HEAD_MODE][Tokenizer.END_TAG_TOKEN] = endTagBeforeHead;
_[BEFORE_HEAD_MODE][Tokenizer.EOF_TOKEN] = tokenBeforeHead;

_[IN_HEAD_MODE] = {};
_[IN_HEAD_MODE][Tokenizer.CHARACTER_TOKEN] =
_[IN_HEAD_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenInHead;
_[IN_HEAD_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[IN_HEAD_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_HEAD_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_HEAD_MODE][Tokenizer.START_TAG_TOKEN] = startTagInHead;
_[IN_HEAD_MODE][Tokenizer.END_TAG_TOKEN] = endTagInHead;
_[IN_HEAD_MODE][Tokenizer.EOF_TOKEN] = tokenInHead;

_[AFTER_HEAD_MODE] = {};
_[AFTER_HEAD_MODE][Tokenizer.CHARACTER_TOKEN] =
_[AFTER_HEAD_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenAfterHead;
_[AFTER_HEAD_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[AFTER_HEAD_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[AFTER_HEAD_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[AFTER_HEAD_MODE][Tokenizer.START_TAG_TOKEN] = startTagAfterHead;
_[AFTER_HEAD_MODE][Tokenizer.END_TAG_TOKEN] = endTagAfterHead;
_[AFTER_HEAD_MODE][Tokenizer.EOF_TOKEN] = tokenAfterHead;

_[IN_BODY_MODE] = {};
_[IN_BODY_MODE][Tokenizer.CHARACTER_TOKEN] = characterInBody;
_[IN_BODY_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_BODY_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[IN_BODY_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_BODY_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_BODY_MODE][Tokenizer.START_TAG_TOKEN] = startTagInBody;
_[IN_BODY_MODE][Tokenizer.END_TAG_TOKEN] = endTagInBody;
_[IN_BODY_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[TEXT_MODE] = {};
_[TEXT_MODE][Tokenizer.CHARACTER_TOKEN] =
_[TEXT_MODE][Tokenizer.NULL_CHARACTER_TOKEN] =
_[TEXT_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[TEXT_MODE][Tokenizer.COMMENT_TOKEN] =
_[TEXT_MODE][Tokenizer.DOCTYPE_TOKEN] =
_[TEXT_MODE][Tokenizer.START_TAG_TOKEN] = ignoreToken;
_[TEXT_MODE][Tokenizer.END_TAG_TOKEN] = endTagInText;
_[TEXT_MODE][Tokenizer.EOF_TOKEN] = eofInText;

_[IN_TABLE_MODE] = {};
_[IN_TABLE_MODE][Tokenizer.CHARACTER_TOKEN] =
_[IN_TABLE_MODE][Tokenizer.NULL_CHARACTER_TOKEN] =
_[IN_TABLE_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = characterInTable;
_[IN_TABLE_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_TABLE_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_TABLE_MODE][Tokenizer.START_TAG_TOKEN] = startTagInTable;
_[IN_TABLE_MODE][Tokenizer.END_TAG_TOKEN] = endTagInTable;
_[IN_TABLE_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_TABLE_TEXT_MODE] = {};
_[IN_TABLE_TEXT_MODE][Tokenizer.CHARACTER_TOKEN] = characterInTableText;
_[IN_TABLE_TEXT_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_TABLE_TEXT_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInTableText;
_[IN_TABLE_TEXT_MODE][Tokenizer.COMMENT_TOKEN] =
_[IN_TABLE_TEXT_MODE][Tokenizer.DOCTYPE_TOKEN] =
_[IN_TABLE_TEXT_MODE][Tokenizer.START_TAG_TOKEN] =
_[IN_TABLE_TEXT_MODE][Tokenizer.END_TAG_TOKEN] =
_[IN_TABLE_TEXT_MODE][Tokenizer.EOF_TOKEN] = tokenInTableText;

_[IN_CAPTION_MODE] = {};
_[IN_CAPTION_MODE][Tokenizer.CHARACTER_TOKEN] = characterInBody;
_[IN_CAPTION_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_CAPTION_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[IN_CAPTION_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_CAPTION_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_CAPTION_MODE][Tokenizer.START_TAG_TOKEN] = startTagInCaption;
_[IN_CAPTION_MODE][Tokenizer.END_TAG_TOKEN] = endTagInCaption;
_[IN_CAPTION_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_COLUMN_GROUP_MODE] = {};
_[IN_COLUMN_GROUP_MODE][Tokenizer.CHARACTER_TOKEN] =
_[IN_COLUMN_GROUP_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenInColumnGroup;
_[IN_COLUMN_GROUP_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[IN_COLUMN_GROUP_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_COLUMN_GROUP_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_COLUMN_GROUP_MODE][Tokenizer.START_TAG_TOKEN] = startTagInColumnGroup;
_[IN_COLUMN_GROUP_MODE][Tokenizer.END_TAG_TOKEN] = endTagInColumnGroup;
_[IN_COLUMN_GROUP_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_TABLE_BODY_MODE] = {};
_[IN_TABLE_BODY_MODE][Tokenizer.CHARACTER_TOKEN] =
_[IN_TABLE_BODY_MODE][Tokenizer.NULL_CHARACTER_TOKEN] =
_[IN_TABLE_BODY_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = characterInTable;
_[IN_TABLE_BODY_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_TABLE_BODY_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_TABLE_BODY_MODE][Tokenizer.START_TAG_TOKEN] = startTagInTableBody;
_[IN_TABLE_BODY_MODE][Tokenizer.END_TAG_TOKEN] = endTagInTableBody;
_[IN_TABLE_BODY_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_ROW_MODE] = {};
_[IN_ROW_MODE][Tokenizer.CHARACTER_TOKEN] =
_[IN_ROW_MODE][Tokenizer.NULL_CHARACTER_TOKEN] =
_[IN_ROW_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = characterInTable;
_[IN_ROW_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_ROW_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_ROW_MODE][Tokenizer.START_TAG_TOKEN] = startTagInRow;
_[IN_ROW_MODE][Tokenizer.END_TAG_TOKEN] = endTagInRow;
_[IN_ROW_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_CELL_MODE] = {};
_[IN_CELL_MODE][Tokenizer.CHARACTER_TOKEN] = characterInBody;
_[IN_CELL_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_CELL_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[IN_CELL_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_CELL_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_CELL_MODE][Tokenizer.START_TAG_TOKEN] = startTagInCell;
_[IN_CELL_MODE][Tokenizer.END_TAG_TOKEN] = endTagInCell;
_[IN_CELL_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_SELECT_MODE] = {};
_[IN_SELECT_MODE][Tokenizer.CHARACTER_TOKEN] = insertCharacters;
_[IN_SELECT_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_SELECT_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[IN_SELECT_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_SELECT_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_SELECT_MODE][Tokenizer.START_TAG_TOKEN] = startTagInSelect;
_[IN_SELECT_MODE][Tokenizer.END_TAG_TOKEN] = endTagInSelect;
_[IN_SELECT_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_SELECT_IN_TABLE_MODE] = {};
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.CHARACTER_TOKEN] = insertCharacters;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.START_TAG_TOKEN] = startTagInSelectInTable;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.END_TAG_TOKEN] = endTagInSelectInTable;
_[IN_SELECT_IN_TABLE_MODE][Tokenizer.EOF_TOKEN] = eofInBody;

_[IN_TEMPLATE_MODE] = {};
_[IN_TEMPLATE_MODE][Tokenizer.CHARACTER_TOKEN] = characterInBody;
_[IN_TEMPLATE_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_TEMPLATE_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[IN_TEMPLATE_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_TEMPLATE_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_TEMPLATE_MODE][Tokenizer.START_TAG_TOKEN] = startTagInTemplate;
_[IN_TEMPLATE_MODE][Tokenizer.END_TAG_TOKEN] = endTagInTemplate;
_[IN_TEMPLATE_MODE][Tokenizer.EOF_TOKEN] = eofInTemplate;

_[AFTER_BODY_MODE] = {};
_[AFTER_BODY_MODE][Tokenizer.CHARACTER_TOKEN] =
_[AFTER_BODY_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenAfterBody;
_[AFTER_BODY_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[AFTER_BODY_MODE][Tokenizer.COMMENT_TOKEN] = appendCommentToRootHtmlElement;
_[AFTER_BODY_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[AFTER_BODY_MODE][Tokenizer.START_TAG_TOKEN] = startTagAfterBody;
_[AFTER_BODY_MODE][Tokenizer.END_TAG_TOKEN] = endTagAfterBody;
_[AFTER_BODY_MODE][Tokenizer.EOF_TOKEN] = stopParsing;

_[IN_FRAMESET_MODE] = {};
_[IN_FRAMESET_MODE][Tokenizer.CHARACTER_TOKEN] =
_[IN_FRAMESET_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[IN_FRAMESET_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[IN_FRAMESET_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[IN_FRAMESET_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[IN_FRAMESET_MODE][Tokenizer.START_TAG_TOKEN] = startTagInFrameset;
_[IN_FRAMESET_MODE][Tokenizer.END_TAG_TOKEN] = endTagInFrameset;
_[IN_FRAMESET_MODE][Tokenizer.EOF_TOKEN] = stopParsing;

_[AFTER_FRAMESET_MODE] = {};
_[AFTER_FRAMESET_MODE][Tokenizer.CHARACTER_TOKEN] =
_[AFTER_FRAMESET_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[AFTER_FRAMESET_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = insertCharacters;
_[AFTER_FRAMESET_MODE][Tokenizer.COMMENT_TOKEN] = appendComment;
_[AFTER_FRAMESET_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[AFTER_FRAMESET_MODE][Tokenizer.START_TAG_TOKEN] = startTagAfterFrameset;
_[AFTER_FRAMESET_MODE][Tokenizer.END_TAG_TOKEN] = endTagAfterFrameset;
_[AFTER_FRAMESET_MODE][Tokenizer.EOF_TOKEN] = stopParsing;

_[AFTER_AFTER_BODY_MODE] = {};
_[AFTER_AFTER_BODY_MODE][Tokenizer.CHARACTER_TOKEN] = tokenAfterAfterBody;
_[AFTER_AFTER_BODY_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = tokenAfterAfterBody;
_[AFTER_AFTER_BODY_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[AFTER_AFTER_BODY_MODE][Tokenizer.COMMENT_TOKEN] = appendCommentToDocument;
_[AFTER_AFTER_BODY_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[AFTER_AFTER_BODY_MODE][Tokenizer.START_TAG_TOKEN] = startTagAfterAfterBody;
_[AFTER_AFTER_BODY_MODE][Tokenizer.END_TAG_TOKEN] = tokenAfterAfterBody;
_[AFTER_AFTER_BODY_MODE][Tokenizer.EOF_TOKEN] = stopParsing;

_[AFTER_AFTER_FRAMESET_MODE] = {};
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.CHARACTER_TOKEN] =
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.NULL_CHARACTER_TOKEN] = ignoreToken;
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.WHITESPACE_CHARACTER_TOKEN] = whitespaceCharacterInBody;
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.COMMENT_TOKEN] = appendCommentToDocument;
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.DOCTYPE_TOKEN] = ignoreToken;
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.START_TAG_TOKEN] = startTagAfterAfterFrameset;
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.END_TAG_TOKEN] = ignoreToken;
_[AFTER_AFTER_FRAMESET_MODE][Tokenizer.EOF_TOKEN] = stopParsing;

//Searchable index building utils (<isindex> tag)
function getSearchableIndexFormAttrs(isindexStartTagToken) {
    var indexAction = Tokenizer.getTokenAttr(isindexStartTagToken, ATTRS.ACTION),
        attrs = [];

    if (indexAction !== null) {
        attrs.push({
            name: ATTRS.ACTION,
            value: indexAction
        });
    }

    return attrs;
}

function getSearchableIndexLabelText(isindexStartTagToken) {
    var indexPrompt = Tokenizer.getTokenAttr(isindexStartTagToken, ATTRS.PROMPT);

    return indexPrompt === null ? SEARCHABLE_INDEX_DEFAULT_PROMPT : indexPrompt;
}

function getSearchableIndexInputAttrs(isindexStartTagToken) {
    var isindexAttrs = isindexStartTagToken.attrs,
        inputAttrs = [];

    for (var i = 0; i < isindexAttrs.length; i++) {
        var name = isindexAttrs[i].name;

        if (name !== ATTRS.NAME && name !== ATTRS.ACTION && name !== ATTRS.PROMPT)
            inputAttrs.push(isindexAttrs[i]);
    }

    inputAttrs.push({
        name: ATTRS.NAME,
        value: SEARCHABLE_INDEX_INPUT_NAME
    });

    return inputAttrs;
}

//Parser
var Parser = module.exports = function (treeAdapter, options) {
    this.treeAdapter = treeAdapter || DefaultTreeAdapter;
    this.options = Utils.mergeOptions(DEFAULT_OPTIONS, options);
    this.scriptHandler = null;

    if (this.options.locationInfo)
        LocationInfoMixin.assign(this);
};

//API
Parser.prototype.parse = function (html) {
    var document = this.treeAdapter.createDocument();

    this._reset(html, document, null);
    this._runParsingLoop();

    return document;
};

Parser.prototype.parseFragment = function (html, fragmentContext) {
    //NOTE: use