diff --git a/modules/parser/html5/parser.js b/modules/parser/html5/parser.js
index 11a52486d2..3a362e6895 100644
--- a/modules/parser/html5/parser.js
+++ b/modules/parser/html5/parser.js
@@ -5,7 +5,7 @@ var HTML5 = exports.HTML5 = require('../html5');
var events = require('events');
require('./treebuilder');
-require('../mediawiki.html5TokenEmitter');
+require('../mediawiki.HTML5TreeBuilder.node');
var Phase = require('./parser/phase').Phase;
diff --git a/modules/parser/mediawiki.html5TokenEmitter.js b/modules/parser/mediawiki.HTML5TreeBuilder.node.js
similarity index 71%
rename from modules/parser/mediawiki.html5TokenEmitter.js
rename to modules/parser/mediawiki.HTML5TreeBuilder.node.js
index a4e00cfec9..519eee4214 100644
--- a/modules/parser/mediawiki.html5TokenEmitter.js
+++ b/modules/parser/mediawiki.HTML5TreeBuilder.node.js
@@ -6,15 +6,21 @@ var HTML5 = require('./html5/index');
FauxHTML5 = {};
-FauxHTML5.Tokenizer = function ( ) {
+FauxHTML5.TreeBuilder = function ( ) {
+ // The parser we are going to emit our tokens to
this.parser = new HTML5.Parser();
+
+	// Set up the parsing process; the parser consumes the tokens we emit in processToken() below
this.parser.parse(this);
+ this.document = this.parser.document;
return this;
};
-FauxHTML5.Tokenizer.prototype = new events.EventEmitter();
+FauxHTML5.TreeBuilder.prototype = new events.EventEmitter();
-FauxHTML5.Tokenizer.prototype.processToken = function (token) {
+// Adapt the token format to the internal HTML tree builder format and invoke
+// the actual HTML tree builder by emitting the token.
+FauxHTML5.TreeBuilder.prototype.processToken = function (token) {
var att = function (maybeAttribs) {
if ( $.isArray(maybeAttribs) ) {
var atts = [];
@@ -63,6 +69,11 @@ FauxHTML5.Tokenizer.prototype.processToken = function (token) {
}
};
+// Convenience accessor for the body element of the built document.
+FauxHTML5.TreeBuilder.prototype.body = function () {
+	return this.parser.document.getElementsByTagName('body')[0];
+};
+
if (typeof module == "object") {
module.exports.FauxHTML5 = FauxHTML5;
}
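
For reference, a minimal usage sketch of the renamed adapter (illustrative only, not part of the patch). It assumes a plain node require on this module's export and uses only the token shapes that parserTests.js feeds in below; real token streams come out of the tokenizer pipeline.

    // Illustrative sketch, under the assumptions stated above.
    var FauxHTML5 = require('./mediawiki.HTML5TreeBuilder.node').FauxHTML5;

    var treeBuilder = new FauxHTML5.TreeBuilder();
    // Each processToken() call adapts the token and emits it to the wrapped HTML5.Parser.
    treeBuilder.processToken({type: 'TAG', name: 'body'});
    // ... feed the remaining tokens here ...
    treeBuilder.processToken({type: 'END'});
    // The built DOM is reachable via treeBuilder.parser.document, or via the new body() helper.
    console.log(treeBuilder.body().innerHTML);
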
diff --git a/tests/parser/parserTests.js b/tests/parser/parserTests.js
index 882a71bc8e..3270b5f78c 100644
--- a/tests/parser/parserTests.js
+++ b/tests/parser/parserTests.js
@@ -146,7 +146,7 @@ _import(pj('parser', 'mediawiki.parser.peg.js'), ['PegParser']);
_import(pj('parser', 'mediawiki.parser.environment.js'), ['MWParserEnvironment']);
_import(pj('parser', 'ext.cite.taghook.ref.js'), ['MWRefTagHook']);
-_import(pj('parser', 'mediawiki.html5TokenEmitter.js'), ['FauxHTML5']);
+_import(pj('parser', 'mediawiki.HTML5TreeBuilder.node.js'), ['FauxHTML5']);
_import(pj('parser', 'mediawiki.DOMPostProcessor.js'), ['DOMPostProcessor']);
// WikiDom and serializers
@@ -346,9 +346,9 @@ var passedTests = 0,
failTreeTests = 0,
failOutputTests = 0;
+var postProcessor = new DOMPostProcessor();
function processTest(item) {
- var tokenizer = new FauxHTML5.Tokenizer(),
- postProcessor = new DOMPostProcessor();
+ var treeBuilder = new FauxHTML5.TreeBuilder();
if (!('title' in item)) {
console.log(item);
throw new Error('Missing title from test case.');
@@ -397,14 +397,13 @@ function processTest(item) {
// Build a DOM tree from tokens using the HTML tree
// builder/parser.
- processTokens(tokens, tokenizer);
+ processTokens(tokens, treeBuilder);
// Perform post-processing on DOM.
- postProcessor.doPostProcess(tokenizer.parser.document);
+ postProcessor.doPostProcess(treeBuilder.parser.document);
// And serialize the result.
- var out = tokenizer.parser.document
- .getElementsByTagName('body')[0]
+ var out = treeBuilder.body()
.innerHTML;
if ( err ) {
@@ -485,15 +484,15 @@ function processTest(item) {
});
}
-function processTokens ( tokens, tokenizer ) {
+function processTokens ( tokens, treeBuilder ) {
// push a body element, just to be sure to have one
- tokenizer.processToken({type: 'TAG', name: 'body'});
+ treeBuilder.processToken({type: 'TAG', name: 'body'});
// Process all tokens
for (var i = 0, length = tokens.length; i < length; i++) {
- tokenizer.processToken(tokens[i]);
+ treeBuilder.processToken(tokens[i]);
}
// And signal the end
- tokenizer.processToken({type: 'END'});
+ treeBuilder.processToken({type: 'END'});
}
var comments = [];
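
Taken together, the parserTests.js hunks share a single DOMPostProcessor across tests and give each test its own TreeBuilder. Roughly, the per-test flow condenses to the sketch below; runOneTest is a hypothetical wrapper name, the token source is elided, and the other names come from the imports in this file.

    var postProcessor = new DOMPostProcessor();   // one instance, reused by every test

    // Hypothetical wrapper condensing the flow inside processTest().
    function runOneTest(tokens) {
        var treeBuilder = new FauxHTML5.TreeBuilder();
        // Wrap the stream with a body start tag and an END marker, then feed it.
        processTokens(tokens, treeBuilder);
        // Post-process the DOM the tree builder produced.
        postProcessor.doPostProcess(treeBuilder.parser.document);
        // Serialize the body for comparison with the expected output.
        return treeBuilder.body().innerHTML;
    }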