/**
 * Initial parser tests runner for experimental JS parser
 *
 * This pulls all the parserTests.txt items and runs them through the JS
 * parser and JS HTML renderer. Currently no comparison is done on output,
 * as a direct string comparison won't be very encouraging. :)
 *
 * Needs smarter compare, as well as search-y helpers.
 *
 * 2011-07-20 <brion@pobox.com>
 */
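
// Usage sketch (hypothetical invocation; the actual script name and the
// relative test-file paths depend on the checkout layout):
//   node parserTests.js [path/to/parserTests.txt]
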
(function() {

//"use strict";

var fs = require('fs'),
	path = require('path'),
	jsDiff = require('diff'),
	HTML5 = require('html5').HTML5;

// @fixme wrap more of this setup in a common module

// Fetch up some of our wacky parser bits...
var basePath = path.join(path.dirname(path.dirname(process.cwd())), 'modules');
function _require(filename) {
	return require(path.join(basePath, filename));
}

function _import(filename, symbols) {
	var module = _require(filename);
	symbols.forEach(function(symbol) {
		global[symbol] = module[symbol];
	});
}

// needed for html5 parser adapter
//var events = require('events');

// For now most modules only need this for $.extend and $.each :)
global.$ = require('jquery');

// hack for renderer
global.document = $('<div>')[0].ownerDocument;

var pj = path.join;

// Local CommonJS-friendly libs
global.PEG = _require(pj('parser', 'lib.pegjs.js'));

// Our code...
_import(pj('parser', 'mediawiki.parser.peg.js'), ['PegParser']);
_import(pj('parser', 'mediawiki.parser.environment.js'), ['MWParserEnvironment']);
_import(pj('parser', 'ext.cite.taghook.ref.js'), ['MWRefTagHook']);
_import(pj('parser', 'mediawiki.html5TokenEmitter.js'), ['FauxHTML5']);
_import(pj('parser', 'mediawiki.DOMPostProcessor.js'), ['DOMPostProcessor']);

// WikiDom and serializers
_require(pj('es', 'es.js'));
_require(pj('es', 'es.Html.js'));
_require(pj('es', 'serializers', 'es.AnnotationSerializer.js'));
_require(pj('es', 'serializers', 'es.HtmlSerializer.js'));
_require(pj('es', 'serializers', 'es.WikitextSerializer.js'));
_require(pj('es', 'serializers', 'es.JsonSerializer.js'));

// Preload the grammar file...
PegParser.src = fs.readFileSync(path.join(basePath, 'parser', 'pegParser.pegjs.txt'), 'utf8');

var parser = new PegParser();

var testFileName = '../../../../phase3/tests/parser/parserTests.txt'; // default
var testFileName2 = '../../../../tests/parser/parserTests.txt'; // fallback; not everyone checks out at the phase3 level
if (process.argv.length > 2) {
	// hack :D
	testFileName = process.argv[2];
	testFileName2 = null;
	console.log(testFileName);
}

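// parserTests.pegjs (read from the current directory) describes the
// parserTests.txt format; the generated parser splits the file into
// article / test / comment items, handled in the cases.forEach loop below.
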
try {
	var testParser = PEG.buildParser(fs.readFileSync('parserTests.pegjs', 'utf8'));
} catch (e) {
	console.log(e);
}

var testFile;
try {
	testFile = fs.readFileSync(testFileName, 'utf8');
} catch (e) {
	// Try opening the fallback file (disabled when a file was given on the
	// command line).
	if ( testFileName2 !== null ) {
		try { testFile = fs.readFileSync( testFileName2, 'utf8' ); }
		catch (e2) { console.log(e2); }
	}
}

try {
	var cases = testParser.parse(testFile);
} catch (e) {
	console.log(e);
}

var articles = {};

function normalizeTitle(name) {
	if (typeof name !== 'string') {
		throw new Error('nooooooooo not a string');
	}
	name = name.replace(/[\s_]+/g, '_');
	name = name.substr(0, 1).toUpperCase() + name.substr(1);
	if (name === '') {
		throw new Error('Invalid/empty title');
	}
	return name;
}

function fetchArticle(name) {
	var norm = normalizeTitle(name);
	if (norm in articles) {
		return articles[norm];
	}
}

function processArticle(item) {
	var norm = normalizeTitle(item.title);
	articles[norm] = item.text;
}

function nodeToHtml(node) {
	return $('<div>').append(node).html();
}

var htmlparser = new HTML5.Parser();

/* Normalize the expected parser output by parsing it using an HTML5 parser and
 * re-serializing it to HTML. Ideally, the parser would normalize inter-tag
 * whitespace for us. For now, we fake that by simply stripping all newlines.
 */
function normalizeHTML(source) {
	// TODO: Do not strip newlines in pre and nowiki blocks!
	source = source.replace(/\n/g, '');
	try {
		htmlparser.parse('<body>' + source + '</body>');
		return htmlparser.document
			.getElementsByTagName('body')[0]
			.innerHTML
			// a few things we ignore for now..
			.replace(/(title|class|rel)="[^"]+"/g, '');
	} catch(e) {
		console.log("normalizeHTML failed on " +
				source + " with the following error: " + e);
		console.trace();
		return source;
	}
}

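// For illustration (hypothetical expected fragment): given
// '<a class="external" href="x">y</a>\n', the newline is dropped and the
// class="..." attribute text is blanked before the string comparison.
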
// Specialized normalization of the wiki parser output, mostly to ignore a few
// known-ok differences.
function normalizeOut ( out ) {
	// TODO: Do not strip newlines in pre and nowiki blocks!
	return out.replace(/\n| data-[a-zA-Z]+="[^">]+"/g, '')
		.replace(/<!--.*?-->\n?/gm, '');
}
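
// For illustration (hypothetical rendered fragment): newlines, data-*
// attributes and HTML comments are all dropped, e.g.
//   '<p data-foo="1">Hi</p>\n<!-- note -->'  ->  '<p>Hi</p>'
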
function formatHTML ( source ) {
	// Quick hack to insert newlines before some block level start tags
	return source.replace(/(?!^)<((div|dd|dt|li|p|table|tr|td|tbody|dl|ol|ul)[^>]*)>/g,
			'\n<$1>');
}
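
// e.g. (illustrative): '<ul><li>a</li></ul>' -> '<ul>\n<li>a</li></ul>'
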

var passedTests = 0,
	failParseTests = 0,
	failTreeTests = 0,
	failOutputTests = 0;

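// Run a single test case: parse the wikitext input, build a DOM via the HTML5
// tree builder, post-process the DOM, serialize it back to HTML, and compare
// the normalized result against the normalized expected output.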
function processTest(item) {
	var tokenizer = new FauxHTML5.Tokenizer(),
		postProcessor = new DOMPostProcessor();
	if (!('title' in item)) {
		console.log(item);
		throw new Error('Missing title from test case.');
	}
	if (!('input' in item)) {
		console.log(item);
		throw new Error('Missing input from test case ' + item.title);
	}
	if (!('result' in item)) {
		console.log(item);
		throw new Error('Missing result from test case ' + item.title);
	}

	function printTitle() {
		console.log('=====================================================');
		console.log(item.title);
		console.log(item.comments.join('\n'));
		console.log("INPUT:");
		console.log(item.input + "\n");
	}

	parser.parseToTree(item.input + "\n", function(tokens, err) {
		if (err) {
			printTitle();
			failParseTests++;
			console.log('PARSE FAIL', err);
		} else {
			var environment = new MWParserEnvironment({
				tagHooks: {
					'ref': MWRefTagHook,
					'references': MWReferencesTagHook
				}
			});
			//var res = es.HtmlSerializer.stringify(tokens,environment);
			//console.log(JSON.stringify(tokens));

			// Build a DOM tree from tokens using the HTML tree
			// builder/parser.
			processTokens(tokens, tokenizer);

			// Perform post-processing on DOM.
			postProcessor.doPostProcess(tokenizer.parser.document);

			// And serialize the result.
			var out = tokenizer.parser.document
					.getElementsByTagName('body')[0]
					.innerHTML;

			if ( err ) {
				printTitle();
				failTreeTests++;
				console.log('RENDER FAIL', err);
				return;
			}

			var normalizedOut = normalizeOut(out);
			var normalizedExpected = normalizeHTML(item.result);
			if ( normalizedOut !== normalizedExpected ) {
				printTitle();
				failOutputTests++;
				console.log('RAW EXPECTED:');
				console.log(item.result + "\n");

				console.log('RAW RENDERED:');
				console.log(formatHTML(out) + "\n");

				var a = formatHTML(normalizedExpected);
				console.log('NORMALIZED EXPECTED:');
				console.log(a + "\n");

				var b = formatHTML(normalizedOut);
				console.log('NORMALIZED RENDERED:');
				console.log(b + "\n");

				var patch = jsDiff.createPatch('wikitext.txt', a, b, 'before', 'after');
				console.log('DIFF:');
				console.log(patch.replace(/^[^\n]*\n[^\n]*\n[^\n]*\n[^\n]*\n/, ''));
			} else {
				passedTests++;
				console.log( 'PASSED: ' + item.title );
			}
		}
	});
}

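// Feed the parser's token stream into the FauxHTML5 tokenizer wrapper, which
// drives the html5 module's tree builder to produce the DOM.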
function processTokens ( tokens, tokenizer ) {
	// push a body element, just to be sure to have one
	tokenizer.processToken({type: 'TAG', name: 'body'});
	// Process all tokens
	for (var i = 0, length = tokens.length; i < length; i++) {
		tokenizer.processToken(tokens[i]);
	}
	// And signal the end
	tokenizer.processToken({type: 'END'});
}

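// Walk the parsed test cases: 'comment' items are collected and attached to
// the next 'test' item; 'article' items are currently skipped.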
var comments = [];

cases.forEach(function(item) {
	if (typeof item == 'object') {
		switch(item.type) {
			case 'article':
				//processArticle(item);
				break;
			case 'test':
				// Add comments to following test.
				item.comments = comments;
				comments = [];
				processTest(item);
				break;
			case 'comment':
				comments.push(item.comment);
				break;
			default:
				break;
		}
	}
});

console.log( "==========================================================");
|
|
|
|
console.log( "SUMMARY: ");
|
|
|
|
console.log( passedTests + " passed");
|
|
|
|
console.log( failParseTests + " parse failures");
|
|
|
|
console.log( failTreeTests + " tree build failures");
|
|
|
|
console.log( failOutputTests + " output differences");
|
|
|
|
console.log( "\n" + (failParseTests + failTreeTests + failOutputTests) + " total failures");
|
|
|
|
console.log( "==========================================================");
|
|
|
|
|
2011-11-28 11:10:39 +00:00
|
|
|
})();
|