/**
 * Tokenizer for wikitext, using PEG.js and a separate PEG grammar file
 * (pegTokenizer.pegjs.txt)
 *
 * Use along with an HTML5TreeBuilder and the DOMPostProcessor(s) for HTML
 * output.
 */

var PEG = require('pegjs'),
	path = require('path'),
	LRU = require('lru-cache'),
	fs = require('fs'),
	$ = require('jquery'),
	events = require('events'),
	//colors = require('colors'),
	defines = require('./mediawiki.parser.defines.js');

function PegTokenizer( env, canCache ) {
	this.env = env;
	this.canCache = canCache;
	if ( this.canCache ) {
		this.cacheAccum = { chunks: [] };
	}
}

// Inherit from EventEmitter
PegTokenizer.prototype = new events.EventEmitter();
PegTokenizer.prototype.constructor = PegTokenizer;

PegTokenizer.src = false;

/*
 * The main worker. Sets up event emission ('chunk' and 'end' events).
 * Consumers are supposed to register with PegTokenizer before calling
 * process().
 */
PegTokenizer.prototype.process = function( text, cacheKey ) {
	var out, err;
	if ( !this.tokenizer ) {
		// Construct a singleton static tokenizer.
		var pegSrcPath = path.join( __dirname, 'pegTokenizer.pegjs.txt' );
		this.src = fs.readFileSync( pegSrcPath, 'utf8' );
		var tokenizerSource = PEG.buildParser( this.src,
				{ cache: true, trackLineAndColumn: false } ).toSource();

		/* We patch the generated source to assign the arguments array for the
		 * parse function to a function-scoped variable. We use this to pass
		 * in callbacks and other information, which can be used from actions
		 * run when matching a production. In particular, we pass in a
		 * callback that is called for each chunk of tokens in toplevelblock.
		 * Setting this callback per call to parse() keeps the tokenizer
		 * reentrant, so that it can be reused to expand templates while a
		 * main parse is ongoing. PEG tokenizer construction is very
		 * expensive, so having a single reentrant tokenizer is a big win.
		 *
		 * We could also make modules available to the tokenizer by
		 * prepending requires to the source.
		 */
		tokenizerSource = tokenizerSource
			.replace( 'parse: function(input, startRule) {',
				'parse: function(input, startRule) { var __parseArgs = arguments;' )
			// Include the stops key in the cache key
			.replace( /var cacheKey = "[^@"]+@" \+ pos/g,
				function ( m ) { return m + ' + stops.key'; } );
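
		/* Illustrative sketch (not part of the grammar file): with the patch
		 * above, an action in pegTokenizer.pegjs.txt can reach the per-call
		 * arguments through the __parseArgs variable, along the lines of:
		 *
		 *   // inside a toplevelblock action
		 *   var chunkCB = __parseArgs[2];   // assumed argument position
		 *   chunkCB( flattenedTokens );
		 *
		 * The exact indices depend on the argument order used in the
		 * tokenize() calls below (text, startRule, chunkCB, this).
		 */
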
		//console.warn( tokenizerSource );
		PegTokenizer.prototype.tokenizer = eval( tokenizerSource );
		// alias the parse method
		this.tokenizer.tokenize = this.tokenizer.parse;

		// Also, while we are at it, create a tokenizer cache.
		PegTokenizer.prototype.cache = LRU(25);
	}
	if ( this.canCache ) {
		var maybeCached = this.cache.get( cacheKey );
		if ( maybeCached ) {
			this.env.tp( 'tokenizer cache hit for ' + cacheKey );
			//console.warn( JSON.stringify( maybeCached, null, 2 ) );
			for ( var i = 0, l = maybeCached.length; i < l; i++ ) {
				// emit the cached chunk as-is; consumers should not mutate it
				this.emit( 'chunk', maybeCached[i] );
			}
			this.emit( 'end' );
			return;
		} else {
			this.cacheAccum.key = cacheKey;
		}
	}

	// Some input normalization: force a trailing newline
	//if ( text.substring(text.length - 1) !== "\n" ) {
	//	text += "\n";
	//}

	var chunkCB;
	if ( this.canCache ) {
		chunkCB = this.onCacheChunk.bind( this );
	} else {
		chunkCB = this.emit.bind( this, 'chunk' );
	}

	// XXX: Exception handling is skipped in debug mode so that we get
	// reasonable stack traces.
	if ( ! this.env.debug ) {
		try {
			this.tokenizer.tokenize( text, 'start',
					// callback
					chunkCB,
					// inline break test
					this
				);
			this.onEnd();
		} catch ( e ) {
			console.warn( 'Tokenizer error in ' + cacheKey + ': ' + e );
			console.trace();
			chunkCB( [ 'Tokenizer error in ' + cacheKey + ': ' + e ] );
			this.onEnd();
		}
	} else {
		this.tokenizer.tokenize( text, 'start',
				// callback
				chunkCB,
				// inline break test
				this
			);
		this.onEnd();
	}
};
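
/* Example usage (a minimal sketch; assumes a suitably configured `env`
 * object, which is not shown here):
 *
 *   var tokenizer = new PegTokenizer( env );
 *   tokenizer.on( 'chunk', function ( tokens ) {
 *       console.log( JSON.stringify( tokens ) );
 *   } );
 *   tokenizer.on( 'end', function () { console.log( 'done' ); } );
 *   tokenizer.process( "== Heading ==\n" );
 */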

PegTokenizer.prototype.onCacheChunk = function ( chunk ) {
	// make a shallow copy of the chunk for now; slice() copies the array,
	// but the token objects themselves are shared
	this.cacheAccum.chunks.push( chunk.slice() );
	//console.warn( 'onCacheChunk: ' + this.cacheAccum.key + JSON.stringify( chunk, null, 2 ) );
	this.emit( 'chunk', chunk );
};

PegTokenizer.prototype.onEnd = function () {
	if ( this.canCache ) {
		this.cache.set( this.cacheAccum.key, this.cacheAccum.chunks );
		// reset cacheAccum
		this.cacheAccum = { chunks: [] };
	}

	this.emit( 'end' );
};

PegTokenizer.prototype.processImageOptions = function ( text ) {
	return this.tokenizer.tokenize( text, 'img_options', null, this );
};
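
// For instance (a sketch; the exact option syntax is defined by the
// img_options production in the grammar):
//   this.processImageOptions( 'thumb|left|Some caption' );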

/**
 * Tokenize a URL
 */
PegTokenizer.prototype.tokenizeURL = function ( text ) {
	try {
		return this.tokenizer.tokenize( text, 'url', null, this );
	} catch ( e ) {
		return false;
	}
};
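
// For instance, tokenizeURL( 'http://example.com/' ) yields the tokens for
// the 'url' production, while input that does not match makes the generated
// parser throw, so false is returned instead.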

/*
 * Inline breaks: a flag-enabled production which detects end positions for
 * active higher-level productions in inline and other nested productions.
 * Those inner productions are then exited, so that the outer production can
 * handle the end marker.
 */
PegTokenizer.prototype.inline_breaks = function ( input, pos, stops ) {
	var counters = stops.counters;
	switch ( input[pos] ) {
		case '=':
			return stops.onStack( 'equal' ) ||
				( counters.h &&
					( pos === input.length - 1 ||
						input.substr( pos + 1, 200 )
							.match( /[ \t]*(?:[\r\n]|$)/ ) !== null )
				) || null;
		case '|':
			return counters.pipe ||
				counters.template ||
				counters.linkdesc ||
				( stops.onStack( 'table' ) &&
					( ( pos < input.length - 1 &&
						input[pos + 1].match( /[|}]/ ) !== null ) ||
						counters.tableCellArg
					)
				) || null;
		case '{':
			// {{!}} pipe templates
			return (
				counters.pipe ||
				( stops.onStack( 'table' ) &&
					(
						input.substr( pos, 10 ) === '{{!}}{{!}}' ||
						counters.tableCellArg
					)
				)
			) && input.substr( pos, 5 ) === '{{!}}' || null;
		case "!":
			return stops.onStack( 'table' ) && input[pos + 1] === "!" ||
				null;
		case "}":
			return counters.template && input[pos + 1] === "}" || null;
		case ":":
			return counters.colon &&
				! stops.onStack( 'extlink' ) &&
				! counters.linkdesc || null;
		case "\r":
			return stops.onStack( 'table' ) &&
				input.substr( pos ).match( /\r\n?\s*[!|]/ ) !== null ||
				null;
		case "\n":
			//console.warn(input.substr(pos, 5));
			return ( stops.onStack( 'table' ) &&
					// allow leading whitespace in tables
					input.substr( pos, 200 ).match( /^\n\s*[!|]/ ) ) ||
				// break on table-like syntax when the table stop is not
				// enabled. XXX: see if this can be improved
				//input.substr(pos, 200).match( /^\n[!|]/ ) ||
				null;
		case "]":
			return stops.onStack( 'extlink' ) ||
				( counters.linkdesc && input[pos + 1] === ']' ) ||
				null;
		case "<":
			return ( counters.pre && input.substr( pos, 6 ) === '</pre>' ) ||
				( counters.noinclude && input.substr( pos, 12 ) === '</noinclude>' ) ||
				( counters.includeonly && input.substr( pos, 14 ) === '</includeonly>' ) ||
				( counters.onlyinclude && input.substr( pos, 14 ) === '</onlyinclude>' ) ||
				null;
		default:
			return null;
	}
};
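
// Illustrative sketch of the assumed wiring (not verbatim grammar code):
// since the tokenizer instance is passed as the last argument to tokenize(),
// a grammar rule can consult it via __parseArgs, roughly:
//
//   &{ return __parseArgs[3].inline_breaks( input, pos, stops ); }
//
// A truthy result ends the enclosing inline production at this position.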

// Alternate version of the above. The hash is likely faster, but the nested
// function calls seem to cancel that out.
PegTokenizer.prototype.breakMap = {
	'=': function ( input, pos, syntaxFlags ) {
		return syntaxFlags.equal ||
			( syntaxFlags.h &&
				input.substr( pos + 1, 200 )
					.match( /[ \t]*[\r\n]/ ) !== null ) || null;
	},
	'|': function ( input, pos, syntaxFlags ) {
		return syntaxFlags.template ||
			syntaxFlags.linkdesc ||
			( syntaxFlags.table &&
				( ( pos < input.length - 1 &&
					input[pos + 1].match( /[|}]/ ) !== null ) ||
					syntaxFlags.tableCellArg
				)
			) || null;
	},
	"!": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.table && input[pos + 1] === "!" ||
			null;
	},
	"}": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.template && input[pos + 1] === "}" || null;
	},
	":": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.colon &&
			! syntaxFlags.extlink &&
			! syntaxFlags.linkdesc || null;
	},
	"\r": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.table &&
			input.substr( pos, 4 ).match( /\r\n?[!|]/ ) !== null ||
			null;
	},
	"\n": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.table &&
			( input[pos + 1] === '!' ||
				input[pos + 1] === '|' ) ||
			null;
	},
	"]": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.extlink ||
			( syntaxFlags.linkdesc && input[pos + 1] === ']' ) ||
			null;
	},
	"<": function ( input, pos, syntaxFlags ) {
		return syntaxFlags.pre && input.substr( pos, 6 ) === '</pre>' || null;
	}
};

PegTokenizer.prototype.inline_breaks_hash = function ( input, pos, syntaxFlags ) {
	return this.breakMap[ input[pos] ]( input, pos, syntaxFlags );
	//console.warn( 'ilbn res: ' + JSON.stringify( [ res, input.substr( pos, 4 ) ] ) );
	//return res;
};
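
// Note: the lookup above assumes input[pos] always has a breakMap entry. A
// defensive variant (a sketch, not the version used here) would be:
//
//   var handler = this.breakMap[ input[pos] ];
//   return handler ? handler( input, pos, syntaxFlags ) : null;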

if ( typeof module === "object" ) {
	module.exports.PegTokenizer = PegTokenizer;
}