mediawiki-extensions-Visual.../modules/parser/parse.js

/**
 * Command line WikiDom parse utility.
 * Read from STDIN, write to STDOUT.
 *
 * @author Neil Kandalgaonkar <neilk@wikimedia.org>
 * @author Gabriel Wicke <gwicke@wikimedia.org>
 */
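// Example invocation (illustrative; assumes Node.js and wikitext piped on stdin):
//   echo "''Hello'' [[world]]" | node parse.js > out.json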
( function() {
	var ParserPipeline = require('./mediawiki.parser.js').ParserPipeline,
		ParserEnv = require('./mediawiki.parser.environment.js').MWParserEnvironment,
		DOMConverter = require('./mediawiki.DOMConverter.js').DOMConverter,
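		// optimist provides command-line option parsing; it appears unused in this file.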
		optimist = require('optimist');
	var env = new ParserEnv( {
			// Fetch templates from enwiki by default.
			wgScriptPath: "http://en.wikipedia.org/w",
			wgScriptExtension: ".php",
			fetchTemplates: true,
			// Enable/disable debug output using this switch.
			debug: false
		} ),
		parser = new ParserPipeline( env );
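	// Buffer stdin and parse the whole input once the stream ends.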
	process.stdin.resume();
	process.stdin.setEncoding('utf8');
	var inputChunks = [];
	process.stdin.on( 'data', function( chunk ) {
		inputChunks.push( chunk );
	} );
	process.stdin.on( 'end', function() {
		var input = inputChunks.join('');
		parser.on('document', function ( document ) {
			var wikiDom = new DOMConverter().HTMLtoWiki( document.body ),
				// Serialize the WikiDom with indentation
				output = JSON.stringify( wikiDom, null, 2 );
			process.stdout.write( output );
			// Add a trailing newline for the shell user's benefit.
			process.stdout.write( "\n" );
			if ( env.debug ) {
				// Also print the rendered HTML on stderr.
				process.stderr.write( document.body.innerHTML );
				process.stderr.write( "\n" );
			}
			process.exit(0);
		});
		// Kick off the pipeline by feeding it the buffered input.
		parser.parse( input );
	} );
} )();