diff --git a/api/ParserService.js b/api/ParserService.js
index 1f56b56ad4..5e542d19fb 100644
--- a/api/ParserService.js
+++ b/api/ParserService.js
@@ -149,7 +149,7 @@ app.get(/\/(.*)/, function(req, res){
  */
 app.post(/\/(.*)/, function(req, res){
 	env.pageName = req.params[0];
-	res.setHeader('Content-Type', 'text/plain; charset=UTF-8');
+	res.setHeader('Content-Type', 'text/x-mediawiki; charset=UTF-8');
 	var p = new html5.Parser();
 	p.parse( req.body.content );
 	new WikitextSerializer({env: env}).serializeDOM(
diff --git a/modules/parser/mediawiki.tokenizer.peg.js b/modules/parser/mediawiki.tokenizer.peg.js
index a429af53ec..1741770c76 100644
--- a/modules/parser/mediawiki.tokenizer.peg.js
+++ b/modules/parser/mediawiki.tokenizer.peg.js
@@ -91,15 +91,28 @@ PegTokenizer.prototype.process = function( text, cacheKey ) {
 		text += "\n";
 	}
 
+	var chunkCB;
+	if ( this.canCache ) {
+		chunkCB = this.onCacheChunk.bind( this );
+	} else {
+		chunkCB = this.emit.bind( this, 'chunk' );
+	}
 	// XXX: Commented out exception handling during development to get
 	// reasonable traces.
-	//try {
-		var chunkCB;
-		if ( this.canCache ) {
-			chunkCB = this.onCacheChunk.bind( this );
-		} else {
-			chunkCB = this.emit.bind( this, 'chunk' );
+	if ( ! this.env.debug ) {
+		try {
+			this.tokenizer.tokenize(text, 'start',
+				// callback
+				chunkCB,
+				// inline break test
+				this
+			);
+			this.onEnd();
+		} catch (e) {
+			console.warn( 'Tokenizer error in ' + cacheKey + ': ' + e );
+			console.trace();
 		}
+	} else {
 		this.tokenizer.tokenize(text, 'start',
 			// callback
 			chunkCB,
@@ -107,11 +120,7 @@ PegTokenizer.prototype.process = function( text, cacheKey ) {
 			this
 		);
 		this.onEnd();
-	/*} catch (e) {
-		err = e;
-		console.warn( e );
-		console.trace();
-	}*/
+	}
 };
 
 PegTokenizer.prototype.onCacheChunk = function ( chunk ) {