Use text/x-mediawiki content type, and handle tokenizer errors without --debug

Change-Id: I154cd344306aa05ada7ff30f631d487f39fa9739
Author: Gabriel Wicke, 2012-05-24 10:18:41 +02:00
parent c5cd131e1c
commit e70448e53a
2 changed files with 21 additions and 12 deletions


@@ -149,7 +149,7 @@ app.get(/\/(.*)/, function(req, res){
  */
 app.post(/\/(.*)/, function(req, res){
 	env.pageName = req.params[0];
-	res.setHeader('Content-Type', 'text/plain; charset=UTF-8');
+	res.setHeader('Content-Type', 'text/x-mediawiki; charset=UTF-8');
 	var p = new html5.Parser();
 	p.parse( req.body.content );
 	new WikitextSerializer({env: env}).serializeDOM(
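The route above serializes posted HTML back to wikitext and now labels the response as text/x-mediawiki instead of text/plain. Below is a minimal client sketch against this route; the host, port, and page name are placeholders and not part of this change. It posts HTML in the content form field that the handler reads as req.body.content, and logs the Content-Type of the serialized response.

// Hypothetical client sketch; host, port and page name are placeholders.
var http = require('http');
var querystring = require('querystring');

var body = querystring.stringify({
	// Read by the handler as req.body.content
	content: '<p>Hello <b>world</b></p>'
});

var req = http.request({
	host: 'localhost',
	port: 8000,           // assumed port for a locally running service
	path: '/Main_Page',   // captured by /\/(.*)/ and becomes env.pageName
	method: 'POST',
	headers: {
		'Content-Type': 'application/x-www-form-urlencoded',
		'Content-Length': Buffer.byteLength(body)
	}
}, function(res) {
	// After this change this should print: text/x-mediawiki; charset=UTF-8
	console.log(res.headers['content-type']);
	res.pipe(process.stdout);
});

req.end(body);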


@@ -91,15 +91,28 @@ PegTokenizer.prototype.process = function( text, cacheKey ) {
 		text += "\n";
 	}
-	var chunkCB;
-	if ( this.canCache ) {
-		chunkCB = this.onCacheChunk.bind( this );
-	} else {
-		chunkCB = this.emit.bind( this, 'chunk' );
-	}
-	// XXX: Commented out exception handling during development to get
-	// reasonable traces.
-	//try {
+	var chunkCB;
+	if ( this.canCache ) {
+		chunkCB = this.onCacheChunk.bind( this );
+	} else {
+		chunkCB = this.emit.bind( this, 'chunk' );
+	}
+	if ( ! this.env.debug ) {
+		try {
+			this.tokenizer.tokenize(text, 'start',
+					// callback
+					chunkCB,
+					// inline break test
+					this
+					);
+			this.onEnd();
+		} catch (e) {
+			console.warn( 'Tokenizer error in ' + cacheKey + ': ' + e );
+			console.trace();
+		}
+	} else {
 		this.tokenizer.tokenize(text, 'start',
 				// callback
 				chunkCB,
@@ -107,11 +120,7 @@ PegTokenizer.prototype.process = function( text, cacheKey ) {
 				this
 				);
 		this.onEnd();
-	/*} catch (e) {
-		err = e;
-		console.warn( e );
-		console.trace();
-	}*/
+	}
 };
 
 PegTokenizer.prototype.onCacheChunk = function ( chunk ) {
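The tokenizer change keys off this.env.debug: without it, tokenizer exceptions are caught and logged with the cache key so one bad page does not take down the service; with --debug they propagate uncaught so the full stack trace is preserved. The sketch below condenses that pattern and shows one hypothetical way a --debug command-line flag could feed env.debug; the real option parsing is not part of this diff.

// Hypothetical sketch: wiring a --debug command-line flag into the env
// object consulted above as this.env.debug.
var env = {
	debug: process.argv.indexOf('--debug') !== -1
};

// The error-handling pattern, condensed: catch and log when not debugging,
// let the exception propagate (full trace) when --debug is set.
function tokenizeGuarded(env, cacheKey, tokenize) {
	if (!env.debug) {
		try {
			tokenize();
		} catch (e) {
			console.warn('Tokenizer error in ' + cacheKey + ': ' + e);
			console.trace();
		}
	} else {
		tokenize();
	}
}

// Example with a deliberately failing tokenize callback.
tokenizeGuarded(env, 'Main_Page', function() {
	throw new Error('syntax error');
});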