A few fixes to parser functions and template expansion. Trim whitespace off
template arguments, let the last duplicate key win and fake pagenamee slightly
better.
This commit is contained in:
Gabriel Wicke 2012-03-08 11:44:37 +00:00
parent 51023feaa4
commit 7518db8197
Notes: Gabriel Wicke 2012-03-08 11:44:37 +00:00
3 changed files with 49 additions and 10 deletions

View file

@ -39,9 +39,7 @@ ParserFunctions.prototype._switchLookupFallback = function ( kvs, key ) {
var kv;
for ( var i = 0, l = kvs.length; i < l; i++ ) {
kv = kvs[i];
// XXX: tokensToString actually strips too much here! Anything
// non-stringish should not match at all.
if ( this.manager.env.tokensToString( kv.v ) === key ) {
if ( this.manager.env.tokensToString( kv.v, true ) === key ) {
// found. now look for the next entry with a non-empty key.
for ( var j = i; j < l; j++) {
kv = kvs[j];
@ -61,7 +59,7 @@ ParserFunctions.prototype._switchLookupFallback = function ( kvs, key ) {
// TODO: Implement
// http://www.mediawiki.org/wiki/Help:Extension:ParserFunctions#Grouping_results
ParserFunctions.prototype['pf_#switch'] = function ( target, argList, argDict, unnamedArgs ) {
this.manager.env.dp( 'switch enter: ' + target.trim() +
this.manager.env.dp( 'switch enter', target.trim(),
' looking in ', argDict );
target = target.trim();
if ( argDict[target] !== undefined ) {
@ -396,7 +394,7 @@ ParserFunctions.prototype['pf_currentpage'] = function ( target, argList, argDic
return [ target ];
};
// {{PAGENAMEE}}: return the page name without its namespace prefix.
// Splits "Namespace:Page" on the first ':' and returns the part after it,
// or '' when there is no colon-separated suffix (split limit 2 yields
// undefined for an unprefixed title, coerced to '' by the || fallback).
// NOTE(review): result is not actually percent-encoded despite the 'e'
// suffix — presumably a stop-gap ("fake pagenamee"); confirm before relying
// on encoding behavior.
ParserFunctions.prototype['pf_pagenamee'] = function ( target, argList, argDict ) {
	return [ target.split(':', 2)[1] || '' ];
};
ParserFunctions.prototype['pf_fullpagename'] = function ( target, argList, argDict ) {
return target && [target] || ["http://example.com/fixme/"];

View file

@ -57,6 +57,46 @@ MWParserEnvironment.prototype.lookupValue = function ( kvs, key ) {
return null;
};
/**
 * Trim space and newlines from leading and trailing text tokens.
 *
 * Walks inward from both ends of the token list: left-trims the leading
 * run of string tokens and right-trims the trailing run, stopping at the
 * first non-string token or the first string that is non-empty after
 * trimming. Mutates the array in place and returns the same array.
 *
 * @param {Array} tokens - mixed list of string and object tokens
 * @returns {Array} the same (possibly mutated) token array; null/undefined
 *   input is returned unchanged instead of throwing
 */
MWParserEnvironment.prototype.tokenTrim = function ( tokens ) {
	// Robustness: tolerate missing or empty token lists (original threw on
	// tokens.length for null input).
	if ( !tokens || !tokens.length ) {
		return tokens;
	}
	var l = tokens.length,
		i, token;
	// strip leading space
	for ( i = 0; i < l; i++ ) {
		token = tokens[i];
		if ( token.constructor !== String ) {
			// Non-text token: leading run of strings has ended.
			break;
		}
		token = token.replace( /^\s+/, '' );
		tokens[i] = token;
		if ( token !== '' ) {
			// First non-empty text token: all leading whitespace stripped.
			break;
		}
	}
	// strip trailing space
	for ( i = l - 1; i >= 0; i-- ) {
		token = tokens[i];
		if ( token.constructor !== String ) {
			break;
		}
		token = token.replace( /\s+$/, '' );
		tokens[i] = token;
		if ( token !== '' ) {
			break;
		}
	}
	return tokens;
};
/**
* Convert an array of key-value pairs into a hash of keys to values. For
* duplicate keys, the last entry wins.
*/
MWParserEnvironment.prototype.KVtoHash = function ( kvs ) {
if ( ! kvs ) {
console.warn( "Invalid kvs!: " + JSON.stringify( kvs, null, 2 ) );
@ -66,9 +106,9 @@ MWParserEnvironment.prototype.KVtoHash = function ( kvs ) {
for ( var i = 0, l = kvs.length; i < l; i++ ) {
var kv = kvs[i],
key = this.tokensToString( kv.k ).trim();
if( res[key] === undefined ) {
res[key] = kv.v;
}
//if( res[key] === undefined ) {
res[key] = this.tokenTrim( kv.v );
//}
}
//console.warn( 'KVtoHash: ' + JSON.stringify( res ));
return res;
@ -203,7 +243,8 @@ MWParserEnvironment.prototype.tokensToString = function ( tokens, strict ) {
var tstring = JSON.stringify( token );
this.dp ( 'MWParserEnvironment.tokensToString, non-text token: ' +
tstring + JSON.stringify( tokens, null, 2 ) );
//out.push( tstring );
//console.trace();
out.push( tstring );
}
}
//console.warn( 'MWParserEnvironment.tokensToString result: ' + out.join('') );

View file

@ -50,7 +50,7 @@ var ParserPipeline = require('./mediawiki.parser.js').ParserPipeline,
wgScriptPath: argv.wgScriptPath,
wgScriptExtension: argv.wgScriptExtension,
// XXX: add options for this!
wgUploadPath: 'http://upload.wikimedia.org/wikipedia/commons/thumb',
wgUploadPath: 'http://upload.wikimedia.org/wikipedia/commons',
fetchTemplates: argv.fetchTemplates,
// enable/disable debug output using this switch
debug: argv.debug,