Mirror of https://gerrit.wikimedia.org/r/mediawiki/extensions/DiscussionTools (synced 2024-11-28 02:00:57 +00:00)

Merge "Change CommentParser into a service"

Commit b6e930a101
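In short, the change replaces the static CommentParser::newFromGlobalState() constructor with a 'DiscussionTools.CommentParser' service, backed by a new 'DiscussionTools.LanguageData' service for the language- and configuration-dependent parser data. A minimal sketch of the new calling convention, assuming the extension is loaded and that $html and $title are placeholder variables already in scope:

    use MediaWiki\MediaWikiServices;
    use Wikimedia\Parsoid\Utils\DOMCompat;
    use Wikimedia\Parsoid\Utils\DOMUtils;

    // Parse rendered discussion-page HTML with the service-based parser.
    $doc = DOMUtils::parseHTML( $html );
    $container = DOMCompat::getBody( $doc );

    $parser = MediaWikiServices::getInstance()
        ->getService( 'DiscussionTools.CommentParser' )
        ->parse( $container, $title );

    $threadItems = $parser->getThreadItems();

The hunks below apply this pattern at each call site and add the corresponding service wiring and test changes.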
@@ -190,7 +190,8 @@ class ApiDiscussionToolsEdit extends ApiBase {
 		$container = DOMCompat::getBody( $doc );

-		$parser = CommentParser::newFromGlobalState( $container, $title );
+		$parser = MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' )
+			->parse( $container, $title );

 		if ( $commentId ) {
 			$comment = $parser->findCommentById( $commentId );
@@ -2,6 +2,7 @@

 namespace MediaWiki\Extension\DiscussionTools;

+use MediaWiki\MediaWikiServices;
 use MediaWiki\Revision\RevisionRecord;
 use Title;
 use Wikimedia\Parsoid\Utils\DOMCompat;

@@ -24,7 +25,8 @@ trait ApiDiscussionToolsTrait {
 			$revision->getPageAsLinkTarget()
 		);

-		return CommentParser::newFromGlobalState( $container, $title );
+		$parser = MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' );
+		return $parser->parse( $container, $title );
 	}

 	/**
@@ -11,7 +11,6 @@ use ParserOutput;
 use Throwable;
 use Title;
 use WebRequest;
 use Wikimedia\Parsoid\DOM\Element;
 use Wikimedia\Parsoid\Utils\DOMCompat;
 use Wikimedia\Parsoid\Utils\DOMUtils;
 use Wikimedia\Parsoid\Wt2Html\XMLSerializer;
@@ -28,12 +27,10 @@ class CommentFormatter {
	 *
	 * This method exists so it can mocked in tests.
	 *
-	 * @param Element $container
-	 * @param Title $title
	 * @return CommentParser
	 */
-	protected static function getParser( Element $container, Title $title ): CommentParser {
-		return CommentParser::newFromGlobalState( $container, $title );
+	protected static function getParser(): CommentParser {
+		return MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' );
	}

	/**

@@ -90,7 +87,7 @@ class CommentFormatter {
 		$doc = DOMUtils::parseHTML( $html );
 		$container = DOMCompat::getBody( $doc );

-		$parser = static::getParser( $container, $title );
+		$parser = static::getParser()->parse( $container, $title );
 		$threadItems = $parser->getThreadItems();

 		// Iterate in reverse order, because adding the range markers for a thread item
@@ -17,7 +17,6 @@ use Wikimedia\Parsoid\DOM\Node;
 use Wikimedia\Parsoid\DOM\Text;
 use Wikimedia\Parsoid\Utils\DOMCompat;

-// TODO clean up static vs non-static
 // TODO consider making timestamp parsing not a returned function

 class CommentParser {
@@ -53,25 +52,17 @@ class CommentParser {
 	private $timezones;

 	/**
-	 * @param Element $rootNode Root node of content to parse
-	 * @param Title $title Title of the page being parsed
 	 * @param Language $language Content language
 	 * @param Config $config
-	 * @param array $data
+	 * @param LanguageData $languageData
 	 */
 	public function __construct(
-		Element $rootNode, Title $title, Language $language, Config $config, array $data = []
+		Language $language, Config $config, LanguageData $languageData
 	) {
-		$this->rootNode = $rootNode;
 		$this->config = $config;
 		$this->language = $language;
-		$this->title = $title;
-
-		if ( !$data ) {
-			// TODO: Instead of passing data used for mocking, mock the methods that fetch the data.
-			$data = LanguageData::getLocalData( $config, $language );
-		}

+		$data = $languageData->getLocalData();
 		$this->dateFormat = $data['dateFormat'];
 		$this->digits = $data['digits'];
 		$this->contLangMessages = $data['contLangMessages'];

@@ -80,17 +71,18 @@ class CommentParser {
 	}

 	/**
-	 * @param Element $rootNode
-	 * @param Title $title
-	 * @return CommentParser
+	 * Parse a discussion page.
+	 *
+	 * @param Element $rootNode Root node of content to parse
+	 * @param Title $title Title of the page being parsed
+	 * @return $this
 	 */
-	public static function newFromGlobalState( Element $rootNode, Title $title ): CommentParser {
-		return new static(
-			$rootNode,
-			$title,
-			MediaWikiServices::getInstance()->getContentLanguage(),
-			MediaWikiServices::getInstance()->getMainConfig()
-		);
+	public function parse( Element $rootNode, Title $title ) {
+		$this->rootNode = $rootNode;
+		$this->title = $title;
+		// TODO Return a data object
+		// (This line is a big fat hack)
+		return clone $this;
 	}

 	/**
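Note the shape of the new parse() method above: the shared service instance stores the root node and title and then returns clone $this, so each call hands back a separate object (the returned-data-object TODO and the "big fat hack" comment mark this as an interim design). A hedged sketch of what that means for callers, with illustrative variable names:

    $service = MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' );

    // Each parse() call returns a detached clone, so results for different pages
    // can be held at the same time without overwriting each other.
    $resultA = $service->parse( $containerA, $titleA );
    $resultB = $service->parse( $containerB, $titleB );

    $threadsA = $resultA->getThreads();
    $commentsB = $resultB->getCommentItems();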
@@ -13,26 +13,53 @@ use Config;
 use DateTimeZone;
 use ILanguageConverter;
 use Language;
-use MediaWiki\MediaWikiServices;
+use MediaWiki\Languages\LanguageConverterFactory;
+use MediaWiki\SpecialPage\SpecialPageFactory;

 class LanguageData {
+	/** @var Config */
+	private $config;
+	/** @var Language */
+	private $language;
+	/** @var LanguageConverterFactory */
+	private $languageConverterFactory;
+	/** @var SpecialPageFactory */
+	private $specialPageFactory;
+
+	/**
+	 * @param Config $config
+	 * @param Language $language
+	 * @param LanguageConverterFactory $languageConverterFactory
+	 * @param SpecialPageFactory $specialPageFactory
+	 */
+	public function __construct(
+		Config $config,
+		Language $language,
+		LanguageConverterFactory $languageConverterFactory,
+		SpecialPageFactory $specialPageFactory
+	) {
+		$this->config = $config;
+		$this->language = $language;
+		$this->languageConverterFactory = $languageConverterFactory;
+		$this->specialPageFactory = $specialPageFactory;
+	}
+
 	/**
 	 * Compute data we need to parse discussion threads on pages.
 	 *
-	 * @param Config $config
-	 * @param Language $lang
 	 * @return array
 	 */
-	public static function getLocalData( Config $config, Language $lang ): array {
-		$langConv = MediaWikiServices::getInstance()->getLanguageConverterFactory()
-			->getLanguageConverter( $lang );
+	public function getLocalData(): array {
+		$config = $this->config;
+		$lang = $this->language;
+		$langConv = $this->languageConverterFactory->getLanguageConverter( $lang );

 		$data = [];

 		$data['dateFormat'] = [];
 		$dateFormat = $lang->getDateFormatString( 'both', $lang->dateFormat( false ) );
 		foreach ( $langConv->getVariants() as $variant ) {
-			$convDateFormat = self::convertDateFormat( $dateFormat, $langConv, $variant );
+			$convDateFormat = $this->convertDateFormat( $dateFormat, $langConv, $variant );
 			$data['dateFormat'][$variant] = $convDateFormat;
 		}

@@ -53,10 +80,10 @@ class LanguageData {
 		// ApiQuerySiteinfo
 		$data['localTimezone'] = $config->get( 'Localtimezone' );

-		$data['specialContributionsName'] = MediaWikiServices::getInstance()
-			->getSpecialPageFactory()->getLocalNameFor( 'Contributions' );
-		$data['specialNewSectionName'] = MediaWikiServices::getInstance()
-			->getSpecialPageFactory()->getLocalNameFor( 'NewSection' );
+		$data['specialContributionsName'] = $this->specialPageFactory
+			->getLocalNameFor( 'Contributions' );
+		$data['specialNewSectionName'] = $this->specialPageFactory
+			->getLocalNameFor( 'NewSection' );

 		$localTimezone = $config->get( 'Localtimezone' );
 		// Return all timezone abbreviations for the local timezone (there will often be two, for

@@ -128,7 +155,7 @@ class LanguageData {
 	 * @param string $variant
 	 * @return string
 	 */
-	private static function convertDateFormat(
+	private function convertDateFormat(
 		string $format,
 		ILanguageConverter $langConv,
 		string $variant
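With the global lookups removed, LanguageData now receives all of its dependencies through the constructor, which is what lets callers build it for a language other than the wiki's content language (the ResourceLoaderData hunk below does exactly that for explicit language codes). A rough sketch of direct construction, assuming $services is a MediaWikiServices instance and $langCode is a language code supplied by the caller:

    use MediaWiki\Extension\DiscussionTools\LanguageData;

    $languageData = new LanguageData(
        $services->getMainConfig(),
        $services->getLanguageFactory()->getLanguage( $langCode ),
        $services->getLanguageConverterFactory(),
        $services->getSpecialPageFactory()
    );

    // Returns the array consumed by CommentParser and the frontend, with keys such as
    // 'dateFormat', 'digits', 'contLangMessages', 'localTimezone',
    // 'specialContributionsName' and 'specialNewSectionName' (see the hunks above).
    $data = $languageData->getLocalData();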
@@ -69,7 +69,8 @@ class EventDispatcher {

 		$doc = DOMUtils::parseHTML( $html );
 		$container = DOMCompat::getBody( $doc );
-		return CommentParser::newFromGlobalState( $container, $title );
+		$parser = $services->getService( 'DiscussionTools.CommentParser' );
+		return $parser->parse( $container, $title );
 	}

 	/**

@@ -104,7 +105,8 @@ class EventDispatcher {
 			// Page creation
 			$doc = DOMUtils::parseHTML( '' );
 			$container = DOMCompat::getBody( $doc );
-			$oldParser = CommentParser::newFromGlobalState( $container, $title );
+			$oldParser = $services->getService( 'DiscussionTools.CommentParser' )
+				->parse( $container, $title );
 		}
 		$newParser = self::getParsedRevision( $newRevRecord );
@@ -33,13 +33,20 @@ class ResourceLoaderData {
 	public static function getLocalData(
 		ResourceLoaderContext $context, Config $config, ?string $langCode = null
 	): array {
+		$services = MediaWikiServices::getInstance();
+
 		if ( $langCode === null ) {
-			$lang = MediaWikiServices::getInstance()->getContentLanguage();
+			$langData = $services->getService( 'DiscussionTools.LanguageData' );
 		} else {
-			$lang = MediaWikiServices::getInstance()->getLanguageFactory()->getLanguage( $langCode );
+			$langData = new LanguageData(
+				$services->getMainConfig(),
+				$services->getLanguageFactory()->getLanguage( $langCode ),
+				$services->getLanguageConverterFactory(),
+				$services->getSpecialPageFactory()
+			);
 		}

-		return LanguageData::getLocalData( $config, $lang );
+		return $langData->getLocalData();
 	}

 	/**
@@ -5,6 +5,21 @@ namespace MediaWiki\Extension\DiscussionTools;
 use MediaWiki\MediaWikiServices;

 return [
+	'DiscussionTools.CommentParser' => static function ( MediaWikiServices $services ): CommentParser {
+		return new CommentParser(
+			$services->getContentLanguage(),
+			$services->getMainConfig(),
+			$services->getService( 'DiscussionTools.LanguageData' )
+		);
+	},
+	'DiscussionTools.LanguageData' => static function ( MediaWikiServices $services ): LanguageData {
+		return new LanguageData(
+			$services->getMainConfig(),
+			$services->getContentLanguage(),
+			$services->getLanguageConverterFactory(),
+			$services->getSpecialPageFactory()
+		);
+	},
 	'DiscussionTools.SubscriptionStore' => static function ( MediaWikiServices $services ): SubscriptionStore {
 		return new SubscriptionStore(
 			$services->getConfigFactory(),
@@ -8,8 +8,6 @@ var
 	CommentItem = require( './CommentItem.js' ),
 	HeadingItem = require( './HeadingItem.js' ),
 	ThreadItem = require( './ThreadItem.js' ),
-	// LanguageData::getLocalData()
-	data = require( './parser/data.json' ),
 	moment = require( './lib/moment-timezone/moment-timezone-with-data-1970-2030.js' );

 /**
@@ -17,11 +15,22 @@ var
  * comments and threads.
  *
  * @class mw.dt.Parser
+ * @param {Array} data Language-specific data to be used for parsing
  * @constructor
+ */
+function Parser( data ) {
+	this.data = data;
+}
+
+/**
+ * Parse a discussion page.
+ *
  * @param {HTMLElement} rootNode Root node of content to parse
  * @param {mw.Title} title Title of the page being parsed
+ * @chainable
+ * @return {Parser}
  */
-function Parser( rootNode, title ) {
+Parser.prototype.parse = function ( rootNode, title ) {
 	this.rootNode = rootNode;
 	this.title = title;
 	this.threadItems = null;

@@ -29,7 +38,8 @@ function Parser( rootNode, title ) {
 	this.threadItemsByName = null;
 	this.threadItemsById = null;
 	this.threads = null;
-}
+	return this;
+};

 OO.initClass( Parser );
@@ -41,11 +51,12 @@ OO.initClass( Parser );
  * @param {string[]} messages Message keys
  * @return {string[]} Message values
  */
-function getMessages( contLangVariant, messages ) {
+Parser.prototype.getMessages = function ( contLangVariant, messages ) {
+	var parser = this;
 	return messages.map( function ( code ) {
-		return data.contLangMessages[ contLangVariant ][ code ];
+		return parser.data.contLangMessages[ contLangVariant ][ code ];
 	} );
-}
+};

 /**
  * Get a regexp that matches timestamps generated using the given date format.
@@ -72,6 +83,8 @@ Parser.prototype.getTimestampRegexp = function ( contLangVariant, format, digits
 		return '(' + array.map( mw.util.escapeRegExp ).join( '|' ) + ')';
 	}

+	var parser = this;
+
 	var s = '';
 	// Adapted from Language::sprintfDate()
 	for ( var p = 0; p < format.length; p++ ) {
@@ -89,7 +102,7 @@ Parser.prototype.getTimestampRegexp = function ( contLangVariant, format, digits
 				s += 'x';
 				break;
 			case 'xg':
-				s += regexpAlternateGroup( getMessages( contLangVariant, [
+				s += regexpAlternateGroup( parser.getMessages( contLangVariant, [
 					'january-gen', 'february-gen', 'march-gen', 'april-gen', 'may-gen', 'june-gen',
 					'july-gen', 'august-gen', 'september-gen', 'october-gen', 'november-gen',
 					'december-gen'
@@ -99,7 +112,7 @@ Parser.prototype.getTimestampRegexp = function ( contLangVariant, format, digits
 				num = '2';
 				break;
 			case 'D':
-				s += regexpAlternateGroup( getMessages( contLangVariant, [
+				s += regexpAlternateGroup( parser.getMessages( contLangVariant, [
 					'sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat'
 				] ) );
 				break;
@@ -107,20 +120,20 @@ Parser.prototype.getTimestampRegexp = function ( contLangVariant, format, digits
 				num = '1,2';
 				break;
 			case 'l':
-				s += regexpAlternateGroup( getMessages( contLangVariant, [
+				s += regexpAlternateGroup( parser.getMessages( contLangVariant, [
 					'sunday', 'monday', 'tuesday', 'wednesday', 'thursday',
 					'friday', 'saturday'
 				] ) );
 				break;
 			case 'F':
-				s += regexpAlternateGroup( getMessages( contLangVariant, [
+				s += regexpAlternateGroup( parser.getMessages( contLangVariant, [
 					'january', 'february', 'march', 'april', 'may_long', 'june',
 					'july', 'august', 'september', 'october', 'november',
 					'december'
 				] ) );
 				break;
 			case 'M':
-				s += regexpAlternateGroup( getMessages( contLangVariant, [
+				s += regexpAlternateGroup( parser.getMessages( contLangVariant, [
 					'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug',
 					'sep', 'oct', 'nov', 'dec'
 				] ) );
@@ -257,6 +270,8 @@ Parser.prototype.getTimestampParser = function ( contLangVariant, format, digits
 		);
 	}

+	var parser = this;
+
 	/**
 	 * @typedef {function(Array):moment} TimestampParser
 	 */
@@ -281,7 +296,7 @@ Parser.prototype.getTimestampParser = function ( contLangVariant, format, digits

 		switch ( code2 ) {
 			case 'xg':
-				monthIdx = getMessages( contLangVariant, [
+				monthIdx = parser.getMessages( contLangVariant, [
 					'january-gen', 'february-gen', 'march-gen', 'april-gen', 'may-gen', 'june-gen',
 					'july-gen', 'august-gen', 'september-gen', 'october-gen', 'november-gen',
 					'december-gen'
@@ -296,14 +311,14 @@ Parser.prototype.getTimestampParser = function ( contLangVariant, format, digits
 				// Day of the week - unused
 				break;
 			case 'F':
-				monthIdx = getMessages( contLangVariant, [
+				monthIdx = parser.getMessages( contLangVariant, [
 					'january', 'february', 'march', 'april', 'may_long', 'june',
 					'july', 'august', 'september', 'october', 'november',
 					'december'
 				] ).indexOf( text );
 				break;
 			case 'M':
-				monthIdx = getMessages( contLangVariant, [
+				monthIdx = parser.getMessages( contLangVariant, [
 					'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug',
 					'sep', 'oct', 'nov', 'dec'
 				] ).indexOf( text );
@@ -372,12 +387,12 @@ Parser.prototype.getLocalTimestampRegexps = function () {
  */
 Parser.prototype.getLocalTimestampRegexps = function () {
 	var parser = this;
-	return Object.keys( data.dateFormat ).map( function ( contLangVariant ) {
+	return Object.keys( this.data.dateFormat ).map( function ( contLangVariant ) {
 		return parser.getTimestampRegexp(
 			contLangVariant,
-			data.dateFormat[ contLangVariant ],
-			'[' + data.digits[ contLangVariant ].join( '' ) + ']',
-			data.timezones[ contLangVariant ]
+			parser.data.dateFormat[ contLangVariant ],
+			'[' + parser.data.digits[ contLangVariant ].join( '' ) + ']',
+			parser.data.timezones[ contLangVariant ]
 		);
 	} );
 };
@@ -393,13 +408,13 @@ Parser.prototype.getLocalTimestampRegexps = function () {
  */
 Parser.prototype.getLocalTimestampParsers = function () {
 	var parser = this;
-	return Object.keys( data.dateFormat ).map( function ( contLangVariant ) {
+	return Object.keys( this.data.dateFormat ).map( function ( contLangVariant ) {
 		return parser.getTimestampParser(
 			contLangVariant,
-			data.dateFormat[ contLangVariant ],
-			data.digits[ contLangVariant ],
-			data.localTimezone,
-			data.timezones[ contLangVariant ]
+			parser.data.dateFormat[ contLangVariant ],
+			parser.data.digits[ contLangVariant ],
+			parser.data.localTimezone,
+			parser.data.timezones[ contLangVariant ]
 		);
 	} );
 };
@@ -527,7 +542,7 @@ Parser.prototype.getUsernameFromLink = function ( link ) {
 		}
 	} else if (
 		namespaceId === namespaceIds.special &&
-		mainText.split( '/' )[ 0 ] === data.specialContributionsName
+		mainText.split( '/' )[ 0 ] === this.data.specialContributionsName
 	) {
 		username = mainText.split( '/' )[ 1 ];
 		if ( !username ) {
@@ -578,7 +593,7 @@ Parser.prototype.findSignature = function ( timestampNode, until ) {
 		if ( event === 'enter' && node === until ) {
 			return true;
 		}
-		if ( length >= data.signatureScanLimit ) {
+		if ( length >= parser.data.signatureScanLimit ) {
 			return true;
 		}
 		if ( utils.isBlockElement( node ) ) {
@@ -623,7 +623,7 @@ function init( $container, state ) {

 	$pageContainer = $container;
 	linksController = new ReplyLinksController( $pageContainer );
-	var parser = new Parser(
+	var parser = new Parser( require( './parser/data.json' ) ).parse(
 		$pageContainer[ 0 ],
 		mw.Title.newFromText( mw.config.get( 'wgRelevantPageName' ) )
 	);
@@ -3,7 +3,7 @@ var
 	modifier = require( 'ext.discussionTools.init' ).modifier,
 	utils = require( 'ext.discussionTools.init' ).utils,
 	highlighter = require( './highlighter.js' ),
-	parser = new Parser(
+	parser = new Parser( require( 'ext.discussionTools.init' ).parserData ).parse(
 		document.getElementById( 'mw-content-text' ),
 		mw.Title.newFromText( mw.config.get( 'wgRelevantPageName' ) )
 	),
@@ -68,6 +68,7 @@ if ( uri.query.dtdebug ) {
 module.exports = {
 	controller: controller,
 	Parser: require( './Parser.js' ),
+	parserData: require( './parser/data.json' ),
 	modifier: require( './modifier.js' ),
 	ThreadItem: require( './ThreadItem.js' ),
 	HeadingItem: require( './HeadingItem.js' ),
@@ -7,4 +7,4 @@ config.json:
 JSON.stringify( mw.config.get( [ 'wgContentLanguage', 'wgArticlePath', 'wgNamespaceIds', 'wgFormattedNamespaces' ] ), null, '\t' )

 data.json:
-JSON.stringify( mw.loader.moduleRegistry[ 'ext.discussionTools.init' ].packageExports[ 'parser/data.json' ], null, '\t' )
+JSON.stringify( mw.loader.require( 'ext.discussionTools.init' ).parserData )
@@ -26,7 +26,7 @@ class CommentFormatterTest extends IntegrationTestCase {
 		$data = self::getJson( $data );

 		$this->setupEnv( $config, $data );
-		MockCommentFormatter::$data = $data;
+		MockCommentFormatter::$parser = TestUtils::createParser( $data );

 		$commentFormatter = TestingAccessWrapper::newFromClass( MockCommentFormatter::class );
@@ -36,7 +36,7 @@ class CommentModifierTest extends IntegrationTestCase {
 		$doc = self::createDocument( $dom );
 		$container = DOMCompat::getBody( $doc );

-		$parser = self::createParser( $container, $title, $data );
+		$parser = self::createParser( $data )->parse( $container, $title );
 		$comments = $parser->getCommentItems();

 		foreach ( $comments as $comment ) {

@@ -81,7 +81,7 @@ class CommentModifierTest extends IntegrationTestCase {
 		$doc = self::createDocument( $dom );
 		$container = DOMCompat::getBody( $doc );

-		$parser = self::createParser( $container, $title, $data );
+		$parser = self::createParser( $data )->parse( $container, $title );
 		$comments = $parser->getCommentItems();

 		foreach ( $comments as $comment ) {
@@ -5,17 +5,16 @@ namespace MediaWiki\Extension\DiscussionTools\Tests;
 use DateTimeImmutable;
 use Error;
 use MediaWiki\Extension\DiscussionTools\CommentItem;
 use MediaWiki\Extension\DiscussionTools\CommentParser;
 use MediaWiki\Extension\DiscussionTools\CommentUtils;
 use MediaWiki\Extension\DiscussionTools\HeadingItem;
 use MediaWiki\Extension\DiscussionTools\ImmutableRange;
 use MediaWiki\Extension\DiscussionTools\ThreadItem;
+use MediaWiki\MediaWikiServices;
 use stdClass;
 use Title;
 use Wikimedia\Parsoid\DOM\Element;
 use Wikimedia\Parsoid\DOM\Node;
 use Wikimedia\Parsoid\Utils\DOMCompat;
 use Wikimedia\Parsoid\Utils\DOMUtils;
 use Wikimedia\TestingAccessWrapper;

 /**
@@ -114,10 +113,7 @@ class CommentParserTest extends IntegrationTestCase {
 		string $format, string $expected, string $message
 	): void {
 		$parser = TestingAccessWrapper::newFromObject(
-			CommentParser::newFromGlobalState(
-				DOMCompat::getBody( DOMUtils::parseHTML( '' ) ),
-				Title::newFromText( 'Dummy' )
-			)
+			MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' )
 		);

 		// HACK: Fix differences between JS & PHP regexes

@@ -142,10 +138,7 @@ class CommentParserTest extends IntegrationTestCase {
 		string $format, array $data, string $expected, string $message
 	): void {
 		$parser = TestingAccessWrapper::newFromObject(
-			CommentParser::newFromGlobalState(
-				DOMCompat::getBody( DOMUtils::parseHTML( '' ) ),
-				Title::newFromText( 'Dummy' )
-			)
+			MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' )
 		);

 		$expected = new DateTimeImmutable( $expected );

@@ -167,10 +160,7 @@ class CommentParserTest extends IntegrationTestCase {
 		string $timezone, array $timezoneAbbrs, string $message
 	): void {
 		$parser = TestingAccessWrapper::newFromObject(
-			CommentParser::newFromGlobalState(
-				DOMCompat::getBody( DOMUtils::parseHTML( '' ) ),
-				Title::newFromText( 'Dummy' )
-			)
+			MediaWikiServices::getInstance()->getService( 'DiscussionTools.CommentParser' )
 		);

 		$regexp = $parser->getTimestampRegexp( 'en', $format, '\\d', $timezoneAbbrs );

@@ -208,7 +198,7 @@ class CommentParserTest extends IntegrationTestCase {
 		$body = DOMCompat::getBody( $doc );

 		$this->setupEnv( $config, $data );
-		$parser = self::createParser( $body, $title, $data );
+		$parser = self::createParser( $data )->parse( $body, $title );
 		$threads = $parser->getThreads();

 		$processedThreads = [];
@@ -40,8 +40,8 @@ class EventDispatcherTest extends IntegrationTestCase {

 		$dummyTitle = Title::newFromText( 'Dummy' );
 		$this->setupEnv( $config, $data );
-		$parser1 = self::createParser( $body1, $dummyTitle, $data );
-		$parser2 = self::createParser( $body2, $dummyTitle, $data );
+		$parser1 = self::createParser( $data )->parse( $body1, $dummyTitle );
+		$parser2 = self::createParser( $data )->parse( $body2, $dummyTitle );

 		$events = self::getJson( $other, true );
@@ -23,8 +23,15 @@ class LanguageDataTest extends IntegrationTestCase {
 		] );
 		$expectedData = self::getJson( $expectedPath );

-		$lang = MediaWikiServices::getInstance()->getLanguageFactory()->getLanguage( $langCode );
-		$data = LanguageData::getLocalData( $conf, $lang );
+		$services = MediaWikiServices::getInstance();
+		$languageData = new LanguageData(
+			$conf,
+			MediaWikiServices::getInstance()->getLanguageFactory()->getLanguage( $langCode ),
+			$services->getLanguageConverterFactory(),
+			$services->getSpecialPageFactory()
+		);
+
+		$data = $languageData->getLocalData();

 		// Optionally write updated content to the JSON files
 		if ( getenv( 'DISCUSSIONTOOLS_OVERWRITE_TESTS' ) ) {
@@ -4,20 +4,19 @@ namespace MediaWiki\Extension\DiscussionTools\Tests;

 use MediaWiki\Extension\DiscussionTools\CommentFormatter;
 use MediaWiki\Extension\DiscussionTools\CommentParser;
-use Title;
-use Wikimedia\Parsoid\DOM\Element;

 class MockCommentFormatter extends CommentFormatter {

-	public static $data;
+	/**
+	 * @var CommentParser
+	 */
+	public static $parser;

 	/**
-	 * @param Element $container
-	 * @param Title $title
 	 * @return CommentParser
 	 */
-	protected static function getParser( Element $container, Title $title ): CommentParser {
-		return TestUtils::createParser( $container, $title, static::$data );
+	protected static function getParser(): CommentParser {
+		return self::$parser;
 	}

 }
tests/phpunit/MockLanguageData.php (new file)

@@ -0,0 +1,27 @@
+<?php
+
+namespace MediaWiki\Extension\DiscussionTools\Tests;
+
+use MediaWiki\Extension\DiscussionTools\LanguageData;
+
+class MockLanguageData extends LanguageData {
+
+	/**
+	 * @var array
+	 */
+	private $data;
+
+	/**
+	 * @param array $data
+	 */
+	public function __construct( array $data ) {
+		$this->data = $data;
+	}
+
+	/**
+	 * @return array
+	 */
+	public function getLocalData(): array {
+		return $this->data;
+	}
+}
@@ -4,9 +4,7 @@ namespace MediaWiki\Extension\DiscussionTools\Tests;

 use MediaWiki\Extension\DiscussionTools\CommentParser;
 use MediaWiki\MediaWikiServices;
-use Title;
 use Wikimedia\Parsoid\DOM\Document;
-use Wikimedia\Parsoid\DOM\Element;
 use Wikimedia\Parsoid\Utils\DOMCompat;
 use Wikimedia\Parsoid\Utils\DOMUtils;

@@ -120,19 +118,15 @@ trait TestUtils {
 	/**
 	 * Create a comment parser
 	 *
-	 * @param Element $rootNode
-	 * @param Title $title
 	 * @param array $data
 	 * @return CommentParser
 	 */
-	public static function createParser( Element $rootNode, Title $title, array $data ): CommentParser {
+	public static function createParser( array $data ): CommentParser {
 		$services = MediaWikiServices::getInstance();
 		return new CommentParser(
-			$rootNode,
-			$title,
 			$services->getContentLanguage(),
 			$services->getMainConfig(),
-			$data
+			new MockLanguageData( $data )
 		);
 	}
 }
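On the test side, createParser() now assembles a CommentParser by hand around a MockLanguageData wrapper for the fixture data, and parsing moves to the call site. A sketch of the pattern the updated tests follow, where $data, $body and $title stand for test fixtures:

    // Build a parser from canned language data, then parse the fixture document.
    $parser = TestUtils::createParser( $data )->parse( $body, $title );
    $threads = $parser->getThreads();

    // CommentFormatter tests swap in the same kind of parser through the mock subclass,
    // whose getParser() simply returns this static property.
    MockCommentFormatter::$parser = TestUtils::createParser( $data );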
@@ -69,7 +69,7 @@ class ThreadItemTest extends IntegrationTestCase {

 		CommentUtils::unwrapParsoidSections( $container );

-		$parser = self::createParser( $container, $title, $data );
+		$parser = self::createParser( $data )->parse( $container, $title );
 		$comments = $parser->getCommentItems();

 		$transcludedFrom = [];

@@ -113,7 +113,7 @@ class ThreadItemTest extends IntegrationTestCase {
 		$body = DOMCompat::getBody( $doc );

 		$this->setupEnv( $config, $data );
-		$parser = self::createParser( $body, $title, $data );
+		$parser = self::createParser( $data )->parse( $body, $title );
 		$items = $parser->getThreadItems();

 		$output = [];

@@ -159,7 +159,7 @@ class ThreadItemTest extends IntegrationTestCase {
 		$body = DOMCompat::getBody( $doc );

 		$this->setupEnv( $config, $data );
-		$parser = self::createParser( $body, $title, $data );
+		$parser = self::createParser( $data )->parse( $body, $title );
 		$items = $parser->getThreadItems();

 		$output = [];
@@ -1,9 +1,8 @@
 var
-	testUtils = require( './testUtils.js' ),
 	CommentItem = require( 'ext.discussionTools.init' ).CommentItem,
 	HeadingItem = require( 'ext.discussionTools.init' ).HeadingItem;

-QUnit.module( 'mw.dt.ThreadItem', testUtils.newEnvironment() );
+QUnit.module( 'mw.dt.ThreadItem', QUnit.newMwEnvironment() );

 QUnit.test( '#getAuthorsBelow', function ( assert ) {
 	var cases = require( '../cases/authors.json' );
@@ -3,7 +3,7 @@ var
 	Parser = require( 'ext.discussionTools.init' ).Parser,
 	modifier = require( 'ext.discussionTools.init' ).modifier;

-QUnit.module( 'mw.dt.modifier', testUtils.newEnvironment() );
+QUnit.module( 'mw.dt.modifier', QUnit.newMwEnvironment() );

 require( '../cases/modified.json' ).forEach( function ( caseItem, i ) {
 	// This should be one test with many cases, rather than multiple tests, but the cases are large

@@ -18,7 +18,6 @@ require( '../cases/modified.json' ).forEach( function ( caseItem, i ) {
 		title = mw.Title.newFromText( caseItem.title );

 	testUtils.overrideMwConfig( config );
-	testUtils.overrideParserData( data );

 	$( fixture ).empty().append( expected );
 	var expectedHtml = fixture.innerHTML;

@@ -26,7 +25,7 @@ require( '../cases/modified.json' ).forEach( function ( caseItem, i ) {
 	$( fixture ).empty().append( dom );
 	var reverseExpectedHtml = fixture.innerHTML;

-	var parser = new Parser( fixture, title );
+	var parser = new Parser( data ).parse( fixture, title );
 	var comments = parser.getCommentItems();

 	// Add a reply to every comment. Note that this inserts *all* of the replies, unlike the real
@@ -76,14 +75,13 @@ QUnit.test( '#addReplyLink', function ( assert ) {
 		title = mw.Title.newFromText( caseItem.title );

 	testUtils.overrideMwConfig( config );
-	testUtils.overrideParserData( data );

 	$( fixture ).empty().append( expected );
 	var expectedHtml = fixture.innerHTML;

 	$( fixture ).empty().append( dom );

-	var parser = new Parser( fixture, title );
+	var parser = new Parser( data ).parse( fixture, title );
 	var comments = parser.getCommentItems();

 	// Add a reply link to every comment.
@@ -1,16 +1,13 @@
 /* global moment */
 var
-	testUtils = require( './testUtils.js' ),
-	Parser = require( 'ext.discussionTools.init' ).Parser,
-	dummyTitle = mw.Title.newFromText( 'Dummy' );
+	Parser = require( 'ext.discussionTools.init' ).Parser;

-QUnit.module( 'mw.dt.Parser', testUtils.newEnvironment() );
+QUnit.module( 'mw.dt.Parser', QUnit.newMwEnvironment() );

 QUnit.test( '#getTimestampRegexp', function ( assert ) {
 	var cases = require( '../cases/timestamp-regex.json' ),
-		parser = new Parser( document.createElement( 'div' ), dummyTitle );
-
-	testUtils.overrideParserData( require( '../data-en.json' ) );
+		parser = new Parser( require( '../data-en.json' ) );

 	cases.forEach( function ( caseItem ) {
 		assert.strictEqual(

@@ -23,9 +20,7 @@ QUnit.test( '#getTimestampRegexp', function ( assert ) {

 QUnit.test( '#getTimestampParser', function ( assert ) {
 	var cases = require( '../cases/timestamp-parser.json' ),
-		parser = new Parser( document.createElement( 'div' ), dummyTitle );
-
-	testUtils.overrideParserData( require( '../data-en.json' ) );
+		parser = new Parser( require( '../data-en.json' ) );

 	cases.forEach( function ( caseItem ) {
 		var tsParser = parser.getTimestampParser( 'en', caseItem.format, null, 'UTC', { UTC: 'UTC' } ),

@@ -40,9 +35,7 @@ QUnit.test( '#getTimestampParser', function ( assert ) {

 QUnit.test( '#getTimestampParser (at DST change)', function ( assert ) {
 	var cases = require( '../cases/timestamp-parser-dst.json' ),
-		parser = new Parser( document.createElement( 'div' ), dummyTitle );
-
-	testUtils.overrideParserData( require( '../data-en.json' ) );
+		parser = new Parser( require( '../data-en.json' ) );

 	cases.forEach( function ( caseItem ) {
 		var regexp = parser.getTimestampRegexp( 'en', caseItem.format, '\\d', caseItem.timezoneAbbrs ),

@@ -80,9 +73,8 @@ QUnit.test( '#getThreads', function ( assert ) {

 	$( fixture ).empty().append( $dom );
 	testUtils.overrideMwConfig( config );
-	testUtils.overrideParserData( data );

-	var parser = new Parser( fixture, title );
+	var parser = new Parser( data ).parse( fixture, title );
 	var threads = parser.getThreads();

 	threads.forEach( function ( thread, i ) {
@@ -2,42 +2,9 @@ var utils = require( 'ext.discussionTools.init' ).utils;

 module.exports = {};

-/* eslint-disable qunit/no-commented-tests */
-/**
- * Create a QUnit environment that will automatically restore the parser data and config at the end
- * of every test (so that we can use #overrideParserData and #overrideMwConfig without thinking
- * about cleaning it up).
- *
- * @return {Object}
- */
-module.exports.newEnvironment = function () {
-	var originalParserData = $.extend(
-		{},
-		mw.loader.moduleRegistry[ 'ext.discussionTools.init' ].packageExports[ 'parser/data.json' ]
-	);
-
-	return QUnit.newMwEnvironment( {
-		afterEach: function () {
-			module.exports.overrideParserData( originalParserData );
-			// mw.config is restored by QUnit.newMwEnvironment already
-		}
-	} );
-};
-
-/**
- * Override the parser data with the given data. Used for testing different languages etc.
- *
- * @param {Object} data
- */
-module.exports.overrideParserData = function ( data ) {
-	$.extend(
-		mw.loader.moduleRegistry[ 'ext.discussionTools.init' ].packageExports[ 'parser/data.json' ],
-		data
-	);
-};
-
 /**
  * Override mw.config with the given data. Used for testing different languages etc.
+ * (Automatically restored after every test by QUnit.newMwEnvironment.)
  *
  * @param {Object} config
  */
@@ -1,8 +1,6 @@
-var
-	testUtils = require( './testUtils.js' ),
-	utils = require( 'ext.discussionTools.init' ).utils;
+var utils = require( 'ext.discussionTools.init' ).utils;

-QUnit.module( 'mw.dt.utils', testUtils.newEnvironment() );
+QUnit.module( 'mw.dt.utils', QUnit.newMwEnvironment() );

 QUnit.test( '#linearWalk', function ( assert ) {
 	var cases = require( '../cases/linearWalk.json' );