Localisation updates from https://translatewiki.net.
[mediawiki.git] / includes / specials / SpecialListDuplicatedFiles.php
blobfd2fb1ddba98b4528c8968241bb1a939e592be94
<?php
/**
 * Copyright © 2013 Brian Wolff
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

namespace MediaWiki\Specials;

use MediaWiki\Cache\LinkBatchFactory;
use MediaWiki\SpecialPage\QueryPage;
use MediaWiki\SpecialPage\SpecialPage;
use MediaWiki\Title\Title;
use Skin;
use stdClass;
use Wikimedia\Rdbms\IConnectionProvider;
use Wikimedia\Rdbms\IDatabase;
use Wikimedia\Rdbms\IResultWrapper;
/**
 * List all files where the current version is a duplicate of the current
 * version of another file.
 *
 * @ingroup SpecialPage
 * @author Brian Wolff
 */
class SpecialListDuplicatedFiles extends QueryPage {

	/**
	 * @param IConnectionProvider $dbProvider
	 * @param LinkBatchFactory $linkBatchFactory
	 */
	public function __construct(
		IConnectionProvider $dbProvider,
		LinkBatchFactory $linkBatchFactory
	) {
		parent::__construct( 'ListDuplicatedFiles' );
		$this->setDatabaseProvider( $dbProvider );
		$this->setLinkBatchFactory( $linkBatchFactory );
	}

	// Grouping on sha1 is a heavy aggregate query, so this page is only
	// regenerated by the cached special-page update job.
	public function isExpensive() {
		return true;
	}

	// No RSS/Atom feed for this report.
	public function isSyndicated() {
		return false;
	}

	/**
	 * Get all the duplicates by grouping on sha1s.
	 *
	 * A cheaper (but less useful) version of this
	 * query would be to not care how many duplicates a
	 * particular file has, and do a self-join on image table.
	 * However this version should be no more expensive than
	 * Special:MostLinked, which seems to get handled fine
	 * with however we are doing cached special pages.
	 * @return array
	 */
	public function getQueryInfo() {
		return [
			'tables' => [ 'image' ],
			'fields' => [
				// Every row is a file page, so the namespace is constant.
				'namespace' => NS_FILE,
				// One representative name per sha1 group.
				'title' => 'MIN(img_name)',
				// Total number of files sharing this sha1.
				'value' => 'count(*)'
			],
			'options' => [
				'GROUP BY' => 'img_sha1',
				// Only report sha1s shared by more than one file.
				'HAVING' => 'count(*) > 1',
			]
		];
	}

	/**
	 * Pre-fill the link cache
	 *
	 * @param IDatabase $db
	 * @param IResultWrapper $res
	 */
	public function preprocessResults( $db, $res ) {
		$this->executeLBFromResultWrapper( $res );
	}

	/**
	 * @param Skin $skin
	 * @param stdClass $result Result row
	 * @return string
	 */
	public function formatResult( $skin, $result ) {
		// Future version might include a list of the first 5 duplicates
		// perhaps separated by an "↔".
		$image1 = Title::makeTitle( $result->namespace, $result->title );
		$dupeSearch = SpecialPage::getTitleFor( 'FileDuplicateSearch', $image1->getDBkey() );

		$msg = $this->msg( 'listduplicatedfiles-entry' )
			->params( $image1->getText() )
			// $result->value counts the whole group; subtract the file
			// itself to report only its duplicates.
			->numParams( $result->value - 1 )
			->params( $dupeSearch->getPrefixedDBkey() );

		return $msg->parse();
	}

	public function execute( $par ) {
		$this->addHelpLink( 'Help:Managing_files' );
		parent::execute( $par );
	}

	protected function getGroupName() {
		return 'media';
	}
}
/** @deprecated class alias since 1.41 */
class_alias( SpecialListDuplicatedFiles::class, 'SpecialListDuplicatedFiles' );