Expose $wgMaxArticleSize in siteinfo query api
[mediawiki.git] / includes / specials / SpecialListDuplicatedFiles.php
blob49fa417c0e3b10eb83ec457361be6731aa405f9b
<?php
/**
 * Implements Special:ListDuplicatedFiles
 *
 * Copyright © 2013 Brian Wolff
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup SpecialPage
 * @author Brian Wolff
 */
/**
 * Special:ListDuplicatedFiles Lists all files where the current version is
 * a duplicate of the current version of some other file.
 *
 * @ingroup SpecialPage
 */
32 class ListDuplicatedFilesPage extends QueryPage {
/**
 * @param string $name Canonical name of this special page; overridable so
 *   subclasses can register under a different title.
 */
public function __construct( $name = 'ListDuplicatedFiles' ) {
	parent::__construct( $name );
}
/**
 * This query groups the whole image table, so it is only regenerated
 * periodically (e.g. by updateSpecialPages.php) when miser mode is on.
 * @return bool Always true
 */
public function isExpensive() {
	return true;
}
/**
 * This page's results are not offered as an RSS/Atom feed.
 * @return bool Always false
 */
public function isSyndicated() {
	return false;
}
/**
 * Get all the duplicates by grouping on sha1s.
 *
 * A cheaper (but less useful) version of this query would be to not
 * care how many duplicates a particular file has, and do a self-join
 * on the image table. However this version should be no more expensive
 * than Special:MostLinked, which seems to get handled fine with
 * however we are doing cached special pages.
 * @return array Query description understood by QueryPage
 */
public function getQueryInfo() {
	return [
		'tables' => [ 'image' ],
		'fields' => [
			// Every row in a sha1 group has the same content, so any
			// representative name will do; MIN() keeps it deterministic.
			'namespace' => NS_FILE,
			'title' => 'MIN(img_name)',
			'value' => 'count(*)'
		],
		'options' => [
			'GROUP BY' => 'img_sha1',
			// Only groups with at least one duplicate are interesting.
			'HAVING' => 'count(*) > 1',
		],
	];
}
/**
 * Pre-fill the link cache in one batch so that rendering each row in
 * formatResult() does not trigger a separate existence check.
 *
 * @param IDatabase $db
 * @param ResultWrapper $res
 */
public function preprocessResults( $db, $res ) {
	// Nothing to batch for an empty result set.
	if ( $res->numRows() === 0 ) {
		return;
	}

	$linkBatch = new LinkBatch();
	foreach ( $res as $row ) {
		$linkBatch->add( $row->namespace, $row->title );
	}

	// Rewind so the caller can iterate over the result again.
	$res->seek( 0 );
	$linkBatch->execute();
}
/**
 * Render one result row: the file's name plus a link to
 * Special:FileDuplicateSearch listing its duplicates.
 *
 * @param Skin $skin
 * @param object $result Result row (namespace, title, value)
 * @return string HTML
 */
public function formatResult( $skin, $result ) {
	// Future version might include a list of the first 5 duplicates
	// perhaps separated by an "↔".
	$image1 = Title::makeTitle( $result->namespace, $result->title );
	$dupeSearch = SpecialPage::getTitleFor( 'FileDuplicateSearch', $image1->getDBkey() );

	// 'value' is the size of the sha1 group, so the number of *other*
	// files sharing this content is value - 1.
	$msg = $this->msg( 'listduplicatedfiles-entry' )
		->params( $image1->getText() )
		->numParams( $result->value - 1 )
		->params( $dupeSearch->getPrefixedDBkey() );

	return $msg->parse();
}
/**
 * @return string Key of the Special:SpecialPages group this page is listed under
 */
protected function getGroupName() {
	return 'media';
}