Merge "Typo fix"
mediawiki.git: includes/api/ApiQueryDuplicateFiles.php

<?php
/**
 *
 *
 * Created on Sep 27, 2008
 *
 * Copyright © 2008 Roan Kattouw "<Firstname>.<Lastname>@gmail.com"
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

/**
 * A query module to list duplicates of the given file(s)
 *
 * @ingroup API
 */
class ApiQueryDuplicateFiles extends ApiQueryGeneratorBase {

	public function __construct( $query, $moduleName ) {
		parent::__construct( $query, $moduleName, 'df' );
	}

	public function execute() {
		$this->run();
	}

	public function getCacheMode( $params ) {
		return 'public';
	}

	public function executeGenerator( $resultPageSet ) {
		$this->run( $resultPageSet );
	}

	/**
	 * @param ApiPageSet $resultPageSet
	 */
	private function run( $resultPageSet = null ) {
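		// Resolve the queried file titles to their SHA-1 hashes, look up every
		// file that shares one of those hashes, and list each match except the
		// queried file itself. Continuation state is encoded as "image|dupName".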
		$params = $this->extractRequestParams();
		$namespaces = $this->getPageSet()->getAllTitlesByNamespace();
		if ( empty( $namespaces[NS_FILE] ) ) {
			return;
		}
		$images = $namespaces[NS_FILE];

		if ( $params['dir'] == 'descending' ) {
			$images = array_reverse( $images );
		}

		$skipUntilThisDup = false;
		if ( isset( $params['continue'] ) ) {
			$cont = explode( '|', $params['continue'] );
			$this->dieContinueUsageIf( count( $cont ) != 2 );
			$fromImage = $cont[0];
			$skipUntilThisDup = $cont[1];
			// Filter out any images before $fromImage
			foreach ( $images as $image => $pageId ) {
				if ( $image < $fromImage ) {
					unset( $images[$image] );
				} else {
					break;
				}
			}
		}

		$filesToFind = array_keys( $images );
		if ( $params['localonly'] ) {
			$files = RepoGroup::singleton()->getLocalRepo()->findFiles( $filesToFind );
		} else {
			$files = RepoGroup::singleton()->findFiles( $filesToFind );
		}

		$fit = true;
		$count = 0;
		$titles = array();

		$sha1s = array();
		foreach ( $files as $file ) {
			/** @var $file File */
			$sha1s[$file->getName()] = $file->getSha1();
		}

		// Find all files with those hashes. Result format:
		// array( hash => array( dup1, dup2 ), hash1 => ... )
		$filesToFindBySha1s = array_unique( array_values( $sha1s ) );
		if ( $params['localonly'] ) {
			$filesBySha1s = RepoGroup::singleton()->getLocalRepo()->findBySha1s( $filesToFindBySha1s );
		} else {
			$filesBySha1s = RepoGroup::singleton()->findBySha1s( $filesToFindBySha1s );
		}

		// Iterate over $images to handle the continue param correctly
		foreach ( $images as $image => $pageId ) {
			if ( !isset( $sha1s[$image] ) ) {
				continue; // file does not exist
			}
			$sha1 = $sha1s[$image];
			$dupFiles = $filesBySha1s[$sha1];
			if ( $params['dir'] == 'descending' ) {
				$dupFiles = array_reverse( $dupFiles );
			}
			/** @var $dupFile File */
			foreach ( $dupFiles as $dupFile ) {
				$dupName = $dupFile->getName();
				if ( $image == $dupName && $dupFile->isLocal() ) {
					continue; // ignore the local file itself
				}
				if ( $skipUntilThisDup !== false && $dupName < $skipUntilThisDup ) {
					continue; // skip to the position after the image from the continue param
				}
				$skipUntilThisDup = false;
				if ( ++$count > $params['limit'] ) {
					$fit = false; // break outer loop
					// We're one over the limit, which shows that there are
					// additional images to be had. Stop here...
					$this->setContinueEnumParameter( 'continue', $image . '|' . $dupName );
					break;
				}
				if ( !is_null( $resultPageSet ) ) {
					$titles[] = $dupFile->getTitle();
				} else {
					$r = array(
						'name' => $dupName,
						'user' => $dupFile->getUser( 'text' ),
						'timestamp' => wfTimestamp( TS_ISO_8601, $dupFile->getTimestamp() )
					);
					if ( !$dupFile->isLocal() ) {
						$r['shared'] = '';
					}
					$fit = $this->addPageSubItem( $pageId, $r );
					if ( !$fit ) {
						$this->setContinueEnumParameter( 'continue', $image . '|' . $dupName );
						break;
					}
				}
			}
			if ( !$fit ) {
				break;
			}
		}
		if ( !is_null( $resultPageSet ) ) {
			$resultPageSet->populateFromTitles( $titles );
		}
	}

	public function getAllowedParams() {
		return array(
			'limit' => array(
				ApiBase::PARAM_DFLT => 10,
				ApiBase::PARAM_TYPE => 'limit',
				ApiBase::PARAM_MIN => 1,
				ApiBase::PARAM_MAX => ApiBase::LIMIT_BIG1,
				ApiBase::PARAM_MAX2 => ApiBase::LIMIT_BIG2
			),
			'continue' => null,
			'dir' => array(
				ApiBase::PARAM_DFLT => 'ascending',
				ApiBase::PARAM_TYPE => array(
					'ascending',
					'descending'
				)
			),
			'localonly' => false,
		);
	}

	public function getParamDescription() {
		return array(
			'limit' => 'How many duplicate files to return',
			'continue' => 'When more results are available, use this to continue',
			'dir' => 'The direction in which to list',
			'localonly' => 'Look only for files in the local repository',
		);
	}

	public function getResultProperties() {
		return array(
			'' => array(
				'name' => 'string',
				'user' => 'string',
				'timestamp' => 'timestamp',
				'shared' => 'boolean',
			)
		);
	}

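	/*
	 * Shape of one result entry (illustrative values only; the property names
	 * follow getResultProperties() above):
	 *
	 *   array(
	 *       'name' => 'Example.jpg',
	 *       'user' => 'SomeUser',
	 *       'timestamp' => '2008-09-27T00:00:00Z',
	 *       'shared' => ''
	 *   )
	 *
	 * 'shared' is emitted (as an empty string) only for duplicates that live in
	 * a foreign repository; see run() above.
	 */
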
	public function getDescription() {
		return 'List all files that are duplicates of the given file(s) based on hash values';
	}

	public function getExamples() {
		return array(
			'api.php?action=query&titles=File:Albert_Einstein_Head.jpg&prop=duplicatefiles',
			'api.php?action=query&generator=allimages&prop=duplicatefiles',
		);
	}

	public function getHelpUrls() {
		return 'https://www.mediawiki.org/wiki/API:Properties#duplicatefiles_.2F_df';
	}
}