Make AuthManager::getAuthenticationProvider() public
[mediawiki.git] / maintenance / importImages.php
blobc653a5f06d33a89617d73bf0ef804c57a9978e65
1 <?php
2 /**
3 * Import one or more images from the local file system into the wiki without
4 * using the web-based interface.
6 * "Smart import" additions:
7 * - aim: preserve the essential metadata (user, description) when importing media
8 * files from an existing wiki.
9 * - process:
10 * - interface with the source wiki, don't use bare files only (see --source-wiki-url).
11 * - fetch metadata from source wiki for each file to import.
12 * - commit the fetched metadata to the destination wiki while submitting.
14 * This program is free software; you can redistribute it and/or modify
15 * it under the terms of the GNU General Public License as published by
16 * the Free Software Foundation; either version 2 of the License, or
17 * (at your option) any later version.
19 * This program is distributed in the hope that it will be useful,
20 * but WITHOUT ANY WARRANTY; without even the implied warranty of
21 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 * GNU General Public License for more details.
24 * You should have received a copy of the GNU General Public License along
25 * with this program; if not, write to the Free Software Foundation, Inc.,
26 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
27 * http://www.gnu.org/copyleft/gpl.html
29 * @file
30 * @ingroup Maintenance
31 * @author Rob Church <robchur@gmail.com>
 * @author Mij <mij@bitchx.it>
 */
# Options that take a value ("--opt=value"); the scrape-dropped "];"
# terminators are restored here so both arrays parse.
$optionsWithArgs = [
	'extensions', 'comment', 'comment-file', 'comment-ext', 'summary', 'user',
	'license', 'sleep', 'limit', 'from', 'source-wiki-url', 'timestamp',
];

# Boolean flags that take no value.
$optionsWithoutArgs = [
	'protect', 'unprotect', 'search-recursively', 'check-userblock', 'overwrite',
	'skip-dupes', 'dry'
];
# Pull in the CLI bootstrap (defines $options/$args) and the import helpers
# (findFiles, findAuxFile, getFileCommentFromSourceWiki, ...).
require_once __DIR__ . '/commandLine.inc';
require_once __DIR__ . '/importImages.inc';

# Running totals for the statistics report printed at the end.
$processed = $added = $ignored = $skipped = $overwritten = $failed = 0;

echo "Import Images\n\n";
# Need a path: the first positional argument is the source directory.
if ( count( $args ) == 0 ) {
	showUsage();
}

$dir = $args[0];

# Check Protection: --protect and --unprotect are mutually exclusive.
if ( isset( $options['protect'] ) && isset( $options['unprotect'] ) ) {
	die( "Cannot specify both protect and unprotect. Only 1 is allowed.\n" );
}

# A bare --protect (value "1") is useless; a restriction level is required.
if ( isset( $options['protect'] ) && $options['protect'] == 1 ) {
	die( "You must specify a protection option.\n" );
}
# Prepare the list of allowed extensions (lower-cased); defaults to the
# wiki's configured $wgFileExtensions.
global $wgFileExtensions;
$extensions = isset( $options['extensions'] )
	? explode( ',', strtolower( $options['extensions'] ) )
	: $wgFileExtensions;

# Search the path provided for candidates for import
$files = findFiles( $dir, $extensions, isset( $options['search-recursively'] ) );

# Initialise the user for this operation; fall back to the system
# 'Maintenance script' account when --user is absent or names an
# invalid user ('steal' reclaims the account if a human registered it).
$user = isset( $options['user'] )
	? User::newFromName( $options['user'] )
	: User::newSystemUser( 'Maintenance script', [ 'steal' => true ] );
if ( !$user instanceof User ) {
	$user = User::newSystemUser( 'Maintenance script', [ 'steal' => true ] );
}
$wgUser = $user;
# Get block check. If a value is given, this specifies how often the check
# is performed (every N files); a bare --check-userblock checks every file.
if ( isset( $options['check-userblock'] ) ) {
	if ( !$options['check-userblock'] ) {
		$checkUserBlock = 1;
	} else {
		$checkUserBlock = (int)$options['check-userblock'];
	}
} else {
	$checkUserBlock = false;
}

# Get --from (warnings suppressed because the key may simply be absent,
# leaving $from null).
MediaWiki\suppressWarnings();
$from = $options['from'];
MediaWiki\restoreWarnings();

# Get sleep time.
MediaWiki\suppressWarnings();
$sleep = $options['sleep'];
MediaWiki\restoreWarnings();

if ( $sleep ) {
	$sleep = (int)$sleep;
}

# Get limit number
MediaWiki\suppressWarnings();
$limit = $options['limit'];
MediaWiki\restoreWarnings();

if ( $limit ) {
	$limit = (int)$limit;
}

# Optional upload timestamp override; false means "use current time".
$timestamp = isset( $options['timestamp'] ) ? $options['timestamp'] : false;
# Get the upload comment. Provide a default one in case there's no comment given.
$comment = 'Importing file';

if ( isset( $options['comment-file'] ) ) {
	$comment = file_get_contents( $options['comment-file'] );
	# file_get_contents() returns false on failure; abort rather than
	# silently importing with a bogus description.
	if ( $comment === false || $comment === null ) {
		die( "failed to read comment file: {$options['comment-file']}\n" );
	}
} elseif ( isset( $options['comment'] ) ) {
	$comment = $options['comment'];
}

# Extension of optional per-file description files (false = not used).
$commentExt = isset( $options['comment-ext'] ) ? $options['comment-ext'] : false;

$summary = isset( $options['summary'] ) ? $options['summary'] : '';

# Get the license specifier
$license = isset( $options['license'] ) ? $options['license'] : '';
# Batch "upload" operation: iterate over every candidate file, import it,
# optionally protect it, and keep running totals for the final report.
$count = count( $files );
if ( $count > 0 ) {

	foreach ( $files as $file ) {
		$base = UtfNormal\Validator::cleanUp( wfBaseName( $file ) );

		# Validate a title
		$title = Title::makeTitleSafe( NS_FILE, $base );
		if ( !is_object( $title ) ) {
			echo "{$base} could not be imported; a valid title cannot be produced\n";
			continue;
		}

		# --from support: skip (and count as ignored) everything until the
		# named file is reached, then clear the marker.
		if ( $from ) {
			if ( $from == $title->getDBkey() ) {
				$from = null;
			} else {
				$ignored++;
				continue;
			}
		}

		# Periodically re-check that the importing user was not blocked
		# mid-run ($checkUserBlock is the check interval, or false).
		if ( $checkUserBlock && ( ( $processed % $checkUserBlock ) == 0 ) ) {
			$user->clearInstanceCache( 'name' ); // reload from DB!
			if ( $user->isBlocked() ) {
				echo $user->getName() . " was blocked! Aborting.\n";
				break;
			}
		}

		# Check existence
		$image = wfLocalFile( $title );
		if ( $image->exists() ) {
			if ( isset( $options['overwrite'] ) ) {
				echo "{$base} exists, overwriting...";
				$svar = 'overwritten';
			} else {
				echo "{$base} exists, skipping\n";
				$skipped++;
				continue;
			}
		} else {
			if ( isset( $options['skip-dupes'] ) ) {
				$repo = $image->getRepo();
				# XXX: we end up calculating this again when actually uploading. that sucks.
				$sha1 = FSFile::getSha1Base36FromPath( $file );

				$dupes = $repo->findBySha1( $sha1 );

				if ( $dupes ) {
					echo "{$base} already exists as " . $dupes[0]->getName() . ", skipping\n";
					$skipped++;
					continue;
				}
			}

			echo "Importing {$base}...";
			$svar = 'added';
		}

		if ( isset( $options['source-wiki-url'] ) ) {
			/* find comment text directly from source wiki, through MW's API */
			$real_comment = getFileCommentFromSourceWiki( $options['source-wiki-url'], $base );
			if ( $real_comment === false ) {
				$commentText = $comment;
			} else {
				$commentText = $real_comment;
			}

			/* find user directly from source wiki, through MW's API */
			$real_user = getFileUserFromSourceWiki( $options['source-wiki-url'], $base );
			if ( $real_user === false ) {
				$wgUser = $user;
			} else {
				$wgUser = User::newFromName( $real_user );
				if ( $wgUser === false ) {
					# user does not exist in target wiki
					echo "failed: user '$real_user' does not exist in target wiki.";
					continue;
				}
			}
		} else {
			# Find comment text: per-file description file first (if
			# --comment-ext was given), then the global default.
			$commentText = false;

			if ( $commentExt ) {
				$f = findAuxFile( $file, $commentExt );
				if ( !$f ) {
					echo " No comment file with extension {$commentExt} found "
						. "for {$file}, using default comment. ";
				} else {
					$commentText = file_get_contents( $f );
					if ( !$commentText ) {
						echo " Failed to load comment file {$f}, using default comment. ";
					}
				}
			}

			if ( !$commentText ) {
				$commentText = $comment;
			}
		}

		# Import the file
		if ( isset( $options['dry'] ) ) {
			echo " publishing {$file} by '" . $wgUser->getName() . "', comment '$commentText'... ";
		} else {
			$props = FSFile::getPropsFromPath( $file );
			$flags = 0;
			$publishOptions = [];
			$handler = MediaHandler::getHandler( $props['mime'] );
			if ( $handler ) {
				$publishOptions['headers'] = $handler->getStreamHeaders( $props['metadata'] );
			} else {
				$publishOptions['headers'] = [];
			}
			$archive = $image->publish( $file, $flags, $publishOptions );
			if ( !$archive->isGood() ) {
				echo "failed. (" .
					$archive->getWikiText( false, false, 'en' ) .
					")\n";
				$failed++;
				continue;
			}
		}

		$commentText = SpecialUpload::getInitialPageText( $commentText, $license );
		if ( !isset( $options['summary'] ) ) {
			# No explicit --summary: reuse the description text.
			$summary = $commentText;
		}

		if ( isset( $options['dry'] ) ) {
			echo "done.\n";
		} elseif ( $image->recordUpload2(
			$archive->value,
			$summary,
			$commentText,
			$props,
			$timestamp
		) ) {
			# We're done!
			echo "done.\n";

			$doProtect = false;

			global $wgRestrictionLevels;

			$protectLevel = isset( $options['protect'] ) ? $options['protect'] : null;

			if ( $protectLevel && in_array( $protectLevel, $wgRestrictionLevels ) ) {
				$doProtect = true;
			}
			if ( isset( $options['unprotect'] ) ) {
				$protectLevel = '';
				$doProtect = true;
			}
			if ( $doProtect ) {
				# Protect the file
				echo "\nWaiting for slaves...\n";
				// Give replication a moment to catch up before protecting.
				// sleep() takes an int; the old float literal 2.0 relied on
				// implicit coercion.
				sleep( 2 );
				wfWaitForSlaves();

				echo "\nSetting image restrictions ... ";

				$cascade = false;
				$restrictions = [];
				foreach ( $title->getRestrictionTypes() as $type ) {
					$restrictions[$type] = $protectLevel;
				}

				$page = WikiPage::factory( $title );
				$status = $page->doUpdateRestrictions( $restrictions, [], $cascade, '', $user );
				echo ( $status->isOK() ? 'done' : 'failed' ) . "\n";
			}
		} else {
			echo "failed. (at recordUpload stage)\n";
			$svar = 'failed';
		}

		# $svar names the counter to bump: 'added', 'overwritten' or 'failed'.
		$$svar++;
		$processed++;

		if ( $limit && $processed >= $limit ) {
			break;
		}
		if ( $sleep ) {
			sleep( $sleep );
		}
	}

	# Print out some statistics
	echo "\n";
	foreach (
		[
			'count' => 'Found',
			'limit' => 'Limit',
			'ignored' => 'Ignored',
			'added' => 'Added',
			'skipped' => 'Skipped',
			'overwritten' => 'Overwritten',
			'failed' => 'Failed'
		] as $var => $desc
	) {
		if ( $$var > 0 ) {
			echo "{$desc}: {$$var}\n";
		}
	}
} else {
	echo "No suitable files could be found for import.\n";
}

exit( 0 );
/**
 * Print usage information, optionally prefixed with an error reason,
 * then terminate with a non-zero exit status.
 *
 * @param string|bool $reason Error message to print first, or false for none
 */
function showUsage( $reason = false ) {
	if ( $reason ) {
		echo $reason . "\n";
	}

	echo <<<TEXT
Imports images and other media files into the wiki
USAGE: php importImages.php [options] <dir>

<dir> : Path to the directory containing images to be imported

Options:
--extensions=<exts>	Comma-separated list of allowable extensions, defaults
			to \$wgFileExtensions.
--overwrite		Overwrite existing images with the same name (default
			is to skip them).
--limit=<num>		Limit the number of images to process. Ignored or
			skipped images are not counted.
--from=<name>		Ignore all files until the one with the given name.
			Useful for resuming aborted imports. <name> should be
			the file's canonical database form.
--skip-dupes		Skip images that were already uploaded under a different
			name (check SHA1).
--search-recursively	Search recursively for files in subdirectories.
--sleep=<sec>		Sleep between files. Useful mostly for debugging.
--user=<username>	Set username of uploader, default 'Maintenance script'.
--check-userblock	Check if the user got blocked during import.
--comment=<text>	Set file description, default 'Importing file'.
--comment-file=<file>	Set description to the content of <file>.
--comment-ext=<ext>	Causes the description for each file to be loaded from a
			file with the same name, but the extension <ext>. If a
			global description is also given, it is appended.
--license=<code>	Use an optional license template.
--dry			Dry run, don't import anything.
--protect=<protect>	Specify the protect value (autoconfirmed,sysop).
--summary=<summary>	Upload summary, description will be used if not
			provided.
--timestamp=<timestamp>	Override upload time/date, all MediaWiki timestamp
			formats are accepted.
--unprotect		Unprotects all uploaded images.
--source-wiki-url	If specified, take User and Comment data for each
			imported file from this URL. For example,
			--source-wiki-url="http://en.wikipedia.org/."

TEXT;
	exit( 1 );
}