[mediawiki.git] / maintenance / storage / compressOld.php
<?php
/**
 * Compress the text of a wiki.
 *
 * Usage:
 *
 * Non-wikimedia
 * php compressOld.php [options...]
 *
 * Wikimedia
 * php compressOld.php <database> [options...]
 *
 * Options are:
 *  -t <type>           set compression type to either:
 *                          gzip: compress revisions independently
 *                          concat: concatenate revisions and compress in chunks (default)
 *  -c <chunk-size>     maximum number of revisions in a concat chunk
 *  -b <begin-date>     earliest date to check for uncompressed revisions
 *  -e <end-date>       latest revision date to compress
 *  -s <startid>        the id to start from (referring to the text table for
 *                      type gzip, and to the page table for type concat)
 *  -n <endid>          the page_id to stop at (only when using concat compression type)
 *  --extdb <cluster>   store specified revisions in an external cluster (untested)
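 *
 * Example invocation (illustrative only; adjust option values to your wiki):
 *     php compressOld.php --type=concat --chunksize=20 --startid=1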
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Maintenance ExternalStorage
 */

require_once __DIR__ . '/../Maintenance.php';

/**
 * Maintenance script that compresses the text of a wiki.
 *
 * @ingroup Maintenance ExternalStorage
 */
class CompressOld extends Maintenance {
	/**
	 * Option to load each revision individually.
	 */
	const LS_INDIVIDUAL = 0;

	/**
	 * Option to load revisions in chunks.
	 */
	const LS_CHUNKED = 1;

	public function __construct() {
		parent::__construct();
		$this->addDescription( 'Compress the text of a wiki' );
		$this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' );
		$this->addOption(
			'chunksize',
			'Maximum number of revisions in a concat chunk',
			false,
			true,
			'c'
		);
		$this->addOption(
			'begin-date',
			'Earliest date to check for uncompressed revisions',
			false,
			true,
			'b'
		);
		$this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' );
		$this->addOption(
			'startid',
			'The id to start from (gzip -> text table, concat -> page table)',
			false,
			true,
			's'
		);
		$this->addOption(
			'extdb',
			'Store specified revisions in an external cluster (untested)',
			false,
			true
		);
		$this->addOption(
			'endid',
			'The page_id to stop at (only when using concat compression type)',
			false,
			true,
			'n'
		);
	}

	public function execute() {
		global $wgDBname;
		if ( !function_exists( "gzdeflate" ) ) {
			$this->error( "You must enable zlib support in PHP to compress old revisions!\n" .
				"Please see http://www.php.net/manual/en/ref.zlib.php\n", true );
		}

		$type = $this->getOption( 'type', 'concat' );
		$chunkSize = $this->getOption( 'chunksize', 20 );
		$startId = $this->getOption( 'startid', 0 );
		$beginDate = $this->getOption( 'begin-date', '' );
		$endDate = $this->getOption( 'end-date', '' );
		$extDB = $this->getOption( 'extdb', '' );
		$endId = $this->getOption( 'endid', false );

		if ( $type != 'concat' && $type != 'gzip' ) {
			$this->error( "Type \"{$type}\" not supported" );
		}

		if ( $extDB != '' ) {
			$this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		} else {
			$this->output( "Compressing database {$wgDBname}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		}

		$success = true;
		if ( $type == 'concat' ) {
			$success = $this->compressWithConcat( $startId, $chunkSize, $beginDate,
				$endDate, $extDB, $endId );
		} else {
			$this->compressOldPages( $startId, $extDB );
		}

		if ( $success ) {
			$this->output( "Done.\n" );
		}
	}

	/**
	 * Fetch text rows and pass them one at a time to the 'compressPage' function for compression.
	 *
	 * @param int $start
	 * @param string $extdb
	 */
	private function compressOldPages( $start = 0, $extdb = '' ) {
		$chunksize = 50;
		$this->output( "Starting from old_id $start...\n" );
		$dbw = $this->getDB( DB_MASTER );
		do {
			$res = $dbw->select(
				'text',
				[ 'old_id', 'old_flags', 'old_text' ],
				"old_id>=$start",
				__METHOD__,
				[ 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ]
			);

			if ( $res->numRows() == 0 ) {
				break;
			}

			$last = $start;

			foreach ( $res as $row ) {
				# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
				$this->compressPage( $row, $extdb );
				$last = $row->old_id;
			}

			$start = $last + 1; # Deletion may leave long empty stretches
			$this->output( "$start...\n" );
		} while ( true );
	}

	/**
	 * Compress the text in gzip format.
	 *
	 * @param stdClass $row
	 * @param string $extdb
	 * @return bool
	 */
	private function compressPage( $row, $extdb ) {
		if ( false !== strpos( $row->old_flags, 'gzip' )
			|| false !== strpos( $row->old_flags, 'object' )
		) {
			# print "Already compressed row {$row->old_id}\n";
			return false;
		}
		$dbw = $this->getDB( DB_MASTER );
		$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
		$compress = gzdeflate( $row->old_text );
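		# Note: rows flagged 'gzip' this way are expected to be decompressed with gzinflate()
		# when read back (e.g. via Revision::getRevisionText()).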

		# Store in external storage if required
		if ( $extdb !== '' ) {
			$storeObj = new ExternalStoreDB;
			$compress = $storeObj->store( $extdb, $compress );
			if ( $compress === false ) {
				$this->error( "Unable to store object" );

				return false;
			}
		}

		# Update text row
		$dbw->update( 'text',
			[ /* SET */
				'old_flags' => $flags,
				'old_text' => $compress
			], [ /* WHERE */
				'old_id' => $row->old_id
			], __METHOD__,
			[ 'LIMIT' => 1 ]
		);

		return true;
	}

	/**
	 * Compress the text in chunks after concatenating the revisions.
	 *
	 * @param int $startId
	 * @param int $maxChunkSize
	 * @param string $beginDate
	 * @param string $endDate
	 * @param string $extdb
	 * @param bool|int $maxPageId
	 * @return bool
	 */
	private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
		$endDate, $extdb = "", $maxPageId = false
	) {
		$loadStyle = self::LS_CHUNKED;

		$dbr = $this->getDB( DB_SLAVE );
		$dbw = $this->getDB( DB_MASTER );

		# Set up external storage
		if ( $extdb != '' ) {
			$storeObj = new ExternalStoreDB;
		}

		# Get all articles by page_id
		if ( !$maxPageId ) {
			$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ );
		}
		$this->output( "Starting from $startId of $maxPageId\n" );
		$pageConds = [];

		/*
		if ( $exclude_ns0 ) {
			print "Excluding main namespace\n";
			$pageConds[] = 'page_namespace<>0';
		}
		if ( $queryExtra ) {
			$pageConds[] = $queryExtra;
		}
		 */

		# For each article, get a list of revisions which fit the criteria

		# No recompression, use a condition on old_flags
		# Don't compress object type entities, because that might produce data loss when
		# overwriting bulk storage concat rows. Don't compress external references, because
		# the script doesn't yet delete rows from external storage.
		$conds = [
			'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() )
				. ' AND old_flags NOT '
				. $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() )
		];

		if ( $beginDate ) {
			if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
				$this->error( "Invalid begin date \"$beginDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp>'" . $beginDate . "'";
		}
		if ( $endDate ) {
			if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
				$this->error( "Invalid end date \"$endDate\"\n" );

				return false;
			}
			$conds[] = "rev_timestamp<'" . $endDate . "'";
		}
		if ( $loadStyle == self::LS_CHUNKED ) {
			$tables = [ 'revision', 'text' ];
			$fields = [ 'rev_id', 'rev_text_id', 'old_flags', 'old_text' ];
			$conds[] = 'rev_text_id=old_id';
			$revLoadOptions = 'FOR UPDATE';
		} else {
			$tables = [ 'revision' ];
			$fields = [ 'rev_id', 'rev_text_id' ];
			$revLoadOptions = [];
		}

		# Don't work with current revisions
		# Don't lock the page table for update either -- TS 2006-04-04
		# $tables[] = 'page';
		# $conds[] = 'page_id=rev_page AND rev_id != page_latest';

		for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
			wfWaitForSlaves();

			# Wake up
			$dbr->ping();

			# Get the page row
			$pageRes = $dbr->select( 'page',
				[ 'page_id', 'page_namespace', 'page_title', 'page_latest' ],
				$pageConds + [ 'page_id' => $pageId ], __METHOD__ );
			if ( $pageRes->numRows() == 0 ) {
				continue;
			}
			$pageRow = $dbr->fetchObject( $pageRes );

			# Display progress
			$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
			$this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " );

			# Load revisions
			$revRes = $dbw->select( $tables, $fields,
				array_merge( [
					'rev_page' => $pageRow->page_id,
					# Don't operate on the current revision
					# Use < instead of <> in case the current revision has changed
					# since the page select, which wasn't locking
					'rev_id < ' . $pageRow->page_latest
				], $conds ),
				__METHOD__,
				$revLoadOptions
			);
			$revs = [];
			foreach ( $revRes as $revRow ) {
				$revs[] = $revRow;
			}

			if ( count( $revs ) < 2 ) {
				# No revisions matching, no further processing
				$this->output( "\n" );
				continue;
			}

			# For each chunk
			$i = 0;
			while ( $i < count( $revs ) ) {
				if ( $i < count( $revs ) - $maxChunkSize ) {
					$thisChunkSize = $maxChunkSize;
				} else {
					$thisChunkSize = count( $revs ) - $i;
				}

				$chunk = new ConcatenatedGzipHistoryBlob();
				$stubs = [];
				$this->beginTransaction( $dbw, __METHOD__ );
				$usedChunk = false;
				$primaryOldid = $revs[$i]->rev_text_id;
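				# When storing locally, the text row for $primaryOldid will hold the serialized
				# chunk object; the other revisions in the chunk get stubs pointing back to it.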

				// @codingStandardsIgnoreStart Ignore avoid function calls in a FOR loop test part warning
				# Get the text of each revision and add it to the object
				for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
					// @codingStandardsIgnoreEnd
					$oldid = $revs[$i + $j]->rev_text_id;

					# Get text
					if ( $loadStyle == self::LS_INDIVIDUAL ) {
						$textRow = $dbw->selectRow( 'text',
							[ 'old_flags', 'old_text' ],
							[ 'old_id' => $oldid ],
							__METHOD__,
							'FOR UPDATE'
						);
						$text = Revision::getRevisionText( $textRow );
					} else {
						$text = Revision::getRevisionText( $revs[$i + $j] );
					}

					if ( $text === false ) {
						$this->error( "\nError, unable to get text in old_id $oldid" );
						# $dbw->delete( 'old', array( 'old_id' => $oldid ) );
					}

					if ( $extdb == "" && $j == 0 ) {
						$chunk->setText( $text );
						$this->output( '.' );
					} else {
						# Don't make a stub if it's going to be longer than the article
						# Stubs are typically about 100 bytes
						if ( strlen( $text ) < 120 ) {
							$stub = false;
							$this->output( 'x' );
						} else {
							$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
							$stub->setLocation( $primaryOldid );
							$stub->setReferrer( $oldid );
							$this->output( '.' );
							$usedChunk = true;
						}
						$stubs[$j] = $stub;
					}
				}
				$thisChunkSize = $j;

				# If we couldn't actually use any stubs because the pages were too small, do nothing
				if ( $usedChunk ) {
					if ( $extdb != "" ) {
						# Move blob objects to External Storage
						$stored = $storeObj->store( $extdb, serialize( $chunk ) );
						if ( $stored === false ) {
							$this->error( "Unable to store object" );

							return false;
						}
						# Store External Storage URLs instead of Stub placeholders
						foreach ( $stubs as $stub ) {
							if ( $stub === false ) {
								continue;
							}
							# $stored should provide base path to a BLOB
							$url = $stored . "/" . $stub->getHash();
							$dbw->update( 'text',
								[ /* SET */
									'old_text' => $url,
									'old_flags' => 'external,utf-8',
								], [ /* WHERE */
									'old_id' => $stub->getReferrer(),
								]
							);
						}
					} else {
						# Store the main object locally
						$dbw->update( 'text',
							[ /* SET */
								'old_text' => serialize( $chunk ),
								'old_flags' => 'object,utf-8',
							], [ /* WHERE */
								'old_id' => $primaryOldid
							]
						);

						# Store the stub objects
						for ( $j = 1; $j < $thisChunkSize; $j++ ) {
							# Skip if not compressing and don't overwrite the first revision
							if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) {
								$dbw->update( 'text',
									[ /* SET */
										'old_text' => serialize( $stubs[$j] ),
										'old_flags' => 'object,utf-8',
									], [ /* WHERE */
										'old_id' => $revs[$i + $j]->rev_text_id
									]
								);
							}
						}
					}
				}

				# Done, next
				$this->output( "/" );
				$this->commitTransaction( $dbw, __METHOD__ );
				$i += $thisChunkSize;
				wfWaitForSlaves();
			}
			$this->output( "\n" );
		}

		return true;
	}
}

$maintClass = 'CompressOld';
require_once RUN_MAINTENANCE_IF_MAIN;