3 * HTML cache invalidation of all pages linking to a given title.
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
25 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
26 * job gets called from the queue.
28 * This class is designed to work efficiently with small numbers of links, and
29 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
30 * and time requirements of loading all backlinked IDs in doUpdate() might become
31 * prohibitive. The requirements measured at Wikimedia are approximately:
33 * memory: 48 bytes per row
34 * time: 16us per row for the query plus processing
36 * The reason this query is done is to support partitioning of the job
37 * by backlinked ID. The memory issue could be alleviated by doing this query in
38 * batches, but of course LIMIT with an offset is inefficient on the DB side.
40 * The class is nevertheless a vast improvement on the previous method of using
41 * File::getLinksTo() and Title::touchArray(), which uses about 2KB of memory per
46 class HTMLCacheUpdateJob
extends Job
{
47 /** @var BacklinkCache */
50 protected $rowsPerJob, $rowsPerQuery;
/**
 * @param $title Title: the title linked to (invalidation target)
 * @param array $params job parameters (table, start and end page_ids)
 * @param $id Integer: job id
 */
function __construct( $title, $params, $id = 0 ) {
	global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

	parent::__construct( 'htmlCacheUpdate', $title, $params, $id );

	// Batch sizes come from site configuration.
	$this->rowsPerJob = $wgUpdateRowsPerJob;
	$this->rowsPerQuery = $wgUpdateRowsPerQuery;
	// Backlink cache for the target title; used by every update path.
	$this->blCache = $title->getBacklinkCache();
}
/**
 * Entry point. Decides whether this is a partition job (a page_id
 * range is present in the params) or the root job for the whole
 * update, and dispatches accordingly.
 */
public function run() {
	$params = $this->params;
	if ( isset( $params['start'] ) && isset( $params['end'] ) ) {
		# A page_id range was supplied: actually perform the
		# invalidation for that slice of the backlink set.
		return $this->doPartialUpdate();
	}
	# No range: either do the whole update directly or insert
	# partitioned sub-jobs.
	return $this->doFullUpdate();
}
 * Update all of the backlinks to this job's title.
 *
 * Picks one of three paths based on a row-count estimate from the
 * BacklinkCache: skip entirely when over $wgMaxBacklinksInvalidate,
 * insert partitioned sub-jobs when over 2 * rowsPerJob, otherwise
 * invalidate the titles directly (repartitioning if the estimate
 * turns out to have been too low).
 * NOTE(review): original lines 83, 89-91, 95, 103 and 105+ are absent
 * from this extract — presumably the early return and else/closing
 * braces; confirm against the full file.
81 	protected function doFullUpdate() {
82 		global $wgMaxBacklinksInvalidate;
84 		# Get an estimate of the number of rows from the BacklinkCache
# $max caps the count so getNumLinks() can stop early on huge link sets.
85 		$max = max( $this->rowsPerJob
* 2, $wgMaxBacklinksInvalidate ) +
1;
86 		$numRows = $this->blCache
->getNumLinks( $this->params
['table'], $max );
# Respect the site-wide cap on invalidation work, when one is configured.
87 		if ( $wgMaxBacklinksInvalidate !== false && $numRows > $wgMaxBacklinksInvalidate ) {
88 		wfDebug( "Skipped HTML cache invalidation of {$this->title->getPrefixedText()}." );
# Too many rows for a single job: split via cached partition data.
92 		if ( $numRows > $this->rowsPerJob
* 2 ) {
93 		# Do fast cached partition
94 		$this->insertPartitionJobs();
96 		# Get the links from the DB
97 		$titleArray = $this->blCache
->getLinks( $this->params
['table'] );
98 		# Check if the row count estimate was correct
99 		if ( $titleArray->count() > $this->rowsPerJob
* 2 ) {
100 		# Not correct, do accurate partition
101 		wfDebug( __METHOD__
. ": row count estimate was incorrect, repartitioning\n" );
102 		$this->insertJobsFromTitles( $titleArray );
104 		$this->invalidateTitles( $titleArray ); // small set: just do the query now
 * Update the subset of backlinks whose page_id falls within the range
 * given by $this->params['start'] and $this->params['end'].
 * NOTE(review): original lines 120 and 125+ (the else/closing braces
 * and any return value) are absent from this extract — confirm
 * against the full file.
114 	protected function doPartialUpdate() {
# Load only the backlinked titles inside this job's page_id range.
115 		$titleArray = $this->blCache
->getLinks(
116 		$this->params
['table'], $this->params
['start'], $this->params
['end'] );
117 		if ( $titleArray->count() <= $this->rowsPerJob
* 2 ) {
118 		# This partition is small enough, do the update
119 		$this->invalidateTitles( $titleArray );
121 		# Partitioning was excessively inaccurate. Divide the job further.
122 		# This can occur when a large number of links are added in a short
123 		# period of time, say by updating a heavily-used template.
124 		$this->insertJobsFromTitles( $titleArray );
 * Partition the current range given by $this->params['start'] and
 * $this->params['end'], using a pre-calculated title array which gives
 * the links in that range, and queue the resulting jobs.
 *
 * @param $titleArray array of Title objects to split into jobs
 * @param $rootJobParams array root-job info for de-duplication
 *   (NOTE(review): appears to be unconditionally overwritten by
 *   getRootJobParams() below, making the parameter unused — confirm)
138 	protected function insertJobsFromTitles( $titleArray, $rootJobParams = array() ) {
139 		// Carry over any "root job" information
140 		$rootJobParams = $this->getRootJobParams();
141 		# We make subpartitions in the sense that the start of the first job
142 		# will be the start of the parent partition, and the end of the last
143 		# job will be the end of the parent partition.
# NOTE(review): initialisation of $numTitles and $jobs (around original
# lines 144/146) is missing from this extract.
145 		$start = $this->params
['start']; # start of the current job
147 		foreach ( $titleArray as $title ) {
148 		$id = $title->getArticleID();
149 		# $numTitles is now the number of titles in the current job not
150 		# including the current ID
151 		if ( $numTitles >= $this->rowsPerJob
) {
152 		# Add a job up to but not including the current ID
153 		$jobs[] = new HTMLCacheUpdateJob( $this->title
,
155 		'table' => $this->params
['table'],
# NOTE(review): the 'start'/'end' entries and per-iteration bookkeeping
# (original lines 154, 156-157, 159-165) are missing from this extract.
158 		) +
$rootJobParams // carry over information for de-duplication
# Final job covers the remainder of the range, ending at the parent's end.
166 		$jobs[] = new HTMLCacheUpdateJob( $this->title
,
168 		'table' => $this->params
['table'],
170 		'end' => $this->params
['end']
171 		) +
$rootJobParams // carry over information for de-duplication
173 		wfDebug( __METHOD__
. ": repartitioning into " . count( $jobs ) . " jobs\n" );
175 		if ( count( $jobs ) < 2 ) {
176 		# I don't think this is possible at present, but handling this case
177 		# makes the code a bit more robust against future code updates and
178 		# avoids a potential infinite loop of repartitioning
179 		wfDebug( __METHOD__
. ": repartitioning failed!\n" );
180 		$this->invalidateTitles( $titleArray );
# Enqueue all partition jobs at once.
182 		JobQueueGroup
::singleton()->push( $jobs );
 * Partition the backlink set using cached partition data from the
 * BacklinkCache and queue one HTMLCacheUpdateJob per batch.
 *
 * @param $rootJobParams array root-job info for de-duplication
 *   (NOTE(review): appears to be unconditionally overwritten by
 *   getRootJobParams() below, making the parameter unused — confirm)
190 	protected function insertPartitionJobs( $rootJobParams = array() ) {
191 		// Carry over any "root job" information
192 		$rootJobParams = $this->getRootJobParams();
# Each batch is a [start, end] page_id range of roughly rowsPerJob rows.
194 		$batches = $this->blCache
->partition( $this->params
['table'], $this->rowsPerJob
);
195 		if ( !count( $batches ) ) {
196 		return; // no jobs to insert
# NOTE(review): initialisation of $jobs (original lines 197-199) is
# missing from this extract.
200 		foreach ( $batches as $batch ) {
201 		list( $start, $end ) = $batch;
202 		$jobs[] = new HTMLCacheUpdateJob( $this->title
,
204 		'table' => $this->params
['table'],
# NOTE(review): the 'start'/'end' entries (original lines 205-206) are
# missing from this extract.
207 		) +
$rootJobParams // carry over information for de-duplication
# Enqueue all partition jobs at once.
211 		JobQueueGroup
::singleton()->push( $jobs );
 * Invalidate an array (or iterator) of Title objects, right now:
 * bump page_touched on the master DB in chunks, then purge squid and
 * file caches as site configuration dictates.
 * @param $titleArray array of Title objects
 * NOTE(review): the tail of this method (beyond original line 259)
 * lies outside this extract.
218 	protected function invalidateTitles( $titleArray ) {
219 		global $wgUseFileCache, $wgUseSquid;
# All timestamp generation and updates go through the master DB.
221 		$dbw = wfGetDB( DB_MASTER
);
222 		$timestamp = $dbw->timestamp();
224 		# Get all IDs in this query into an array
226 		foreach ( $titleArray as $title ) {
227 		$ids[] = $title->getArticleID();
# Don't invalidate pages that were already touched after the root job
# started: the extra condition skips rows another job already updated.
# NOTE(review): the ':' (else) arm of this ternary (original lines
# 238-239) is missing from this extract.
235 		$touchedCond = isset( $this->params
['rootJobTimestamp'] )
236 		?
array( "page_touched < " .
237 		$dbw->addQuotes( $dbw->timestamp( $this->params
['rootJobTimestamp'] ) ) )
240 		# Update page_touched
# Chunk the ID list so each UPDATE touches at most rowsPerQuery rows.
241 		$batches = array_chunk( $ids, $this->rowsPerQuery
);
242 		foreach ( $batches as $batch ) {
243 		$dbw->update( 'page',
244 		array( 'page_touched' => $timestamp ),
245 		array( 'page_id' => $batch ) +
$touchedCond,
# Purge squid/CDN entries for all affected titles.
252 		$u = SquidUpdate
::newFromTitles( $titleArray );
# Clear the on-disk HTML file cache for each title, when enabled.
257 		if ( $wgUseFileCache ) {
258 		foreach ( $titleArray as $title ) {
259 		HTMLFileCache
::clearFileCache( $title );