<?php
/**
 * HTML cache invalidation of all pages linking to a given title.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 */

/**
 * Job to purge the cache for all pages that link to or use another page or file
 *
 * This job comes in a few variants:
 *   - a) Recursive jobs to purge caches for backlink pages for a given title.
 *        These jobs have (recursive:true,table:<table>) set.
 *   - b) Jobs to purge caches for a set of titles (the job title is ignored).
 *        These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
 */
class HTMLCacheUpdateJob extends Job {
	function __construct( $title, $params = '' ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params );
		// Base backlink purge jobs can be de-duplicated
		$this->removeDuplicates = ( !isset( $params['range'] ) && !isset( $params['pages'] ) );
	}
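
	// Construction sketch (illustrative only, not part of the original file): the two
	// param shapes described in the class comment would typically be queued roughly
	// like this, using the real Job::newRootJobParams() helper for de-duplication info;
	// the titles and the root-job key below are hypothetical:
	//
	//   JobQueueGroup::singleton()->push( new HTMLCacheUpdateJob(
	//       $templateTitle, // Title of e.g. an edited template
	//       array( 'table' => 'templatelinks', 'recursive' => true )
	//           + Job::newRootJobParams( 'htmlCacheUpdate:templatelinks:' . $templateTitle->getPrefixedText() )
	//   ) );
	//
	//   new HTMLCacheUpdateJob( $someTitle, array( 'pages' => array(
	//       17 => array( NS_MAIN, 'Some_page' ) // page ID => (namespace, DB key)
	//   ) ) );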
	function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		static $expected = array( 'recursive', 'pages' ); // new jobs have one of these

		$oldRangeJob = false;
		if ( !array_intersect( array_keys( $this->params ), $expected ) ) {
			// B/C for older job params formats that lack these fields:
			// a) base jobs with just ("table") and b) range jobs with ("table","start","end")
			if ( isset( $this->params['start'] ) && isset( $this->params['end'] ) ) {
				$oldRangeJob = true;
			} else {
				$this->params['recursive'] = true; // base job
			}
		}
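		// Illustration (not from the original source): an old-style base job queued with
		// array( 'table' => 'templatelinks' ) is treated as
		// array( 'table' => 'templatelinks', 'recursive' => true ) by the branch above,
		// while an old-style range job such as
		// array( 'table' => 'templatelinks', 'start' => 12, 'end' => 3456 )
		// keeps its params and is handled by the $oldRangeJob branch below.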

		// Job to purge all (or a range of) backlink pages for a page
		if ( !empty( $this->params['recursive'] ) ) {
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
				$this,
				$wgUpdateRowsPerJob,
				$wgUpdateRowsPerQuery, // jobs-per-title
				// Carry over information for de-duplication
				array( 'params' => $this->getRootJobParams() )
			);
			JobQueueGroup::singleton()->push( $jobs );
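			// Rough sketch of the partitioning (assumed example values, not from the
			// original source): with $wgUpdateRowsPerJob = 300 and $wgUpdateRowsPerQuery
			// = 100, the first 300 backlinks become three 'pages' jobs of ~100 titles
			// each, and any remaining backlinks are covered by one further 'recursive'
			// job over the rest of the range, so large backlink sets drain incrementally.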
		// Job to purge pages for a set of titles
		} elseif ( isset( $this->params['pages'] ) ) {
			$this->invalidateTitles( $this->params['pages'] );
		// B/C for job to purge a range of backlink pages for a given page
		} elseif ( $oldRangeJob ) {
			$titleArray = $this->title->getBacklinkCache()->getLinks(
				$this->params['table'], $this->params['start'], $this->params['end'] );

			$pages = array(); // same format BacklinkJobUtils uses
			foreach ( $titleArray as $tl ) {
				$pages[$tl->getArticleID()] = array( $tl->getNamespace(), $tl->getDBkey() );
			}

			$jobs = array();
			// Preserve the page ID keys when chunking; invalidateTitles() relies on them
			foreach ( array_chunk( $pages, $wgUpdateRowsPerJob, true ) as $pageChunk ) {
				$jobs[] = new HTMLCacheUpdateJob( $this->title,
					array(
						'table' => $this->params['table'],
						'pages' => $pageChunk
					) + $this->getRootJobParams() // carry over information for de-duplication
				);
			}
			JobQueueGroup::singleton()->push( $jobs );
		}

		return true;
	}

	/**
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
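		// For illustration only (hypothetical values): $pages might look like
		// array( 17 => array( NS_MAIN, 'Some_page' ), 42 => array( NS_TEMPLATE, 'Infobox' ) ),
		// i.e. keyed by page ID with (namespace, DB key) pairs as produced by BacklinkJobUtils.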
		global $wgUpdateRowsPerQuery, $wgUseFileCache, $wgUseSquid;

		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );

		$dbw = wfGetDB( DB_MASTER );

		// The page_touched field will need to be bumped for these pages.
		// Only bump it to the present time if no "rootJobTimestamp" was known.
		// If it is known, it can be used instead, which avoids invalidating output
		// that was in fact generated *after* the relevant dependency change time
		// (e.g. template edit). This is particularly useful since refreshLinks jobs
		// save back parser output and usually run alongside htmlCacheUpdate jobs;
		// their saved output would be invalidated by using the current timestamp.
		if ( isset( $this->params['rootJobTimestamp'] ) ) {
			$touchTimestamp = $this->params['rootJobTimestamp'];
		} else {
			$touchTimestamp = wfTimestampNow();
		}
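
		// Worked example (hypothetical timestamps, not from the original source): a
		// template edit at 10:00 sets rootJobTimestamp = 10:00; a refreshLinks job
		// re-parses a using page and saves output at 10:05. Bumping page_touched only
		// to 10:00 keeps that newer output valid, whereas wfTimestampNow() here would
		// needlessly invalidate it.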

		// Update page_touched (skipping pages already touched since the root job).
		// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
		foreach ( array_chunk( $pageIds, $wgUpdateRowsPerQuery ) as $batch ) {
			$dbw->update( 'page',
				array( 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
				array( 'page_id' => $batch,
					// don't invalidate pages that were already invalidated
					"page_touched < " . $dbw->addQuotes( $dbw->timestamp( $touchTimestamp ) )
				),
				__METHOD__
			);
		}
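
		// Roughly the SQL produced per batch (sketch only; identifiers and quoting
		// simplified):
		//   UPDATE page SET page_touched = '<ts>'
		//   WHERE page_id IN (...) AND page_touched < '<ts>'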

		// Get the list of affected pages (races only mean something else did the purge)
		$titleArray = TitleArray::newFromResult( $dbw->select(
			'page',
			array( 'page_namespace', 'page_title' ),
			array( 'page_id' => $pageIds, 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
			__METHOD__
		) );

		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}

	public function workItemCount() {
		return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
	}
}