Was occasionally failing due to a race condition caused by mt_rand behaviour:
[mediawiki.git] / includes / HTMLCacheUpdate.php
blob bd63c072ded9fa7293b955577d0a0cd8bc198285
<?php
/**
 * Class to invalidate the HTML cache of all the pages linking to a given title.
 * Small numbers of links will be done immediately, large numbers are pushed onto
 * the job queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doUpdate() might become
 * prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
 *
 * The reason this query is done is to support partitioning of the job
 * by backlinked ID. The memory issue could be alleviated by doing this query in
 * batches, but of course LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * Image::getLinksTo() and Title::touchArray(), which used about 2KB of memory per
 * link.
 *
 * @ingroup Cache
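 *
 * A minimal usage sketch (hypothetical caller; 'templatelinks' stands in for
 * whichever backlink table actually needs purging):
 *
 *   $update = new HTMLCacheUpdate( $title, 'templatelinks' );
 *   $update->doUpdate(); // invalidates inline or queues HTMLCacheUpdateJob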
 */
class HTMLCacheUpdate {
	public $mTitle, $mTable, $mPrefix;
	public $mRowsPerJob, $mRowsPerQuery;
	public $mCache;

	function __construct( $titleTo, $table ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		$this->mTitle = $titleTo;
		$this->mTable = $table;
		$this->mRowsPerJob = $wgUpdateRowsPerJob;
		$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
		$this->mCache = $this->mTitle->getBacklinkCache();
	}

	public function doUpdate() {
		# Fetch the IDs
		$numRows = $this->mCache->getNumLinks( $this->mTable );

		if ( $numRows != 0 ) {
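			# Above $wgUpdateRowsPerJob rows, defer the work to the job queue
			# instead of stalling the current request on a huge purge.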
			if ( $numRows > $this->mRowsPerJob ) {
				$this->insertJobs();
			} else {
				$this->invalidate();
			}
		}
		wfRunHooks( 'HTMLCacheUpdate::doUpdate', array( $this->mTitle ) );
	}

	protected function insertJobs() {
		$batches = $this->mCache->partition( $this->mTable, $this->mRowsPerJob );
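		# partition() splits the backlink set into ( start, end ) page_id
		# ranges of at most mRowsPerJob rows each, so every job is bounded.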
		if ( !$batches ) {
			return;
		}
		$jobs = array();
		foreach ( $batches as $batch ) {
			$params = array(
				'table' => $this->mTable,
				'start' => $batch[0],
				'end' => $batch[1],
			);
			$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		}
		Job::batchInsert( $jobs );
	}

	/**
	 * Invalidate a set of pages, right now
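	 *
	 * @param mixed $startId Lowest page_id in the range, or false for no lower bound
	 * @param mixed $endId Highest page_id in the range, or false for no upper bound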
	 */
	public function invalidate( $startId = false, $endId = false ) {
		global $wgUseFileCache, $wgUseSquid;

		$titleArray = $this->mCache->getLinks( $this->mTable, $startId, $endId );
		if ( $titleArray->count() == 0 ) {
			return;
		}

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

		# Update page_touched
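		# (cached output older than page_touched is treated as stale, so one
		# UPDATE over the collected IDs invalidates every backlinked page)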
		$dbw->update( 'page',
			array( 'page_touched' => $timestamp ),
			array( 'page_id IN (' . $dbw->makeList( $ids ) . ')' ),
			__METHOD__
		);

		# Update squid
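		# (SquidUpdate purges each title's URLs from the configured HTTP
		# caches; the details live in SquidUpdate::doUpdate())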
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

		# Update file cache
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}
}

/**
 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
 * job gets called from the queue.
 *
 * @ingroup JobQueue
 */
class HTMLCacheUpdateJob extends Job {
	var $table, $start, $end;

	/**
	 * Construct a job
	 * @param Title $title The title linked to
	 * @param array $params Job parameters (table, start and end page_ids)
	 * @param integer $id job_id
	 */
	function __construct( $title, $params, $id = 0 ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
		$this->table = $params['table'];
		$this->start = $params['start'];
		$this->end = $params['end'];
	}

	public function run() {
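		# Re-run the invalidation restricted to this job's page_id slice;
		# collectively the queued jobs cover the whole backlink set.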
		$update = new HTMLCacheUpdate( $this->title, $this->table );
		$update->invalidate( $this->start, $this->end );
		return true;
	}
}