/**
 * Class to invalidate the HTML cache of all the pages linking to a given title.
 * Small numbers of links will be done immediately, large numbers are pushed onto
 * the job queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doUpdate() might become
 * prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
 *
 * The reason this query is done up front is to support partitioning of the job
 * by backlinked ID. The memory issue could be alleviated by doing this query in
 * batches, but LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * Image::getLinksTo() and Title::touchArray(), which uses about 2KB of memory
 * per link.
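 *
 * A typical call site looks roughly like the following (an illustrative sketch
 * only; the link table name and entry point depend on the caller):
 *
 *   $update = new HTMLCacheUpdate( $title, 'templatelinks' );
 *   $update->doUpdate();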
 */
class HTMLCacheUpdate {
	public $mTitle, $mTable, $mPrefix;
	public $mRowsPerJob, $mRowsPerQuery;

	function __construct( $titleTo, $table ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		$this->mTitle = $titleTo;
		$this->mTable = $table;
		$this->mRowsPerJob = $wgUpdateRowsPerJob;
		$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
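		# Backlink cache for mTitle, used below to count, enumerate and partition incoming links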
		$this->mCache = $this->mTitle->getBacklinkCache();
	}

	public function doUpdate() {
		$numRows = $this->mCache->getNumLinks( $this->mTable );

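		# Small updates are invalidated immediately; anything larger than
		# $wgUpdateRowsPerJob is partitioned and pushed onto the job queue.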
		if ( $numRows != 0 ) {
			if ( $numRows > $this->mRowsPerJob ) {
				$this->insertJobs();
			} else {
				$this->invalidate();
			}
		}
		wfRunHooks( 'HTMLCacheUpdate::doUpdate', array( $this->mTitle ) );
	}

	protected function insertJobs() {
		$batches = $this->mCache->partition( $this->mTable, $this->mRowsPerJob );
		if ( !$batches ) {
			return;
		}
		$jobs = array();
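		# One HTMLCacheUpdateJob per batch; each batch covers a range of backlinked page IDs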
		foreach ( $batches as $batch ) {
			$params = array(
				'table' => $this->mTable,
				'start' => $batch[0],
				'end' => $batch[1],
			);
			$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
		}
		Job::batchInsert( $jobs );
	}

	/**
	 * Invalidate a set of pages, right now
	 */
	public function invalidate( $startId = false, $endId = false ) {
		global $wgUseFileCache, $wgUseSquid;

		$titleArray = $this->mCache->getLinks( $this->mTable, $startId, $endId );
		if ( $titleArray->count() == 0 ) {
			return;
		}

		$dbw = wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();

		# Get all IDs in this query into an array
		$ids = array();
		foreach ( $titleArray as $title ) {
			$ids[] = $title->getArticleID();
		}

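		# Bump page_touched so cached HTML for these pages is treated as stale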
		$dbw->update( 'page',
			array( 'page_touched' => $timestamp ),
			array( 'page_id IN (' . $dbw->makeList( $ids ) . ')' ),
			__METHOD__
		);

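		# Purge the affected titles from Squid/proxy caches, if configured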
		if ( $wgUseSquid ) {
			$u = SquidUpdate::newFromTitles( $titleArray );
			$u->doUpdate();
		}

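		# Clear the static HTML file cache entry for each affected title, if enabled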
		if ( $wgUseFileCache ) {
			foreach ( $titleArray as $title ) {
				HTMLFileCache::clearFileCache( $title );
			}
		}
	}
}

/**
 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
 * job gets called from the queue.
 */
class HTMLCacheUpdateJob extends Job {
	var $table, $start, $end;

	/**
	 * @param Title $title The title linked to
	 * @param array $params Job parameters (table, start and end page_ids)
	 * @param integer $id job_id
	 */
	function __construct( $title, $params, $id = 0 ) {
		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
		$this->table = $params['table'];
		$this->start = $params['start'];
		$this->end = $params['end'];
	}

	public function run() {
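		# Re-run the cache invalidation for just this job's page_id range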
		$update = new HTMLCacheUpdate( $this->title, $this->table );
		$update->invalidate( $this->start, $this->end );
		return true;
	}
}