[mediawiki.git] / includes/HTMLCacheUpdate.php
<?php

/**
 * Class to invalidate the HTML cache of all the pages linking to a given title.
 * Small numbers of links will be done immediately, large numbers are pushed onto
 * the job queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doUpdate() might become
 * prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
 *
 * The reason this query is done is to support partitioning of the job
 * by backlinked ID. The memory issue could be alleviated by doing this query in
 * batches, but of course LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * Image::getLinksTo() and Title::touchArray(), which uses about 2KB of memory per
 * link.
 */
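// Illustrative usage sketch (not part of the original file): after an edit to a
// widely-linked page, a caller constructs an update for one of the link tables
// and runs it. The table name 'templatelinks' below is only an example.
//
//     $u = new HTMLCacheUpdate( $title, 'templatelinks' );
//     $u->doUpdate();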
class HTMLCacheUpdate
{
	public $mTitle, $mTable, $mPrefix;
	public $mRowsPerJob, $mRowsPerQuery;

	function __construct( $titleTo, $table ) {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

		$this->mTitle = $titleTo;
		$this->mTable = $table;
		$this->mRowsPerJob = $wgUpdateRowsPerJob;
		$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
	}
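	/**
	 * Invalidate the cache of all pages linking to mTitle. Small backlink sets
	 * (at most mRowsPerJob rows) are invalidated immediately via
	 * invalidateIDs(); larger sets are partitioned and pushed onto the job
	 * queue via insertJobs().
	 */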
	function doUpdate() {
		# Fetch the IDs
		$cond = $this->getToCondition();
		$dbr =& wfGetDB( DB_SLAVE );
		$res = $dbr->select( $this->mTable, $this->getFromField(), $cond, __METHOD__ );
		$resWrap = new ResultWrapper( $dbr, $res );
		if ( $dbr->numRows( $res ) != 0 ) {
			if ( $dbr->numRows( $res ) > $this->mRowsPerJob ) {
				$this->insertJobs( $resWrap );
			} else {
				$this->invalidateIDs( $resWrap );
			}
		}
		$dbr->freeResult( $res );
	}
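	/**
	 * Partition the backlinked page IDs into batches of roughly mRowsPerJob
	 * rows and queue one HTMLCacheUpdateJob per batch, each covering a
	 * contiguous page_id range.
	 */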
	function insertJobs( ResultWrapper $res ) {
		$numRows = $res->numRows();
		$numBatches = ceil( $numRows / $this->mRowsPerJob );
		$realBatchSize = $numRows / $numBatches;
		$boundaries = array();
		$start = false;
		$jobs = array();
		do {
			for ( $i = 0; $i < $realBatchSize - 1; $i++ ) {
				$row = $res->fetchRow();
				if ( $row ) {
					$id = $row[0];
				} else {
					$id = false;
					break;
				}
			}

			if ( $id !== false ) {
				// One less on the end to avoid duplicating the boundary
				$job = new HTMLCacheUpdateJob( $this->mTitle, $this->mTable, $start, $id - 1 );
			} else {
				$job = new HTMLCacheUpdateJob( $this->mTitle, $this->mTable, $start, false );
			}
			$jobs[] = $job;

			$start = $id;
		} while ( $start );

		Job::batchInsert( $jobs );
	}
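	/**
	 * Return the column prefix ('pl', 'il', 'cl' or 'tl') for the link table
	 * this update operates on, throwing on an unsupported table.
	 */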
	function getPrefix() {
		static $prefixes = array(
			'pagelinks' => 'pl',
			'imagelinks' => 'il',
			'categorylinks' => 'cl',
			'templatelinks' => 'tl',

			# Not needed
			# 'externallinks' => 'el',
			# 'langlinks' => 'll'
		);

		if ( is_null( $this->mPrefix ) ) {
			$this->mPrefix = $prefixes[$this->mTable];
			if ( is_null( $this->mPrefix ) ) {
				throw new MWException( "Invalid table type \"{$this->mTable}\" in " . __CLASS__ );
			}
		}
		return $this->mPrefix;
	}
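	/**
	 * Return the link table field holding the page_id of the linking page,
	 * e.g. 'pl_from' for pagelinks.
	 */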
	function getFromField() {
		return $this->getPrefix() . '_from';
	}
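	/**
	 * Return a select() condition array matching rows in the link table that
	 * point to mTitle.
	 */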
	function getToCondition() {
		switch ( $this->mTable ) {
			case 'pagelinks':
				return array(
					'pl_namespace' => $this->mTitle->getNamespace(),
					'pl_title' => $this->mTitle->getDBkey()
				);
			case 'templatelinks':
				return array(
					'tl_namespace' => $this->mTitle->getNamespace(),
					'tl_title' => $this->mTitle->getDBkey()
				);
			case 'imagelinks':
				return array( 'il_to' => $this->mTitle->getDBkey() );
			case 'categorylinks':
				return array( 'cl_to' => $this->mTitle->getDBkey() );
		}
		throw new MWException( 'Invalid table type in ' . __CLASS__ );
	}
	/**
	 * Invalidate a set of IDs, right now
	 */
	function invalidateIDs( ResultWrapper $res ) {
		global $wgUseFileCache, $wgUseSquid;

		if ( $res->numRows() == 0 ) {
			return;
		}

		$dbw =& wfGetDB( DB_MASTER );
		$timestamp = $dbw->timestamp();
		$done = false;

		while ( !$done ) {
			# Get all IDs in this query into an array
			$ids = array();
			for ( $i = 0; $i < $this->mRowsPerQuery; $i++ ) {
				$row = $res->fetchRow();
				if ( $row ) {
					$ids[] = $row[0];
				} else {
					$done = true;
					break;
				}
			}

			if ( !count( $ids ) ) {
				break;
			}

			# Update page_touched
			$dbw->update( 'page',
				array( 'page_touched' => $timestamp ),
				array( 'page_id IN (' . $dbw->makeList( $ids ) . ')' ),
				__METHOD__
			);

			# Update squid
			if ( $wgUseSquid || $wgUseFileCache ) {
				$titles = Title::newFromIDs( $ids );
				if ( $wgUseSquid ) {
					$u = SquidUpdate::newFromTitles( $titles );
					$u->doUpdate();
				}

				# Update file cache
				if ( $wgUseFileCache ) {
					foreach ( $titles as $title ) {
						$cm = new CacheManager( $title );
						@unlink( $cm->fileCacheName() );
					}
				}
			}
		}
	}
}
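/**
 * Job wrapper for HTMLCacheUpdate: invalidates one page_id range of backlinks
 * when popped off the job queue.
 */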
class HTMLCacheUpdateJob extends Job {
	var $table, $start, $end;

	/**
	 * Construct a job
	 * @param Title $title The title linked to
	 * @param string $table The name of the link table.
	 * @param integer $start Beginning page_id or false for open interval
	 * @param integer $end End page_id or false for open interval
	 * @param integer $id job_id
	 */
	function __construct( $title, $table, $start, $end, $id = 0 ) {
		$params = array(
			'table' => $table,
			'start' => $start,
			'end' => $end );
		parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
		$this->table = $table;
		$this->start = intval( $start );
		$this->end = intval( $end );
	}
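	/**
	 * Run the job: select the backlinked page IDs within [start, end] from the
	 * link table and invalidate them immediately.
	 * @return bool Success
	 */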
	function run() {
		$update = new HTMLCacheUpdate( $this->title, $this->table );

		$fromField = $update->getFromField();
		$conds = $update->getToCondition();
		if ( $this->start ) {
			$conds[] = "$fromField >= {$this->start}";
		}
		if ( $this->end ) {
			$conds[] = "$fromField <= {$this->end}";
		}

		$dbr =& wfGetDB( DB_SLAVE );
		$res = $dbr->select( $this->table, $fromField, $conds, __METHOD__ );
		$update->invalidateIDs( new ResultWrapper( $dbr, $res ) );
		$dbr->freeResult( $res );

		return true;
	}
}