Followup r82727, improve comments, cast return value to bool
[mediawiki.git] / includes / specials / SpecialExport.php
blobfd20d8b17656bf247553810fd602b6fb2964c3a7
1 <?php
2 /**
3 * Implements Special:Export
5 * Copyright © 2003-2008 Brion Vibber <brion@pobox.com>
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
17 * You should have received a copy of the GNU General Public License along
18 * with this program; if not, write to the Free Software Foundation, Inc.,
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
20 * http://www.gnu.org/copyleft/gpl.html
22 * @file
23 * @ingroup SpecialPage
26 /**
27 * A special page that allows users to export pages in a XML file
29 * @ingroup SpecialPage
class SpecialExport extends SpecialPage {
	// Export options parsed from the request in execute():
	//   $curonly       - export only the latest revision of each page
	//   $doExport      - whether to stream XML output instead of showing the form
	//   $pageLinkDepth - how many levels of pagelinks to follow (validated)
	//   $templates     - whether to also export templates used by listed pages
	private $curonly, $doExport, $pageLinkDepth, $templates;
	// Whether to also export images used by the pages (not functional yet —
	// see the commented-out checks in execute()/doExport()).
	private $images;
36 public function __construct() {
37 parent::__construct( 'Export' );
40 public function execute( $par ) {
41 global $wgOut, $wgRequest, $wgSitename, $wgExportAllowListContributors;
42 global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;
43 global $wgExportFromNamespaces, $wgUser;
45 $this->setHeaders();
46 $this->outputHeader();
48 // Set some variables
49 $this->curonly = true;
50 $this->doExport = false;
51 $this->templates = $wgRequest->getCheck( 'templates' );
52 $this->images = $wgRequest->getCheck( 'images' ); // Doesn't do anything yet
53 $this->pageLinkDepth = $this->validateLinkDepth(
54 $wgRequest->getIntOrNull( 'pagelink-depth' )
56 $nsindex = '';
58 if ( $wgRequest->getCheck( 'addcat' ) ) {
59 $page = $wgRequest->getText( 'pages' );
60 $catname = $wgRequest->getText( 'catname' );
62 if ( $catname !== '' && $catname !== null && $catname !== false ) {
63 $t = Title::makeTitleSafe( NS_MAIN, $catname );
64 if ( $t ) {
65 /**
66 * @todo Fixme: this can lead to hitting memory limit for very large
67 * categories. Ideally we would do the lookup synchronously
68 * during the export in a single query.
70 $catpages = $this->getPagesFromCategory( $t );
71 if ( $catpages ) {
72 $page .= "\n" . implode( "\n", $catpages );
77 else if( $wgRequest->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
78 $page = $wgRequest->getText( 'pages' );
79 $nsindex = $wgRequest->getText( 'nsindex', '' );
81 if ( strval( $nsindex ) !== '' ) {
82 /**
83 * Same implementation as above, so same @todo
85 $nspages = $this->getPagesFromNamespace( $nsindex );
86 if ( $nspages ) {
87 $page .= "\n" . implode( "\n", $nspages );
91 else if( $wgRequest->wasPosted() && $par == '' ) {
92 $page = $wgRequest->getText( 'pages' );
93 $this->curonly = $wgRequest->getCheck( 'curonly' );
94 $rawOffset = $wgRequest->getVal( 'offset' );
96 if( $rawOffset ) {
97 $offset = wfTimestamp( TS_MW, $rawOffset );
98 } else {
99 $offset = null;
102 $limit = $wgRequest->getInt( 'limit' );
103 $dir = $wgRequest->getVal( 'dir' );
104 $history = array(
105 'dir' => 'asc',
106 'offset' => false,
107 'limit' => $wgExportMaxHistory,
109 $historyCheck = $wgRequest->getCheck( 'history' );
111 if ( $this->curonly ) {
112 $history = WikiExporter::CURRENT;
113 } elseif ( !$historyCheck ) {
114 if ( $limit > 0 && ($wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
115 $history['limit'] = $limit;
117 if ( !is_null( $offset ) ) {
118 $history['offset'] = $offset;
120 if ( strtolower( $dir ) == 'desc' ) {
121 $history['dir'] = 'desc';
125 if( $page != '' ) {
126 $this->doExport = true;
128 } else {
129 // Default to current-only for GET requests.
130 $page = $wgRequest->getText( 'pages', $par );
131 $historyCheck = $wgRequest->getCheck( 'history' );
133 if( $historyCheck ) {
134 $history = WikiExporter::FULL;
135 } else {
136 $history = WikiExporter::CURRENT;
139 if( $page != '' ) {
140 $this->doExport = true;
144 if( !$wgExportAllowHistory ) {
145 // Override
146 $history = WikiExporter::CURRENT;
149 $list_authors = $wgRequest->getCheck( 'listauthors' );
150 if ( !$this->curonly || !$wgExportAllowListContributors ) {
151 $list_authors = false ;
154 if ( $this->doExport ) {
155 $wgOut->disable();
157 // Cancel output buffering and gzipping if set
158 // This should provide safer streaming for pages with history
159 wfResetOutputBuffers();
160 $wgRequest->response()->header( "Content-type: application/xml; charset=utf-8" );
162 if( $wgRequest->getCheck( 'wpDownload' ) ) {
163 // Provide a sane filename suggestion
164 $filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
165 $wgRequest->response()->header( "Content-disposition: attachment;filename={$filename}" );
168 $this->doExport( $page, $history, $list_authors );
170 return;
173 $wgOut->addWikiMsg( 'exporttext' );
175 $form = Xml::openElement( 'form', array( 'method' => 'post',
176 'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
177 $form .= Xml::inputLabel( wfMsg( 'export-addcattext' ) , 'catname', 'catname', 40 ) . '&#160;';
178 $form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';
180 if ( $wgExportFromNamespaces ) {
181 $form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&#160;';
182 $form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
185 $form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
186 $form .= '<br />';
188 if( $wgExportAllowHistory ) {
189 $form .= Xml::checkLabel( wfMsg( 'exportcuronly' ), 'curonly', 'curonly', true ) . '<br />';
190 } else {
191 $wgOut->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
194 $form .= Xml::checkLabel( wfMsg( 'export-templates' ), 'templates', 'wpExportTemplates', false ) . '<br />';
196 if( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
197 $form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
199 // Enable this when we can do something useful exporting/importing image information. :)
200 //$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
201 $form .= Xml::checkLabel( wfMsg( 'export-download' ), 'wpDownload', 'wpDownload', true ) . '<br />';
203 $form .= Xml::submitButton( wfMsg( 'export-submit' ), $wgUser->getSkin()->tooltipAndAccessKeyAttribs( 'export' ) );
204 $form .= Xml::closeElement( 'form' );
206 $wgOut->addHTML( $form );
209 private function userCanOverrideExportDepth() {
210 global $wgUser;
211 return $wgUser->isAllowed( 'override-export-depth' );
215 * Do the actual page exporting
217 * @param $page String: user input on what page(s) to export
218 * @param $history Mixed: one of the WikiExporter history export constants
219 * @param $list_authors Boolean: Whether to add distinct author list (when
220 * not returning full history)
222 private function doExport( $page, $history, $list_authors ) {
223 $pageSet = array(); // Inverted index of all pages to look up
225 // Split up and normalize input
226 foreach( explode( "\n", $page ) as $pageName ) {
227 $pageName = trim( $pageName );
228 $title = Title::newFromText( $pageName );
229 if( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
230 // Only record each page once!
231 $pageSet[$title->getPrefixedText()] = true;
235 // Set of original pages to pass on to further manipulation...
236 $inputPages = array_keys( $pageSet );
238 // Look up any linked pages if asked...
239 if( $this->templates ) {
240 $pageSet = $this->getTemplates( $inputPages, $pageSet );
242 $linkDepth = $this->pageLinkDepth;
243 if( $linkDepth ) {
244 $pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
248 // Enable this when we can do something useful exporting/importing image information. :)
249 if( $this->images ) ) {
250 $pageSet = $this->getImages( $inputPages, $pageSet );
254 $pages = array_keys( $pageSet );
256 // Normalize titles to the same format and remove dupes, see bug 17374
257 foreach( $pages as $k => $v ) {
258 $pages[$k] = str_replace( " ", "_", $v );
261 $pages = array_unique( $pages );
263 /* Ok, let's get to it... */
264 if( $history == WikiExporter::CURRENT ) {
265 $lb = false;
266 $db = wfGetDB( DB_SLAVE );
267 $buffer = WikiExporter::BUFFER;
268 } else {
269 // Use an unbuffered query; histories may be very long!
270 $lb = wfGetLBFactory()->newMainLB();
271 $db = $lb->getConnection( DB_SLAVE );
272 $buffer = WikiExporter::STREAM;
274 // This might take a while... :D
275 wfSuppressWarnings();
276 set_time_limit(0);
277 wfRestoreWarnings();
280 $exporter = new WikiExporter( $db, $history, $buffer );
281 $exporter->list_authors = $list_authors;
282 $exporter->openStream();
284 foreach( $pages as $page ) {
286 if( $wgExportMaxHistory && !$this->curonly ) {
287 $title = Title::newFromText( $page );
288 if( $title ) {
289 $count = Revision::countByTitle( $db, $title );
290 if( $count > $wgExportMaxHistory ) {
291 wfDebug( __FUNCTION__ .
292 ": Skipped $page, $count revisions too big\n" );
293 continue;
297 #Bug 8824: Only export pages the user can read
298 $title = Title::newFromText( $page );
299 if( is_null( $title ) ) {
300 continue; #TODO: perhaps output an <error> tag or something.
302 if( !$title->userCanRead() ) {
303 continue; #TODO: perhaps output an <error> tag or something.
306 $exporter->pageByTitle( $title );
309 $exporter->closeStream();
311 if( $lb ) {
312 $lb->closeAll();
316 private function getPagesFromCategory( $title ) {
317 global $wgContLang;
319 $name = $title->getDBkey();
321 $dbr = wfGetDB( DB_SLAVE );
322 $res = $dbr->select(
323 array( 'page', 'categorylinks' ),
324 array( 'page_namespace', 'page_title' ),
325 array( 'cl_from=page_id', 'cl_to' => $name ),
326 __METHOD__,
327 array( 'LIMIT' => '5000' )
330 $pages = array();
332 foreach ( $res as $row ) {
333 $n = $row->page_title;
334 if ($row->page_namespace) {
335 $ns = $wgContLang->getNsText( $row->page_namespace );
336 $n = $ns . ':' . $n;
339 $pages[] = $n;
341 return $pages;
344 private function getPagesFromNamespace( $nsindex ) {
345 global $wgContLang;
347 $dbr = wfGetDB( DB_SLAVE );
348 $res = $dbr->select(
349 'page',
350 array( 'page_namespace', 'page_title' ),
351 array( 'page_namespace' => $nsindex ),
352 __METHOD__,
353 array( 'LIMIT' => '5000' )
356 $pages = array();
358 foreach ( $res as $row ) {
359 $n = $row->page_title;
361 if ( $row->page_namespace ) {
362 $ns = $wgContLang->getNsText( $row->page_namespace );
363 $n = $ns . ':' . $n;
366 $pages[] = $n;
368 return $pages;
372 * Expand a list of pages to include templates used in those pages.
373 * @param $inputPages array, list of titles to look up
374 * @param $pageSet array, associative array indexed by titles for output
375 * @return array associative array index by titles
377 private function getTemplates( $inputPages, $pageSet ) {
378 return $this->getLinks( $inputPages, $pageSet,
379 'templatelinks',
380 array( 'tl_namespace AS namespace', 'tl_title AS title' ),
381 array( 'page_id=tl_from' )
386 * Validate link depth setting, if available.
388 private function validateLinkDepth( $depth ) {
389 global $wgExportMaxLinkDepth;
391 if( $depth < 0 ) {
392 return 0;
395 if ( !$this->userCanOverrideExportDepth() ) {
396 if( $depth > $wgExportMaxLinkDepth ) {
397 return $wgExportMaxLinkDepth;
402 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
403 * crazy-big export from being done by someone setting the depth
404 * number too high. In other words, last resort safety net.
406 return intval( min( $depth, 5 ) );
409 /** Expand a list of pages to include pages linked to from that page. */
410 private function getPageLinks( $inputPages, $pageSet, $depth ) {
411 for( ; $depth > 0; --$depth ) {
412 $pageSet = $this->getLinks(
413 $inputPages, $pageSet, 'pagelinks',
414 array( 'pl_namespace AS namespace', 'pl_title AS title' ),
415 array( 'page_id=pl_from' )
417 $inputPages = array_keys( $pageSet );
420 return $pageSet;
424 * Expand a list of pages to include images used in those pages.
426 * @param $inputPages array, list of titles to look up
427 * @param $pageSet array, associative array indexed by titles for output
429 * @return array associative array index by titles
431 private function getImages( $inputPages, $pageSet ) {
432 return $this->getLinks(
433 $inputPages,
434 $pageSet,
435 'imagelinks',
436 array( NS_FILE . ' AS namespace', 'il_to AS title' ),
437 array( 'page_id=il_from' )
442 * Expand a list of pages to include items used in those pages.
444 private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
445 $dbr = wfGetDB( DB_SLAVE );
447 foreach( $inputPages as $page ) {
448 $title = Title::newFromText( $page );
450 if( $title ) {
451 $pageSet[$title->getPrefixedText()] = true;
452 /// @todo Fixme: May or may not be more efficient to batch these
453 /// by namespace when given multiple input pages.
454 $result = $dbr->select(
455 array( 'page', $table ),
456 $fields,
457 array_merge(
458 $join,
459 array(
460 'page_namespace' => $title->getNamespace(),
461 'page_title' => $title->getDBkey()
464 __METHOD__
467 foreach( $result as $row ) {
468 $template = Title::makeTitle( $row->namespace, $row->title );
469 $pageSet[$template->getPrefixedText()] = true;
474 return $pageSet;