includes/specials/SpecialExport.php

<?php
/**
 * Implements Special:Export
 *
 * Copyright © 2003-2008 Brion Vibber <brion@pobox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup SpecialPage
 */

/**
 * A special page that allows users to export pages in an XML file
 *
 * @ingroup SpecialPage
 */
class SpecialExport extends SpecialPage {
	private $curonly, $doExport, $pageLinkDepth, $templates;

	public function __construct() {
		parent::__construct( 'Export' );
	}

	public function execute( $par ) {
		$this->setHeaders();
		$this->outputHeader();
		$config = $this->getConfig();

		// Set some variables
		$this->curonly = true;
		$this->doExport = false;
		$request = $this->getRequest();
		$this->templates = $request->getCheck( 'templates' );
		$this->pageLinkDepth = $this->validateLinkDepth(
			$request->getIntOrNull( 'pagelink-depth' )
		);
		$nsindex = '';
		$exportall = false;

		if ( $request->getCheck( 'addcat' ) ) {
			$page = $request->getText( 'pages' );
			$catname = $request->getText( 'catname' );

			if ( $catname !== '' && $catname !== null && $catname !== false ) {
				$t = Title::makeTitleSafe( NS_MAIN, $catname );
				if ( $t ) {
					/**
					 * @todo FIXME: This can lead to hitting memory limit for very large
					 * categories. Ideally we would do the lookup synchronously
					 * during the export in a single query.
					 */
					$catpages = $this->getPagesFromCategory( $t );
					if ( $catpages ) {
						if ( $page !== '' ) {
							$page .= "\n";
						}
						$page .= implode( "\n", $catpages );
					}
				}
			}
		} elseif ( $request->getCheck( 'addns' ) && $config->get( 'ExportFromNamespaces' ) ) {
			$page = $request->getText( 'pages' );
			$nsindex = $request->getText( 'nsindex', '' );

			if ( strval( $nsindex ) !== '' ) {
				/**
				 * Same implementation as above, so same @todo
				 */
				$nspages = $this->getPagesFromNamespace( $nsindex );
				if ( $nspages ) {
					$page .= "\n" . implode( "\n", $nspages );
				}
			}
		} elseif ( $request->getCheck( 'exportall' ) && $config->get( 'ExportAllowAll' ) ) {
			$this->doExport = true;
			$exportall = true;

			/* Although $page and $history are not used later on, we
			nevertheless set them to avoid that PHP notices about using
			undefined variables foul up our XML output (see call to
			doExport(...) further down) */
			$page = '';
			$history = '';
		} elseif ( $request->wasPosted() && $par == '' ) {
			$page = $request->getText( 'pages' );
			$this->curonly = $request->getCheck( 'curonly' );
			$rawOffset = $request->getVal( 'offset' );

			if ( $rawOffset ) {
				$offset = wfTimestamp( TS_MW, $rawOffset );
			} else {
				$offset = null;
			}

			$maxHistory = $config->get( 'ExportMaxHistory' );
			$limit = $request->getInt( 'limit' );
			$dir = $request->getVal( 'dir' );
			$history = [
				'dir' => 'asc',
				'offset' => false,
				'limit' => $maxHistory,
			];
			$historyCheck = $request->getCheck( 'history' );

			if ( $this->curonly ) {
				$history = WikiExporter::CURRENT;
			} elseif ( !$historyCheck ) {
				if ( $limit > 0 && ( $maxHistory == 0 || $limit < $maxHistory ) ) {
					$history['limit'] = $limit;
				}

				if ( !is_null( $offset ) ) {
					$history['offset'] = $offset;
				}

				if ( strtolower( $dir ) == 'desc' ) {
					$history['dir'] = 'desc';
				}
			}

			if ( $page != '' ) {
				$this->doExport = true;
			}
		} else {
			// Default to current-only for GET requests.
			$page = $request->getText( 'pages', $par );
			$historyCheck = $request->getCheck( 'history' );

			if ( $historyCheck ) {
				$history = WikiExporter::FULL;
			} else {
				$history = WikiExporter::CURRENT;
			}

			if ( $page != '' ) {
				$this->doExport = true;
			}
		}

		if ( !$config->get( 'ExportAllowHistory' ) ) {
			// Override
			$history = WikiExporter::CURRENT;
		}

		$list_authors = $request->getCheck( 'listauthors' );
		if ( !$this->curonly || !$config->get( 'ExportAllowListContributors' ) ) {
			$list_authors = false;
		}

		if ( $this->doExport ) {
			$this->getOutput()->disable();

			// Cancel output buffering and gzipping if set
			// This should provide safer streaming for pages with history
			wfResetOutputBuffers();
			$request->response()->header( "Content-type: application/xml; charset=utf-8" );
			$request->response()->header( "X-Robots-Tag: noindex,nofollow" );

			if ( $request->getCheck( 'wpDownload' ) ) {
				// Provide a sane filename suggestion
				$filename = urlencode( $config->get( 'Sitename' ) . '-' . wfTimestampNow() . '.xml' );
				$request->response()->header( "Content-disposition: attachment;filename={$filename}" );
			}

			$this->doExport( $page, $history, $list_authors, $exportall );

			return;
		}

		$out = $this->getOutput();
		$out->addWikiMsg( 'exporttext' );

		if ( $page == '' ) {
			$categoryName = $request->getText( 'catname' );
		} else {
			$categoryName = '';
		}

		$formDescriptor = [
			'catname' => [
				'type' => 'textwithbutton',
				'name' => 'catname',
				'horizontal-label' => true,
				'label-message' => 'export-addcattext',
				'default' => $categoryName,
				'size' => 40,
				'buttontype' => 'submit',
				'buttonname' => 'addcat',
				'buttondefault' => $this->msg( 'export-addcat' )->text(),
			],
		];

		if ( $config->get( 'ExportFromNamespaces' ) ) {
			$formDescriptor += [
				'nsindex' => [
					'type' => 'namespaceselectwithbutton',
					'default' => $nsindex,
					'label-message' => 'export-addnstext',
					'horizontal-label' => true,
					'name' => 'nsindex',
					'id' => 'namespace',
					'cssclass' => 'namespaceselector',
					'buttontype' => 'submit',
					'buttonname' => 'addns',
					'buttondefault' => $this->msg( 'export-addns' )->text(),
				],
			];
		}

		if ( $config->get( 'ExportAllowAll' ) ) {
			$formDescriptor += [
				'exportall' => [
					'type' => 'check',
					'label-message' => 'exportall',
					'name' => 'exportall',
					'id' => 'exportall',
					'default' => $request->wasPosted() ? $request->getCheck( 'exportall' ) : false,
				],
			];
		}

		$formDescriptor += [
			'textarea' => [
				'class' => 'HTMLTextAreaField',
				'name' => 'pages',
				'label-message' => 'export-manual',
				'nodata' => true,
				'rows' => 10,
				'default' => $page,
			],
		];

		if ( $config->get( 'ExportAllowHistory' ) ) {
			$formDescriptor += [
				'curonly' => [
					'type' => 'check',
					'label-message' => 'exportcuronly',
					'name' => 'curonly',
					'id' => 'curonly',
					'default' => $request->wasPosted() ? $request->getCheck( 'curonly' ) : true,
				],
			];
		} else {
			$out->addWikiMsg( 'exportnohistory' );
		}

		$formDescriptor += [
			'templates' => [
				'type' => 'check',
				'label-message' => 'export-templates',
				'name' => 'templates',
				'id' => 'wpExportTemplates',
				'default' => $request->wasPosted() ? $request->getCheck( 'templates' ) : false,
			],
		];

		if ( $config->get( 'ExportMaxLinkDepth' ) || $this->userCanOverrideExportDepth() ) {
			$formDescriptor += [
				'pagelink-depth' => [
					'type' => 'text',
					'name' => 'pagelink-depth',
					'id' => 'pagelink-depth',
					'label-message' => 'export-pagelinks',
					'default' => '0',
					'size' => 20,
				],
			];
		}

		$formDescriptor += [
			'wpDownload' => [
				'type' => 'check',
				'name' => 'wpDownload',
				'id' => 'wpDownload',
				'default' => $request->wasPosted() ? $request->getCheck( 'wpDownload' ) : true,
				'label-message' => 'export-download',
			],
		];

		if ( $config->get( 'ExportAllowListContributors' ) ) {
			$formDescriptor += [
				'listauthors' => [
					'type' => 'check',
					'label-message' => 'exportlistauthors',
					'default' => $request->wasPosted() ? $request->getCheck( 'listauthors' ) : false,
					'name' => 'listauthors',
					'id' => 'listauthors',
				],
			];
		}

		$htmlForm = HTMLForm::factory( 'ooui', $formDescriptor, $this->getContext() );
		$htmlForm->setSubmitTextMsg( 'export-submit' );
		$htmlForm->prepareForm()->displayForm( false );
		$this->addHelpLink( 'Help:Export' );
	}

	/**
	 * @return bool
	 */
	private function userCanOverrideExportDepth() {
		return $this->getUser()->isAllowed( 'override-export-depth' );
	}

	/**
	 * Do the actual page exporting
	 *
	 * @param string $page User input on what page(s) to export
	 * @param int $history One of the WikiExporter history export constants
	 * @param bool $list_authors Whether to add distinct author list (when
	 *   not returning full history)
	 * @param bool $exportall Whether to export everything
	 */
	private function doExport( $page, $history, $list_authors, $exportall ) {
		// If we are grabbing everything, enable full history and ignore the rest
		if ( $exportall ) {
			$history = WikiExporter::FULL;
		} else {
			$pageSet = []; // Inverted index of all pages to look up

			// Split up and normalize input
			foreach ( explode( "\n", $page ) as $pageName ) {
				$pageName = trim( $pageName );
				$title = Title::newFromText( $pageName );
				if ( $title && !$title->isExternal() && $title->getText() !== '' ) {
					// Only record each page once!
					$pageSet[$title->getPrefixedText()] = true;
				}
			}

			// Set of original pages to pass on to further manipulation...
			$inputPages = array_keys( $pageSet );

			// Look up any linked pages if asked...
			if ( $this->templates ) {
				$pageSet = $this->getTemplates( $inputPages, $pageSet );
			}
			$linkDepth = $this->pageLinkDepth;
			if ( $linkDepth ) {
				$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
			}

			$pages = array_keys( $pageSet );

			// Normalize titles to the same format and remove dupes, see bug 17374
			foreach ( $pages as $k => $v ) {
				$pages[$k] = str_replace( " ", "_", $v );
			}

			$pages = array_unique( $pages );
		}

		/* Ok, let's get to it... */
		if ( $history == WikiExporter::CURRENT ) {
			$lb = false;
			$db = wfGetDB( DB_SLAVE );
			$buffer = WikiExporter::BUFFER;
		} else {
			// Use an unbuffered query; histories may be very long!
			$lb = wfGetLBFactory()->newMainLB();
			$db = $lb->getConnection( DB_SLAVE );
			$buffer = WikiExporter::STREAM;

			// This might take a while... :D
			MediaWiki\suppressWarnings();
			set_time_limit( 0 );
			MediaWiki\restoreWarnings();
		}

		$exporter = new WikiExporter( $db, $history, $buffer );
		$exporter->list_authors = $list_authors;
		$exporter->openStream();

		if ( $exportall ) {
			$exporter->allPages();
		} else {
			foreach ( $pages as $page ) {
				# Bug 8824: Only export pages the user can read
				$title = Title::newFromText( $page );
				if ( is_null( $title ) ) {
					// @todo Perhaps output an <error> tag or something.
					continue;
				}

				if ( !$title->userCan( 'read', $this->getUser() ) ) {
					// @todo Perhaps output an <error> tag or something.
					continue;
				}

				$exporter->pageByTitle( $title );
			}
		}

		$exporter->closeStream();

		if ( $lb ) {
			$lb->closeAll();
		}
	}

	/**
	 * @param Title $title
	 * @return array
	 */
	private function getPagesFromCategory( $title ) {
		global $wgContLang;

		$maxPages = $this->getConfig()->get( 'ExportPagelistLimit' );

		$name = $title->getDBkey();

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select(
			[ 'page', 'categorylinks' ],
			[ 'page_namespace', 'page_title' ],
			[ 'cl_from=page_id', 'cl_to' => $name ],
			__METHOD__,
			[ 'LIMIT' => $maxPages ]
		);

		$pages = [];

		foreach ( $res as $row ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}

		return $pages;
	}

	/**
	 * @param int $nsindex
	 * @return array
	 */
	private function getPagesFromNamespace( $nsindex ) {
		global $wgContLang;

		$maxPages = $this->getConfig()->get( 'ExportPagelistLimit' );

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select(
			'page',
			[ 'page_namespace', 'page_title' ],
			[ 'page_namespace' => $nsindex ],
			__METHOD__,
			[ 'LIMIT' => $maxPages ]
		);

		$pages = [];

		foreach ( $res as $row ) {
			$n = $row->page_title;

			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}

		return $pages;
	}

	/**
	 * Expand a list of pages to include templates used in those pages.
	 * @param array $inputPages List of titles to look up
	 * @param array $pageSet Associative array indexed by titles for output
	 * @return array Associative array indexed by titles
	 */
	private function getTemplates( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'templatelinks',
			[ 'namespace' => 'tl_namespace', 'title' => 'tl_title' ],
			[ 'page_id=tl_from' ]
		);
	}

	/**
	 * Validate link depth setting, if available.
	 * @param int $depth
	 * @return int
	 */
	private function validateLinkDepth( $depth ) {
		if ( $depth < 0 ) {
			return 0;
		}

		if ( !$this->userCanOverrideExportDepth() ) {
			$maxLinkDepth = $this->getConfig()->get( 'ExportMaxLinkDepth' );
			if ( $depth > $maxLinkDepth ) {
				return $maxLinkDepth;
			}
		}

		/*
		 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
		 * crazy-big export from being done by someone setting the depth
		 * number too high. In other words, last resort safety net.
		 */

		return intval( min( $depth, 5 ) );
	}

	/**
	 * Expand a list of pages to include pages linked to from that page.
	 * @param array $inputPages
	 * @param array $pageSet
	 * @param int $depth
	 * @return array
	 */
	private function getPageLinks( $inputPages, $pageSet, $depth ) {
		// @codingStandardsIgnoreStart Squiz.WhiteSpace.SemicolonSpacing.Incorrect
		for ( ; $depth > 0; --$depth ) {
			// @codingStandardsIgnoreEnd
			$pageSet = $this->getLinks(
				$inputPages, $pageSet, 'pagelinks',
				[ 'namespace' => 'pl_namespace', 'title' => 'pl_title' ],
				[ 'page_id=pl_from' ]
			);
			$inputPages = array_keys( $pageSet );
		}

		return $pageSet;
	}

	/**
	 * Expand a list of pages to include items used in those pages.
	 * @param array $inputPages Array of page titles
	 * @param array $pageSet
	 * @param string $table
	 * @param array $fields Array of field names
	 * @param array $join
	 * @return array
	 */
	private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
		$dbr = wfGetDB( DB_SLAVE );

		foreach ( $inputPages as $page ) {
			$title = Title::newFromText( $page );

			if ( $title ) {
				$pageSet[$title->getPrefixedText()] = true;
				/// @todo FIXME: May or may not be more efficient to batch these
				/// by namespace when given multiple input pages.
				$result = $dbr->select(
					[ 'page', $table ],
					$fields,
					array_merge(
						$join,
						[
							'page_namespace' => $title->getNamespace(),
							'page_title' => $title->getDBkey()
						]
					),
					__METHOD__
				);

				foreach ( $result as $row ) {
					$template = Title::makeTitle( $row->namespace, $row->title );
					$pageSet[$template->getPrefixedText()] = true;
				}
			}
		}

		return $pageSet;
	}

	protected function getGroupName() {
		return 'pagetools';
	}
}
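
/*
 * Illustrative usage sketch (not part of the original file): the request
 * parameters handled in execute() above can also be supplied directly in a
 * URL. For example, a GET request like the following would stream the current
 * revisions of two pages as a downloadable XML file, with transcluded
 * templates included. The host name and page titles are hypothetical, and
 * %0A is the URL-encoded newline separating entries in 'pages'.
 *
 *   https://example.org/index.php?title=Special:Export
 *       &pages=Main_Page%0AProject:Sandbox&templates=1&wpDownload=1
 *
 * Note that 'pagelink-depth' only takes effect when $wgExportMaxLinkDepth is
 * non-zero or the user holds the 'override-export-depth' right (see
 * validateLinkDepth()), and full-history export is governed by
 * $wgExportAllowHistory and $wgExportMaxHistory.
 */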