<?php
/**
 * Search index updater
 *
 * See deferred.txt
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Search
 */

use MediaWiki\MediaWikiServices;

/**
 * Database independent search index updater
 *
 * @ingroup Search
 */
class SearchUpdate implements DeferrableUpdate {
	/** @var int Page id being updated */
	private $id = 0;

	/** @var Title Title we're updating */
	private $title;

	/** @var Content|bool Content of the page (not text) */
	private $content;

	/** @var WikiPage */
	private $page;

	/**
	 * @param int $id Page id to update
	 * @param Title|string $title Title of page to update
	 * @param Content|string|bool $c Content of the page to update. Default: false.
	 *  If a Content object, text will be extracted from it. String is for back-compat.
	 *  Passing false tells the backend to just update the title, not the content.
	 */
	public function __construct( $id, $title, $c = false ) {
		if ( is_string( $title ) ) {
			$nt = Title::newFromText( $title );
		} else {
			$nt = $title;
		}

		if ( $nt ) {
			$this->id = $id;
			// is_string() check is back-compat for ApprovedRevs
			if ( is_string( $c ) ) {
				$this->content = new TextContent( $c );
			} else {
				$this->content = $c ?: false;
			}
			$this->title = $nt;
		} else {
			wfDebug( "SearchUpdate object created with invalid title '$title'\n" );
		}
	}
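
	// Usage sketch (illustrative, not part of the class): callers normally queue
	// this object as a deferred update after an edit rather than running it
	// directly:
	//
	//     DeferredUpdates::addUpdate( new SearchUpdate( $id, $title, $content ) );
	//
	// doUpdate() below then runs once the main work of the request completes.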

	/**
	 * Perform actual update for the entry
	 */
	public function doUpdate() {
		$config = MediaWikiServices::getInstance()->getSearchEngineConfig();

		if ( $config->getConfig()->get( 'DisableSearchUpdate' ) || !$this->id ) {
			return;
		}

		$seFactory = MediaWikiServices::getInstance()->getSearchEngineFactory();
		foreach ( $config->getSearchTypes() as $type ) {
			$search = $seFactory->create( $type );
			if ( !$search->supports( 'search-update' ) ) {
				continue;
			}

			$normalTitle = $this->getNormalizedTitle( $search );

			if ( $this->getLatestPage() === null ) {
				$search->delete( $this->id, $normalTitle );
				continue;
			} elseif ( $this->content === false ) {
				$search->updateTitle( $this->id, $normalTitle );
				continue;
			}

			$text = $search->getTextFromContent( $this->title, $this->content );
			if ( !$search->textAlreadyUpdatedForIndex() ) {
				$text = $this->updateText( $text, $search );
			}

			# Perform the actual update
			$search->update( $this->id, $normalTitle, $search->normalizeText( $text ) );
		}
	}
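
	// Each configured backend decides what the calls above mean for its index: a
	// SearchEngine subclass advertises itself via supports( 'search-update' ) and
	// then receives delete(), updateTitle() or update() as appropriate.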

	/**
	 * Clean text for indexing. Only really suitable for indexing in databases.
	 * If you're using a real search engine, you'll probably want to override
	 * this behavior and do something nicer with the original wikitext.
	 *
	 * @param string $text
	 * @param SearchEngine $se Search engine
	 * @return string
	 */
	public function updateText( $text, SearchEngine $se = null ) {
		global $wgContLang;

		# Language-specific strip/conversion
		$text = $wgContLang->normalizeForSearch( $text );
		$se = $se ?: MediaWikiServices::getInstance()->newSearchEngine();
		$lc = $se->legalSearchChars() . '&#;';

		$text = preg_replace( "/<\\/?\\s*[A-Za-z][^>]*?>/",
			' ', $wgContLang->lc( " " . $text . " " ) ); # Strip HTML markup
		$text = preg_replace( "/(^|\\n)==\\s*([^\\n]+)\\s*==(\\s)/sD",
			"\\1\\2 \\2 \\2\\3", $text ); # Emphasize headings

		# Strip external URLs
		$uc = "A-Za-z0-9_\\/:.,~%\\-+&;#?!=()@\\x80-\\xFF";
		$protos = "http|https|ftp|mailto|news|gopher";
		$pat = "/(^|[^\\[])({$protos}):[{$uc}]+([^{$uc}]|$)/";
		$text = preg_replace( $pat, "\\1 \\3", $text );

		$p1 = "/([^\\[])\\[({$protos}):[{$uc}]+]/";
		$p2 = "/([^\\[])\\[({$protos}):[{$uc}]+\\s+([^\\]]+)]/";
		$text = preg_replace( $p1, "\\1 ", $text );
		$text = preg_replace( $p2, "\\1 \\3 ", $text );
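		# (bare URLs and "[http://example.com]" links are dropped entirely;
		# "[http://example.com some label]" keeps only "some label")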

		# Internal image links
		$pat2 = "/\\[\\[image:([{$uc}]+)\\.(gif|png|jpg|jpeg)([^{$uc}])/i";
		$text = preg_replace( $pat2, " \\1 \\3", $text );
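		# (e.g. "[[image:sunset.jpg|thumb]]" keeps just the name "sunset";
		# the leftover markup falls to the catch-all strip below)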

		$text = preg_replace( "/([^{$lc}])([{$lc}]+)]]([a-z]+)/",
			"\\1\\2 \\2\\3", $text ); # Handle [[game]]s

		# Strip all remaining non-search characters
		$text = preg_replace( "/[^{$lc}]+/", " ", $text );

		/**
		 * Handle 's, s'
		 *
		 *   $text = preg_replace( "/([{$lc}]+)'s /", "\\1 \\1's ", $text );
		 *   $text = preg_replace( "/([{$lc}]+)s' /", "\\1s ", $text );
		 *
		 * These tail-anchored regexps are insanely slow. The worst case comes
		 * when Japanese or Chinese text (i.e., no word spacing) is written on
		 * a wiki configured for Western UTF-8 mode. The Unicode characters are
		 * expanded to hex codes and the "words" are very long paragraph-length
		 * monstrosities. On a large page the above regexps may take over 20
		 * seconds *each* on a 1GHz-level processor.
		 *
		 * The following are reversed versions, which are consistently fast
		 * (about 3 milliseconds on a 1GHz-level processor).
		 */
		$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
		$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );
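		# (reversing the text makes the apostrophe the first thing matched, i.e.
		# the pattern is head-anchored instead of tail-anchored; the net effect
		# is that "wiki's" is indexed as "wiki wiki's" and "users'" as "users")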

		# Strip wiki '' and '''
		$text = preg_replace( "/''[']*/", " ", $text );

		return $text;
	}

	/**
	 * Get the WikiPage for this SearchUpdate's $id using WikiPage::READ_LATEST,
	 * reusing the same WikiPage object when there are multiple SearchEngine types.
	 *
	 * Returns null if the page has been deleted or is not found.
	 *
	 * @return WikiPage|null
	 */
	private function getLatestPage() {
		if ( !isset( $this->page ) ) {
			$this->page = WikiPage::newFromID( $this->id, WikiPage::READ_LATEST );
		}

		return $this->page;
	}

	/**
	 * Get a normalized string representation of a title suitable for
	 * including in a search index
	 *
	 * @param SearchEngine $search
	 * @return string A stripped-down title string ready for the search index
	 */
	private function getNormalizedTitle( SearchEngine $search ) {
		global $wgContLang;

		$ns = $this->title->getNamespace();
		$title = $this->title->getText();

		$lc = $search->legalSearchChars() . '&#;';
		$t = $wgContLang->normalizeForSearch( $title );
		$t = preg_replace( "/[^{$lc}]+/", ' ', $t );
		$t = $wgContLang->lc( $t );

		# Handle 's, s'
		$t = preg_replace( "/([{$lc}]+)'s( |$)/", "\\1 \\1's ", $t );
		$t = preg_replace( "/([{$lc}]+)s'( |$)/", "\\1s ", $t );

		$t = preg_replace( "/\\s+/", ' ', $t );

		if ( $ns == NS_FILE ) {
			$t = preg_replace( "/ (png|gif|jpg|jpeg|ogg)$/", "", $t );
		}
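		# (drops a trailing file-extension token so file titles are indexed
		# by name alone)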

		return $search->normalizeText( trim( $t ) );
	}
}