MediaWiki REL1_21
<?php

class SearchUpdate implements DeferrableUpdate {

	private $mId = 0, $mNamespace, $mTitle, $mText;
	private $mTitleWords;

	function __construct( $id, $title, $text = false ) {
		if ( is_string( $title ) ) {
			$nt = Title::newFromText( $title );
		} else {
			$nt = $title;
		}

		if ( $nt ) {
			$this->mId = $id;
			$this->mText = $text;

			$this->mNamespace = $nt->getNamespace();
			$this->mTitle = $nt->getText(); # Discard namespace

			$this->mTitleWords = $this->mTextWords = array();
		} else {
			wfDebug( "SearchUpdate object created with invalid title '$title'\n" );
		}
	}

	function doUpdate() {
		global $wgContLang, $wgDisableSearchUpdate;

		if ( $wgDisableSearchUpdate || !$this->mId ) {
			return;
		}

		wfProfileIn( __METHOD__ );

		$search = SearchEngine::create();
		$lc = SearchEngine::legalSearchChars() . '&#;';

		if ( $this->mText === false ) {
			$search->updateTitle( $this->mId,
				$search->normalizeText( Title::indexTitle( $this->mNamespace, $this->mTitle ) ) );
			wfProfileOut( __METHOD__ );
			return;
		}

		# Language-specific strip/conversion
		$text = $wgContLang->normalizeForSearch( $this->mText );

		wfProfileIn( __METHOD__ . '-regexps' );
		$text = preg_replace( "/<\\/?\\s*[A-Za-z][^>]*?>/",
			' ', $wgContLang->lc( " " . $text . " " ) ); # Strip HTML markup
		$text = preg_replace( "/(^|\\n)==\\s*([^\\n]+)\\s*==(\\s)/sD",
			"\\1\\2 \\2 \\2\\3", $text ); # Emphasize headings

		# Strip external URLs
		$uc = "A-Za-z0-9_\\/:.,~%\\-+&;#?!=()@\\x80-\\xFF";
		$protos = "http|https|ftp|mailto|news|gopher";
		$pat = "/(^|[^\\[])({$protos}):[{$uc}]+([^{$uc}]|$)/";
		$text = preg_replace( $pat, "\\1 \\3", $text );

		$p1 = "/([^\\[])\\[({$protos}):[{$uc}]+]/";
		$p2 = "/([^\\[])\\[({$protos}):[{$uc}]+\\s+([^\\]]+)]/";
		$text = preg_replace( $p1, "\\1 ", $text );
		$text = preg_replace( $p2, "\\1 \\3 ", $text );

		# Internal image links
		$pat2 = "/\\[\\[image:([{$uc}]+)\\.(gif|png|jpg|jpeg)([^{$uc}])/i";
		$text = preg_replace( $pat2, " \\1 \\3", $text );

		$text = preg_replace( "/([^{$lc}])([{$lc}]+)]]([a-z]+)/",
			"\\1\\2 \\2\\3", $text ); # Handle [[game]]s

		# Strip all remaining non-search characters
		$text = preg_replace( "/[^{$lc}]+/", " ", $text );

		# Handle 's, s'
		#
		# $text = preg_replace( "/([{$lc}]+)'s /", "\\1 \\1's ", $text );
		# $text = preg_replace( "/([{$lc}]+)s' /", "\\1s ", $text );
		#
		# These tail-anchored regexps are insanely slow. The worst case comes
		# when Japanese or Chinese text (i.e., no word spacing) is written on
		# a wiki configured for Western UTF-8 mode. The Unicode characters are
		# expanded to hex codes and the "words" are very long paragraph-length
		# monstrosities. On a large page the above regexps may take over 20
		# seconds *each* on a 1GHz-level processor.
		#
		# The following are reversed versions, which are consistently fast
		# (about 3 milliseconds on a 1GHz-level processor).
		#
		$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
		$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );

		# Strip wiki '' and '''
		$text = preg_replace( "/''[']*/", " ", $text );
		wfProfileOut( __METHOD__ . '-regexps' );

		wfRunHooks( 'SearchUpdate', array( $this->mId, $this->mNamespace, $this->mTitle, &$text ) );

		# Perform the actual update
		$search->update( $this->mId,
			$search->normalizeText( Title::indexTitle( $this->mNamespace, $this->mTitle ) ),
			$search->normalizeText( $text ) );

		wfProfileOut( __METHOD__ );
	}
}

class SearchUpdateMyISAM extends SearchUpdate {
	# Inherits everything
}
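The weighting effect of the "Emphasize headings" replacement is easy to see with concrete input: the heading text is written out three times, so words appearing in section headings occur more often in the index. A minimal standalone sketch, using the same pattern as doUpdate() but with made-up sample text:

<?php
# Standalone illustration of the heading-emphasis replacement.
$text = "\n==Fish==\nfish are friends";
echo preg_replace( "/(^|\\n)==\\s*([^\\n]+)\\s*==(\\s)/sD",
	"\\1\\2 \\2 \\2\\3", $text );
# Prints "\nFish Fish Fish\nfish are friends": the tripled heading
# words weigh more heavily in the resulting search index.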
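The reversed-regexp workaround for 's and s' is also worth seeing in isolation. The sketch below is a standalone illustration, not part of the file: $lc here is a simplified stand-in for SearchEngine::legalSearchChars() . '&#;', and the sample text is invented. It shows how reversing the string turns the slow tail-anchored possessive patterns into cheap head-anchored ones.

<?php
# Standalone illustration of the strrev() trick from doUpdate().
# $lc is a simplified stand-in for the real legal-search-chars class.
$lc = "a-z0-9";
$text = "the dog's bone and the dogs' bones ";

# Forward, tail-anchored equivalents (slow on long unbroken tokens,
# because the engine scans each whole word before testing the anchor):
#   preg_replace( "/([{$lc}]+)'s /", "\\1 \\1's ", $text );
#   preg_replace( "/([{$lc}]+)s' /", "\\1s ", $text );

# Reversed versions: the apostrophe now sits at the head of each
# candidate match, so non-matches are rejected almost immediately.
$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );

echo $text; # "the dog dog's bone and the dogs bones "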
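Since SearchUpdate implements DeferrableUpdate, a caller can either run it directly or hand it to the deferred-update machinery. A hedged sketch of both invocation styles; $pageId, $title, and $rawText are placeholders, not values taken from this file:

<?php
# Hypothetical caller; $pageId, $title and $rawText are placeholders.
$update = new SearchUpdate( $pageId, $title, $rawText );

# Run the index update immediately ...
$update->doUpdate();

# ... or queue it to run at the end of the request, which is how
# deferrable updates are normally dispatched:
DeferredUpdates::addUpdate( new SearchUpdate( $pageId, $title, $rawText ) );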