From 183851b06bd6c52f3cae5375f433da720d410447 Mon Sep 17 00:00:00 2001
From: Pierre Schmitz
Date: Wed, 11 Oct 2006 18:12:39 +0000
Subject: MediaWiki 1.7.1 restored

---
 includes/SearchUpdate.php | 115 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 115 insertions(+)
 create mode 100644 includes/SearchUpdate.php

(limited to 'includes/SearchUpdate.php')

diff --git a/includes/SearchUpdate.php b/includes/SearchUpdate.php
new file mode 100644
index 00000000..37981a67
--- /dev/null
+++ b/includes/SearchUpdate.php
@@ -0,0 +1,115 @@
+<?php
+/**
+ * See deferred.txt
+ * @package MediaWiki
+ */
+
+/**
+ *
+ * @package MediaWiki
+ */
+class SearchUpdate {
+
+	/* private */ var $mId = 0, $mNamespace, $mTitle, $mText;
+	var $mTitleWords;
+
+	function SearchUpdate( $id, $title, $text = false ) {
+		$nt = Title::newFromText( $title );
+		if( is_object( $nt ) ) {
+			$this->mId = $id;
+			$this->mText = $text;
+
+			$this->mNamespace = $nt->getNamespace();
+			$this->mTitle = $nt->getText(); # Discard namespace
+
+			$this->mTitleWords = $this->mTextWords = array();
+		} else {
+			wfDebug( "SearchUpdate object created with invalid title '$title'\n" );
+		}
+	}
+
+	function doUpdate() {
+		global $wgContLang, $wgDisableSearchUpdate;
+
+		if( $wgDisableSearchUpdate || !$this->mId ) {
+			return false;
+		}
+		$fname = 'SearchUpdate::doUpdate';
+		wfProfileIn( $fname );
+
+		$search = SearchEngine::create();
+		$lc = $search->legalSearchChars() . '&#;';
+
+		if( $this->mText === false ) {
+			$search->updateTitle($this->mId,
+				Title::indexTitle( $this->mNamespace, $this->mTitle ));
+			wfProfileOut( $fname );
+			return;
+		}
+
+		# Language-specific strip/conversion
+		$text = $wgContLang->stripForSearch( $this->mText );
+
+		wfProfileIn( $fname.'-regexps' );
+		$text = preg_replace( "/<\\/?\\s*[A-Za-z][A-Za-z0-9]*\\s*([^>]*?)>/",
+			' ', strtolower( " " . $text /*$this->mText*/ . " " ) ); # Strip HTML markup
+		$text = preg_replace( "/(^|\\n)==\\s*([^\\n]+)\\s*==(\\s)/sD",
+			"\\1\\2 \\2 \\2\\3", $text ); # Emphasize headings
+
+		# Strip external URLs
+		$uc = "A-Za-z0-9_\\/:.,~%\\-+&;#?!=()@\\xA0-\\xFF";
+		$protos = "http|https|ftp|mailto|news|gopher";
+		$pat = "/(^|[^\\[])({$protos}):[{$uc}]+([^{$uc}]|$)/";
+		$text = preg_replace( $pat, "\\1 \\3", $text );
+
+		$p1 = "/([^\\[])\\[({$protos}):[{$uc}]+]/";
+		$p2 = "/([^\\[])\\[({$protos}):[{$uc}]+\\s+([^\\]]+)]/";
+		$text = preg_replace( $p1, "\\1 ", $text );
+		$text = preg_replace( $p2, "\\1 \\3 ", $text );
+
+		# Internal image links
+		$pat2 = "/\\[\\[image:([{$uc}]+)\\.(gif|png|jpg|jpeg)([^{$uc}])/i";
+		$text = preg_replace( $pat2, " \\1 \\3", $text );
+
+		$text = preg_replace( "/([^{$lc}])([{$lc}]+)]]([a-z]+)/",
+			"\\1\\2 \\2\\3", $text ); # Handle [[game]]s
+
+		# Strip all remaining non-search characters
+		$text = preg_replace( "/[^{$lc}]+/", " ", $text );
+
+		# Handle 's, s'
+		#
+		# $text = preg_replace( "/([{$lc}]+)'s /", "\\1 \\1's ", $text );
+		# $text = preg_replace( "/([{$lc}]+)s' /", "\\1s ", $text );
+		#
+		# These tail-anchored regexps are insanely slow. The worst case comes
+		# when Japanese or Chinese text (ie, no word spacing) is written on
+		# a wiki configured for Western UTF-8 mode. The Unicode characters are
+		# expanded to hex codes and the "words" are very long paragraph-length
+		# monstrosities. On a large page the above regexps may take over 20
+		# seconds *each* on a 1GHz-level processor.
+		#
+		# Following are reversed versions which are consistently fast
+		# (about 3 milliseconds on 1GHz-level processor).
+		#
+		$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
+		$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );
+
+		# Strip wiki '' and '''
+		$text = preg_replace( "/''[']*/", " ", $text );
+		wfProfileOut( "$fname-regexps" );
+		$search->update($this->mId, Title::indexTitle( $this->mNamespace, $this->mTitle ),
+			$text);
+		wfProfileOut( $fname );
+	}
+}
+
+/**
+ * Placeholder class
+ * @package MediaWiki
+ */
+class SearchUpdateMyISAM extends SearchUpdate {
+	# Inherits everything
+}
+
+?>
-- 
cgit v1.2.3-54-g00ecf
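
The long comment in doUpdate() above explains why the tail-anchored possessive
regexps were replaced: reversing the string turns the expensive trailing match
into a cheap leading one, and a second strrev() restores the original order.
Below is a minimal standalone sketch of that trick (not part of the patch; the
helper name is hypothetical, and $lc is narrowed to a plain ASCII character
class for illustration):

	<?php
	// Hypothetical helper demonstrating the reversed-regexp technique
	// used in SearchUpdate::doUpdate(). $lc stands in for the class of
	// legal search characters.
	function handlePossessives( $text, $lc = "a-z0-9" ) {
		// Mirror of /([{$lc}]+)'s / run on the reversed string:
		// "dog's " reverses to " s'god", so the anchor is checked first.
		$text = strrev( preg_replace( "/ s'([{$lc}]+)/", " s'\\1 \\1", strrev( $text ) ) );
		// Mirror of /([{$lc}]+)s' /: "dogs' " reverses to " 'sgod".
		$text = strrev( preg_replace( "/ 's([{$lc}]+)/", " s\\1", strrev( $text ) ) );
		return $text;
	}

	echo handlePossessives( " the dog's bone the dogs' bones " );
	// Prints: " the dog dog's bone the dogs bones "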