MediaWiki REL1_22
compressOld.php
<?php
/**
 * Compress the text of a wiki.
 */

require_once __DIR__ . '/../Maintenance.php';

/**
 * Maintenance script that compresses the text of a wiki.
 *
 * @ingroup Maintenance
 */
class CompressOld extends Maintenance {
	/**
	 * Load styles for the concat pass: load each revision's text row
	 * individually, or join against the text table and load in chunks.
	 */
	const LS_INDIVIDUAL = 0;
	const LS_CHUNKED = 1;

	public function __construct() {
		parent::__construct();
		$this->mDescription = 'Compress the text of a wiki';
		$this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' );
		$this->addOption( 'chunksize', 'Maximum number of revisions in a concat chunk', false, true, 'c' );
		$this->addOption( 'begin-date', 'Earliest date to check for uncompressed revisions', false, true, 'b' );
		$this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' );
		$this->addOption( 'startid', 'The id to start from (gzip -> text table, concat -> page table)', false, true, 's' );
		$this->addOption( 'extdb', 'Store specified revisions in an external cluster (untested)', false, true );
		$this->addOption( 'endid', 'The page_id to stop at (only when using concat compression type)', false, true, 'n' );
	}

	public function execute() {
		global $wgDBname;
		if ( !function_exists( "gzdeflate" ) ) {
			$this->error( "You must enable zlib support in PHP to compress old revisions!\n" .
				"Please see http://www.php.net/manual/en/ref.zlib.php\n", true );
		}

		$type = $this->getOption( 'type', 'concat' );
		$chunkSize = $this->getOption( 'chunksize', 20 );
		$startId = $this->getOption( 'startid', 0 );
		$beginDate = $this->getOption( 'begin-date', '' );
		$endDate = $this->getOption( 'end-date', '' );
		$extDB = $this->getOption( 'extdb', '' );
		$endId = $this->getOption( 'endid', false );

		if ( $type != 'concat' && $type != 'gzip' ) {
			$this->error( "Type \"{$type}\" not supported" );
		}

		if ( $extDB != '' ) {
			$this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		} else {
			$this->output( "Compressing database {$wgDBname}\n"
				. str_repeat( '-', 76 ) . "\n\n" );
		}

		$success = true;
		if ( $type == 'concat' ) {
			$success = $this->compressWithConcat( $startId, $chunkSize, $beginDate,
				$endDate, $extDB, $endId );
		} else {
			$this->compressOldPages( $startId, $extDB );
		}

		if ( $success ) {
			$this->output( "Done.\n" );
		}
	}
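
	/*
	 * Summary sketch of the two modes implemented below: 'gzip' walks the
	 * text table from --startid and deflates each row in place, while
	 * 'concat' walks the page table and bundles each page's non-current
	 * revisions into ConcatenatedGzipHistoryBlob chunks.
	 */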
"\n\n" ); 00095 } 00096 00097 $success = true; 00098 if ( $type == 'concat' ) { 00099 $success = $this->compressWithConcat( $startId, $chunkSize, $beginDate, 00100 $endDate, $extDB, $endId ); 00101 } else { 00102 $this->compressOldPages( $startId, $extDB ); 00103 } 00104 00105 if ( $success ) { 00106 $this->output( "Done.\n" ); 00107 } 00108 } 00109 00111 private function compressOldPages( $start = 0, $extdb = '' ) { 00112 $chunksize = 50; 00113 $this->output( "Starting from old_id $start...\n" ); 00114 $dbw = wfGetDB( DB_MASTER ); 00115 do { 00116 $res = $dbw->select( 'text', array( 'old_id', 'old_flags', 'old_text' ), 00117 "old_id>=$start", __METHOD__, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) ); 00118 if ( $res->numRows() == 0 ) { 00119 break; 00120 } 00121 $last = $start; 00122 foreach ( $res as $row ) { 00123 # print " {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n"; 00124 $this->compressPage( $row, $extdb ); 00125 $last = $row->old_id; 00126 } 00127 $start = $last + 1; # Deletion may leave long empty stretches 00128 $this->output( "$start...\n" ); 00129 } while ( true ); 00130 } 00131 00138 private function compressPage( $row, $extdb ) { 00139 if ( false !== strpos( $row->old_flags, 'gzip' ) || false !== strpos( $row->old_flags, 'object' ) ) { 00140 #print "Already compressed row {$row->old_id}\n"; 00141 return false; 00142 } 00143 $dbw = wfGetDB( DB_MASTER ); 00144 $flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip"; 00145 $compress = gzdeflate( $row->old_text ); 00146 00147 # Store in external storage if required 00148 if ( $extdb !== '' ) { 00149 $storeObj = new ExternalStoreDB; 00150 $compress = $storeObj->store( $extdb, $compress ); 00151 if ( $compress === false ) { 00152 $this->error( "Unable to store object" ); 00153 return false; 00154 } 00155 } 00156 00157 # Update text row 00158 $dbw->update( 'text', 00159 array( /* SET */ 00160 'old_flags' => $flags, 00161 'old_text' => $compress 00162 ), array( /* WHERE */ 00163 'old_id' => $row->old_id 00164 ), __METHOD__, 00165 array( 'LIMIT' => 1 ) 00166 ); 00167 return true; 00168 } 00169 00179 private function compressWithConcat( $startId, $maxChunkSize, $beginDate, 00180 $endDate, $extdb = "", $maxPageId = false ) 00181 { 00182 $loadStyle = self::LS_CHUNKED; 00183 00184 $dbr = wfGetDB( DB_SLAVE ); 00185 $dbw = wfGetDB( DB_MASTER ); 00186 00187 # Set up external storage 00188 if ( $extdb != '' ) { 00189 $storeObj = new ExternalStoreDB; 00190 } 00191 00192 # Get all articles by page_id 00193 if ( !$maxPageId ) { 00194 $maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ ); 00195 } 00196 $this->output( "Starting from $startId of $maxPageId\n" ); 00197 $pageConds = array(); 00198 00199 /* 00200 if ( $exclude_ns0 ) { 00201 print "Excluding main namespace\n"; 00202 $pageConds[] = 'page_namespace<>0'; 00203 } 00204 if ( $queryExtra ) { 00205 $pageConds[] = $queryExtra; 00206 } 00207 */ 00208 00209 # For each article, get a list of revisions which fit the criteria 00210 00211 # No recompression, use a condition on old_flags 00212 # Don't compress object type entities, because that might produce data loss when 00213 # overwriting bulk storage concat rows. Don't compress external references, because 00214 # the script doesn't yet delete rows from external storage. 00215 $conds = array( 00216 'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() ) . ' AND old_flags NOT ' 00217 . 

	/**
	 * Compress the text, concatenating each page's old revisions into chunks.
	 *
	 * @param int $startId page_id to start from
	 * @param int $maxChunkSize Maximum number of revisions in a concat chunk
	 * @param string $beginDate 14-digit timestamp, or ''
	 * @param string $endDate 14-digit timestamp, or ''
	 * @param string $extdb Name of the external storage cluster, or ''
	 * @param bool|int $maxPageId page_id to stop at
	 * @return bool
	 */
	private function compressWithConcat( $startId, $maxChunkSize, $beginDate,
		$endDate, $extdb = "", $maxPageId = false )
	{
		$loadStyle = self::LS_CHUNKED;

		$dbr = wfGetDB( DB_SLAVE );
		$dbw = wfGetDB( DB_MASTER );

		# Set up external storage
		if ( $extdb != '' ) {
			$storeObj = new ExternalStoreDB;
		}

		# Get all articles by page_id
		if ( !$maxPageId ) {
			$maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ );
		}
		$this->output( "Starting from $startId of $maxPageId\n" );
		$pageConds = array();

		/*
		if ( $exclude_ns0 ) {
			print "Excluding main namespace\n";
			$pageConds[] = 'page_namespace<>0';
		}
		if ( $queryExtra ) {
			$pageConds[] = $queryExtra;
		}
		*/

		# For each article, get a list of revisions which fit the criteria

		# No recompression, use a condition on old_flags
		# Don't compress object type entities, because that might produce data loss when
		# overwriting bulk storage concat rows. Don't compress external references, because
		# the script doesn't yet delete rows from external storage.
		$conds = array(
			'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() )
				. ' AND old_flags NOT '
				. $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() ) );

		if ( $beginDate ) {
			if ( !preg_match( '/^\d{14}$/', $beginDate ) ) {
				$this->error( "Invalid begin date \"$beginDate\"\n" );
				return false;
			}
			$conds[] = "rev_timestamp>'" . $beginDate . "'";
		}
		if ( $endDate ) {
			if ( !preg_match( '/^\d{14}$/', $endDate ) ) {
				$this->error( "Invalid end date \"$endDate\"\n" );
				return false;
			}
			$conds[] = "rev_timestamp<'" . $endDate . "'";
		}
		if ( $loadStyle == self::LS_CHUNKED ) {
			$tables = array( 'revision', 'text' );
			$fields = array( 'rev_id', 'rev_text_id', 'old_flags', 'old_text' );
			$conds[] = 'rev_text_id=old_id';
			$revLoadOptions = 'FOR UPDATE';
		} else {
			$tables = array( 'revision' );
			$fields = array( 'rev_id', 'rev_text_id' );
			$revLoadOptions = array();
		}

		# Don't work with current revisions
		# Don't lock the page table for update either -- TS 2006-04-04
		#$tables[] = 'page';
		#$conds[] = 'page_id=rev_page AND rev_id != page_latest';
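
		/*
		 * Layout sketch for the loop below (a summary, not taken from the
		 * original file): each chunk nominates the first revision's text row
		 * as the "primary" row, which ends up holding the serialized
		 * ConcatenatedGzipHistoryBlob with flags 'object,utf-8'; the other
		 * revisions in the chunk are rewritten as serialized HistoryBlobStub
		 * pointers (or external-storage URLs) referencing that primary row.
		 */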

		for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) {
			wfWaitForSlaves();

			# Wake up
			$dbr->ping();

			# Get the page row
			$pageRes = $dbr->select( 'page',
				array( 'page_id', 'page_namespace', 'page_title', 'page_latest' ),
				$pageConds + array( 'page_id' => $pageId ), __METHOD__ );
			if ( $pageRes->numRows() == 0 ) {
				continue;
			}
			$pageRow = $dbr->fetchObject( $pageRes );

			# Display progress
			$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
			$this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " );

			# Load revisions
			$revRes = $dbw->select( $tables, $fields,
				array_merge( array(
					'rev_page' => $pageRow->page_id,
					# Don't operate on the current revision
					# Use < instead of <> in case the current revision has changed
					# since the page select, which wasn't locking
					'rev_id < ' . $pageRow->page_latest
				), $conds ),
				__METHOD__,
				$revLoadOptions
			);
			$revs = array();
			foreach ( $revRes as $revRow ) {
				$revs[] = $revRow;
			}

			if ( count( $revs ) < 2 ) {
				# No revisions matching, no further processing
				$this->output( "\n" );
				continue;
			}

			# For each chunk
			$i = 0;
			while ( $i < count( $revs ) ) {
				if ( $i < count( $revs ) - $maxChunkSize ) {
					$thisChunkSize = $maxChunkSize;
				} else {
					$thisChunkSize = count( $revs ) - $i;
				}

				$chunk = new ConcatenatedGzipHistoryBlob();
				$stubs = array();
				$dbw->begin( __METHOD__ );
				$usedChunk = false;
				$primaryOldid = $revs[$i]->rev_text_id;

				# Get the text of each revision and add it to the object
				for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) {
					$oldid = $revs[$i + $j]->rev_text_id;

					# Get text
					if ( $loadStyle == self::LS_INDIVIDUAL ) {
						$textRow = $dbw->selectRow( 'text',
							array( 'old_flags', 'old_text' ),
							array( 'old_id' => $oldid ),
							__METHOD__,
							'FOR UPDATE'
						);
						$text = Revision::getRevisionText( $textRow );
					} else {
						$text = Revision::getRevisionText( $revs[$i + $j] );
					}

					if ( $text === false ) {
						$this->error( "\nError, unable to get text in old_id $oldid" );
						#$dbw->delete( 'old', array( 'old_id' => $oldid ) );
					}

					if ( $extdb == "" && $j == 0 ) {
						$chunk->setText( $text );
						$this->output( '.' );
					} else {
						# Don't make a stub if it's going to be longer than the article
						# Stubs are typically about 100 bytes
						if ( strlen( $text ) < 120 ) {
							$stub = false;
							$this->output( 'x' );
						} else {
							$stub = new HistoryBlobStub( $chunk->addItem( $text ) );
							$stub->setLocation( $primaryOldid );
							$stub->setReferrer( $oldid );
							$this->output( '.' );
							$usedChunk = true;
						}
						$stubs[$j] = $stub;
					}
				}
				$thisChunkSize = $j;

				# If we couldn't actually use any stubs because the pages were too small, do nothing
				if ( $usedChunk ) {
					if ( $extdb != "" ) {
						# Move blob objects to External Storage
						$stored = $storeObj->store( $extdb, serialize( $chunk ) );
						if ( $stored === false ) {
							$this->error( "Unable to store object" );
							return false;
						}
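						/*
						 * Assumption (not stated in this file): ExternalStoreDB::store()
						 * returns a base URL such as "DB://cluster/id", so appending
						 * the stub hash below yields a full text address like
						 * "DB://cluster/id/<hash>".
						 */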
						# Store External Storage URLs instead of Stub placeholders
						foreach ( $stubs as $stub ) {
							if ( $stub === false ) {
								continue;
							}
							# $stored should provide base path to a BLOB
							$url = $stored . "/" . $stub->getHash();
							$dbw->update( 'text',
								array( /* SET */
									'old_text' => $url,
									'old_flags' => 'external,utf-8',
								), array( /* WHERE */
									'old_id' => $stub->getReferrer(),
								)
							);
						}
					} else {
						# Store the main object locally
						$dbw->update( 'text',
							array( /* SET */
								'old_text' => serialize( $chunk ),
								'old_flags' => 'object,utf-8',
							), array( /* WHERE */
								'old_id' => $primaryOldid
							)
						);

						# Store the stub objects
						for ( $j = 1; $j < $thisChunkSize; $j++ ) {
							# Skip if not compressing and don't overwrite the first revision
							if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) {
								$dbw->update( 'text',
									array( /* SET */
										'old_text' => serialize( $stubs[$j] ),
										'old_flags' => 'object,utf-8',
									), array( /* WHERE */
										'old_id' => $revs[$i + $j]->rev_text_id
									)
								);
							}
						}
					}
				}
				# Done, next
				$this->output( "/" );
				$dbw->commit( __METHOD__ );
				$i += $thisChunkSize;
				wfWaitForSlaves();
			}
			$this->output( "\n" );
		}
		return true;
	}

}

$maintClass = 'CompressOld';
require_once RUN_MAINTENANCE_IF_MAIN;
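
A typical invocation might look like the following (a sketch: the id and date
values are illustrative; only the 'concat' default and chunk size of 20 come
from the code above, and timestamps must be 14-digit values as validated in
compressWithConcat()):

    php compressOld.php --type=concat --chunksize=20 --startid=1 --begin-date=20050101000000 --end-date=20130101000000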