MediaWiki
REL1_20
|
00001 <?php 00044 require_once( __DIR__ . '/../Maintenance.php' ); 00045 00046 class CompressOld extends Maintenance { 00050 const LS_INDIVIDUAL = 0; 00051 const LS_CHUNKED = 1; 00052 00053 public function __construct() { 00054 parent::__construct(); 00055 $this->mDescription = 'Compress the text of a wiki'; 00056 $this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' ); 00057 $this->addOption( 'chunksize', 'Maximum number of revisions in a concat chunk', false, true, 'c' ); 00058 $this->addOption( 'begin-date', 'Earliest date to check for uncompressed revisions', false, true, 'b' ); 00059 $this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' ); 00060 $this->addOption( 'startid', 'The id to start from (gzip -> text table, concat -> page table)', false, true, 's' ); 00061 $this->addOption( 'extdb', 'Store specified revisions in an external cluster (untested)', false, true ); 00062 $this->addOption( 'endid', 'The page_id to stop at (only when using concat compression type)', false, true, 'n' ); 00063 } 00064 00065 public function execute() { 00066 global $wgDBname; 00067 if ( !function_exists( "gzdeflate" ) ) { 00068 $this->error( "You must enable zlib support in PHP to compress old revisions!\n" . 00069 "Please see http://www.php.net/manual/en/ref.zlib.php\n", true ); 00070 } 00071 00072 $type = $this->getOption( 'type', 'concat' ); 00073 $chunkSize = $this->getOption( 'chunksize', 20 ); 00074 $startId = $this->getOption( 'startid', 0 ); 00075 $beginDate = $this->getOption( 'begin-date', '' ); 00076 $endDate = $this->getOption( 'end-date', '' ); 00077 $extDB = $this->getOption( 'extdb', '' ); 00078 $endId = $this->getOption( 'endid', false ); 00079 00080 if ( $type != 'concat' && $type != 'gzip' ) { 00081 $this->error( "Type \"{$type}\" not supported" ); 00082 } 00083 00084 if ( $extDB != '' ) { 00085 $this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n" 00086 . str_repeat( '-', 76 ) . "\n\n" ); 00087 } else { 00088 $this->output( "Compressing database {$wgDBname}\n" 00089 . str_repeat( '-', 76 ) . "\n\n" ); 00090 } 00091 00092 $success = true; 00093 if ( $type == 'concat' ) { 00094 $success = $this->compressWithConcat( $startId, $chunkSize, $beginDate, 00095 $endDate, $extDB, $endId ); 00096 } else { 00097 $this->compressOldPages( $startId, $extDB ); 00098 } 00099 00100 if ( $success ) { 00101 $this->output( "Done.\n" ); 00102 } 00103 } 00104 00106 private function compressOldPages( $start = 0, $extdb = '' ) { 00107 $chunksize = 50; 00108 $this->output( "Starting from old_id $start...\n" ); 00109 $dbw = wfGetDB( DB_MASTER ); 00110 do { 00111 $res = $dbw->select( 'text', array( 'old_id','old_flags','old_text' ), 00112 "old_id>=$start", __METHOD__, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) ); 00113 if( $dbw->numRows( $res ) == 0 ) { 00114 break; 00115 } 00116 $last = $start; 00117 foreach ( $res as $row ) { 00118 # print " {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n"; 00119 $this->compressPage( $row, $extdb ); 00120 $last = $row->old_id; 00121 } 00122 $start = $last + 1; # Deletion may leave long empty stretches 00123 $this->output( "$start...\n" ); 00124 } while( true ); 00125 } 00126 00133 private function compressPage( $row, $extdb ) { 00134 if ( false !== strpos( $row->old_flags, 'gzip' ) || false !== strpos( $row->old_flags, 'object' ) ) { 00135 #print "Already compressed row {$row->old_id}\n"; 00136 return false; 00137 } 00138 $dbw = wfGetDB( DB_MASTER ); 00139 $flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip"; 00140 $compress = gzdeflate( $row->old_text ); 00141 00142 # Store in external storage if required 00143 if ( $extdb !== '' ) { 00144 $storeObj = new ExternalStoreDB; 00145 $compress = $storeObj->store( $extdb, $compress ); 00146 if ( $compress === false ) { 00147 $this->error( "Unable to store object" ); 00148 return false; 00149 } 00150 } 00151 00152 # Update text row 00153 $dbw->update( 'text', 00154 array( /* SET */ 00155 'old_flags' => $flags, 00156 'old_text' => $compress 00157 ), array( /* WHERE */ 00158 'old_id' => $row->old_id 00159 ), __METHOD__, 00160 array( 'LIMIT' => 1 ) 00161 ); 00162 return true; 00163 } 00164 00174 private function compressWithConcat( $startId, $maxChunkSize, $beginDate, 00175 $endDate, $extdb = "", $maxPageId = false ) 00176 { 00177 $loadStyle = self::LS_CHUNKED; 00178 00179 $dbr = wfGetDB( DB_SLAVE ); 00180 $dbw = wfGetDB( DB_MASTER ); 00181 00182 # Set up external storage 00183 if ( $extdb != '' ) { 00184 $storeObj = new ExternalStoreDB; 00185 } 00186 00187 # Get all articles by page_id 00188 if ( !$maxPageId ) { 00189 $maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ ); 00190 } 00191 $this->output( "Starting from $startId of $maxPageId\n" ); 00192 $pageConds = array(); 00193 00194 /* 00195 if ( $exclude_ns0 ) { 00196 print "Excluding main namespace\n"; 00197 $pageConds[] = 'page_namespace<>0'; 00198 } 00199 if ( $queryExtra ) { 00200 $pageConds[] = $queryExtra; 00201 } 00202 */ 00203 00204 # For each article, get a list of revisions which fit the criteria 00205 00206 # No recompression, use a condition on old_flags 00207 # Don't compress object type entities, because that might produce data loss when 00208 # overwriting bulk storage concat rows. Don't compress external references, because 00209 # the script doesn't yet delete rows from external storage. 00210 $conds = array( 00211 'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() ) . ' AND old_flags NOT ' 00212 . $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() ) ); 00213 00214 if ( $beginDate ) { 00215 if ( !preg_match( '/^\d{14}$/', $beginDate ) ) { 00216 $this->error( "Invalid begin date \"$beginDate\"\n" ); 00217 return false; 00218 } 00219 $conds[] = "rev_timestamp>'" . $beginDate . "'"; 00220 } 00221 if ( $endDate ) { 00222 if ( !preg_match( '/^\d{14}$/', $endDate ) ) { 00223 $this->error( "Invalid end date \"$endDate\"\n" ); 00224 return false; 00225 } 00226 $conds[] = "rev_timestamp<'" . $endDate . "'"; 00227 } 00228 if ( $loadStyle == self::LS_CHUNKED ) { 00229 $tables = array( 'revision', 'text' ); 00230 $fields = array( 'rev_id', 'rev_text_id', 'old_flags', 'old_text' ); 00231 $conds[] = 'rev_text_id=old_id'; 00232 $revLoadOptions = 'FOR UPDATE'; 00233 } else { 00234 $tables = array( 'revision' ); 00235 $fields = array( 'rev_id', 'rev_text_id' ); 00236 $revLoadOptions = array(); 00237 } 00238 00239 # Don't work with current revisions 00240 # Don't lock the page table for update either -- TS 2006-04-04 00241 #$tables[] = 'page'; 00242 #$conds[] = 'page_id=rev_page AND rev_id != page_latest'; 00243 00244 for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) { 00245 wfWaitForSlaves(); 00246 00247 # Wake up 00248 $dbr->ping(); 00249 00250 # Get the page row 00251 $pageRes = $dbr->select( 'page', 00252 array('page_id', 'page_namespace', 'page_title','page_latest'), 00253 $pageConds + array('page_id' => $pageId), __METHOD__ ); 00254 if ( $dbr->numRows( $pageRes ) == 0 ) { 00255 continue; 00256 } 00257 $pageRow = $dbr->fetchObject( $pageRes ); 00258 00259 # Display progress 00260 $titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title ); 00261 $this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " ); 00262 00263 # Load revisions 00264 $revRes = $dbw->select( $tables, $fields, 00265 array_merge( array( 00266 'rev_page' => $pageRow->page_id, 00267 # Don't operate on the current revision 00268 # Use < instead of <> in case the current revision has changed 00269 # since the page select, which wasn't locking 00270 'rev_id < ' . $pageRow->page_latest 00271 ), $conds ), 00272 __METHOD__, 00273 $revLoadOptions 00274 ); 00275 $revs = array(); 00276 foreach ( $revRes as $revRow ) { 00277 $revs[] = $revRow; 00278 } 00279 00280 if ( count( $revs ) < 2) { 00281 # No revisions matching, no further processing 00282 $this->output( "\n" ); 00283 continue; 00284 } 00285 00286 # For each chunk 00287 $i = 0; 00288 while ( $i < count( $revs ) ) { 00289 if ( $i < count( $revs ) - $maxChunkSize ) { 00290 $thisChunkSize = $maxChunkSize; 00291 } else { 00292 $thisChunkSize = count( $revs ) - $i; 00293 } 00294 00295 $chunk = new ConcatenatedGzipHistoryBlob(); 00296 $stubs = array(); 00297 $dbw->begin( __METHOD__ ); 00298 $usedChunk = false; 00299 $primaryOldid = $revs[$i]->rev_text_id; 00300 00301 # Get the text of each revision and add it to the object 00302 for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) { 00303 $oldid = $revs[$i + $j]->rev_text_id; 00304 00305 # Get text 00306 if ( $loadStyle == self::LS_INDIVIDUAL ) { 00307 $textRow = $dbw->selectRow( 'text', 00308 array( 'old_flags', 'old_text' ), 00309 array( 'old_id' => $oldid ), 00310 __METHOD__, 00311 'FOR UPDATE' 00312 ); 00313 $text = Revision::getRevisionText( $textRow ); 00314 } else { 00315 $text = Revision::getRevisionText( $revs[$i + $j] ); 00316 } 00317 00318 if ( $text === false ) { 00319 $this->error( "\nError, unable to get text in old_id $oldid" ); 00320 #$dbw->delete( 'old', array( 'old_id' => $oldid ) ); 00321 } 00322 00323 if ( $extdb == "" && $j == 0 ) { 00324 $chunk->setText( $text ); 00325 $this->output( '.' ); 00326 } else { 00327 # Don't make a stub if it's going to be longer than the article 00328 # Stubs are typically about 100 bytes 00329 if ( strlen( $text ) < 120 ) { 00330 $stub = false; 00331 $this->output( 'x' ); 00332 } else { 00333 $stub = new HistoryBlobStub( $chunk->addItem( $text ) ); 00334 $stub->setLocation( $primaryOldid ); 00335 $stub->setReferrer( $oldid ); 00336 $this->output( '.' ); 00337 $usedChunk = true; 00338 } 00339 $stubs[$j] = $stub; 00340 } 00341 } 00342 $thisChunkSize = $j; 00343 00344 # If we couldn't actually use any stubs because the pages were too small, do nothing 00345 if ( $usedChunk ) { 00346 if ( $extdb != "" ) { 00347 # Move blob objects to External Storage 00348 $stored = $storeObj->store( $extdb, serialize( $chunk )); 00349 if ($stored === false) { 00350 $this->error( "Unable to store object" ); 00351 return false; 00352 } 00353 # Store External Storage URLs instead of Stub placeholders 00354 foreach ($stubs as $stub) { 00355 if ($stub===false) 00356 continue; 00357 # $stored should provide base path to a BLOB 00358 $url = $stored."/".$stub->getHash(); 00359 $dbw->update( 'text', 00360 array( /* SET */ 00361 'old_text' => $url, 00362 'old_flags' => 'external,utf-8', 00363 ), array ( /* WHERE */ 00364 'old_id' => $stub->getReferrer(), 00365 ) 00366 ); 00367 } 00368 } else { 00369 # Store the main object locally 00370 $dbw->update( 'text', 00371 array( /* SET */ 00372 'old_text' => serialize( $chunk ), 00373 'old_flags' => 'object,utf-8', 00374 ), array( /* WHERE */ 00375 'old_id' => $primaryOldid 00376 ) 00377 ); 00378 00379 # Store the stub objects 00380 for ( $j = 1; $j < $thisChunkSize; $j++ ) { 00381 # Skip if not compressing and don't overwrite the first revision 00382 if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) { 00383 $dbw->update( 'text', 00384 array( /* SET */ 00385 'old_text' => serialize($stubs[$j]), 00386 'old_flags' => 'object,utf-8', 00387 ), array( /* WHERE */ 00388 'old_id' => $revs[$i + $j]->rev_text_id 00389 ) 00390 ); 00391 } 00392 } 00393 } 00394 } 00395 # Done, next 00396 $this->output( "/" ); 00397 $dbw->commit( __METHOD__ ); 00398 $i += $thisChunkSize; 00399 wfWaitForSlaves(); 00400 } 00401 $this->output( "\n" ); 00402 } 00403 return true; 00404 } 00405 00406 } 00407 00408 $maintClass = 'CompressOld'; 00409 require_once( RUN_MAINTENANCE_IF_MAIN );