MediaWiki
REL1_19
|
00001 <?php 00043 require_once( dirname( __FILE__ ) . '/../Maintenance.php' ); 00044 00045 class CompressOld extends Maintenance { 00049 const LS_INDIVIDUAL = 0; 00050 const LS_CHUNKED = 1; 00051 00052 public function __construct() { 00053 parent::__construct(); 00054 $this->mDescription = 'Compress the text of a wiki'; 00055 $this->addOption( 'type', 'Set compression type to either: gzip|concat', false, true, 't' ); 00056 $this->addOption( 'chunksize', 'Maximum number of revisions in a concat chunk', false, true, 'c' ); 00057 $this->addOption( 'begin-date', 'Earliest date to check for uncompressed revisions', false, true, 'b' ); 00058 $this->addOption( 'end-date', 'Latest revision date to compress', false, true, 'e' ); 00059 $this->addOption( 'startid', 'The old_id to start from', false, true, 's' ); 00060 $this->addOption( 'extdb', 'Store specified revisions in an external cluster (untested)', false, true ); 00061 $this->addOption( 'endid', 'Stop at this old_id', false, true, 'n' ); 00062 } 00063 00064 public function execute() { 00065 global $wgDBname; 00066 if ( !function_exists( "gzdeflate" ) ) { 00067 $this->error( "You must enable zlib support in PHP to compress old revisions!\n" . 00068 "Please see http://www.php.net/manual/en/ref.zlib.php\n", true ); 00069 } 00070 00071 $type = $this->getOption( 'type', 'concat' ); 00072 $chunkSize = $this->getOption( 'chunksize', 20 ); 00073 $startId = $this->getOption( 'startid', 0 ); 00074 $beginDate = $this->getOption( 'begin-date', '' ); 00075 $endDate = $this->getOption( 'end-date', '' ); 00076 $extDB = $this->getOption( 'extdb', '' ); 00077 $endId = $this->getOption( 'endid', false ); 00078 00079 if ( $type != 'concat' && $type != 'gzip' ) { 00080 $this->error( "Type \"{$type}\" not supported" ); 00081 } 00082 00083 if ( $extDB != '' ) { 00084 $this->output( "Compressing database {$wgDBname} to external cluster {$extDB}\n" 00085 . str_repeat( '-', 76 ) . "\n\n" ); 00086 } else { 00087 $this->output( "Compressing database {$wgDBname}\n" 00088 . str_repeat( '-', 76 ) . "\n\n" ); 00089 } 00090 00091 $success = true; 00092 if ( $type == 'concat' ) { 00093 $success = $this->compressWithConcat( $startId, $chunkSize, $beginDate, 00094 $endDate, $extDB, $endId ); 00095 } else { 00096 $this->compressOldPages( $startId, $extDB ); 00097 } 00098 00099 if ( $success ) { 00100 $this->output( "Done.\n" ); 00101 } 00102 } 00103 00105 private function compressOldPages( $start = 0, $extdb = '' ) { 00106 $chunksize = 50; 00107 $this->output( "Starting from old_id $start...\n" ); 00108 $dbw = wfGetDB( DB_MASTER ); 00109 do { 00110 $res = $dbw->select( 'text', array( 'old_id','old_flags','old_text' ), 00111 "old_id>=$start", __METHOD__, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) ); 00112 if( $dbw->numRows( $res ) == 0 ) { 00113 break; 00114 } 00115 $last = $start; 00116 foreach ( $res as $row ) { 00117 # print " {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n"; 00118 $this->compressPage( $row, $extdb ); 00119 $last = $row->old_id; 00120 } 00121 $start = $last + 1; # Deletion may leave long empty stretches 00122 $this->output( "$start...\n" ); 00123 } while( true ); 00124 } 00125 00132 private function compressPage( $row, $extdb ) { 00133 if ( false !== strpos( $row->old_flags, 'gzip' ) || false !== strpos( $row->old_flags, 'object' ) ) { 00134 #print "Already compressed row {$row->old_id}\n"; 00135 return false; 00136 } 00137 $dbw = wfGetDB( DB_MASTER ); 00138 $flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip"; 00139 $compress = gzdeflate( $row->old_text ); 00140 00141 # Store in external storage if required 00142 if ( $extdb !== '' ) { 00143 $storeObj = new ExternalStoreDB; 00144 $compress = $storeObj->store( $extdb, $compress ); 00145 if ( $compress === false ) { 00146 $this->error( "Unable to store object" ); 00147 return false; 00148 } 00149 } 00150 00151 # Update text row 00152 $dbw->update( 'text', 00153 array( /* SET */ 00154 'old_flags' => $flags, 00155 'old_text' => $compress 00156 ), array( /* WHERE */ 00157 'old_id' => $row->old_id 00158 ), __METHOD__, 00159 array( 'LIMIT' => 1 ) 00160 ); 00161 return true; 00162 } 00163 00173 private function compressWithConcat( $startId, $maxChunkSize, $beginDate, 00174 $endDate, $extdb = "", $maxPageId = false ) 00175 { 00176 $loadStyle = self::LS_CHUNKED; 00177 00178 $dbr = wfGetDB( DB_SLAVE ); 00179 $dbw = wfGetDB( DB_MASTER ); 00180 00181 # Set up external storage 00182 if ( $extdb != '' ) { 00183 $storeObj = new ExternalStoreDB; 00184 } 00185 00186 # Get all articles by page_id 00187 if ( !$maxPageId ) { 00188 $maxPageId = $dbr->selectField( 'page', 'max(page_id)', '', __METHOD__ ); 00189 } 00190 $this->output( "Starting from $startId of $maxPageId\n" ); 00191 $pageConds = array(); 00192 00193 /* 00194 if ( $exclude_ns0 ) { 00195 print "Excluding main namespace\n"; 00196 $pageConds[] = 'page_namespace<>0'; 00197 } 00198 if ( $queryExtra ) { 00199 $pageConds[] = $queryExtra; 00200 } 00201 */ 00202 00203 # For each article, get a list of revisions which fit the criteria 00204 00205 # No recompression, use a condition on old_flags 00206 # Don't compress object type entities, because that might produce data loss when 00207 # overwriting bulk storage concat rows. Don't compress external references, because 00208 # the script doesn't yet delete rows from external storage. 00209 $conds = array( 00210 'old_flags NOT ' . $dbr->buildLike( $dbr->anyString(), 'object', $dbr->anyString() ) . ' AND old_flags NOT ' 00211 . $dbr->buildLike( $dbr->anyString(), 'external', $dbr->anyString() ) ); 00212 00213 if ( $beginDate ) { 00214 if ( !preg_match( '/^\d{14}$/', $beginDate ) ) { 00215 $this->error( "Invalid begin date \"$beginDate\"\n" ); 00216 return false; 00217 } 00218 $conds[] = "rev_timestamp>'" . $beginDate . "'"; 00219 } 00220 if ( $endDate ) { 00221 if ( !preg_match( '/^\d{14}$/', $endDate ) ) { 00222 $this->error( "Invalid end date \"$endDate\"\n" ); 00223 return false; 00224 } 00225 $conds[] = "rev_timestamp<'" . $endDate . "'"; 00226 } 00227 if ( $loadStyle == self::LS_CHUNKED ) { 00228 $tables = array( 'revision', 'text' ); 00229 $fields = array( 'rev_id', 'rev_text_id', 'old_flags', 'old_text' ); 00230 $conds[] = 'rev_text_id=old_id'; 00231 $revLoadOptions = 'FOR UPDATE'; 00232 } else { 00233 $tables = array( 'revision' ); 00234 $fields = array( 'rev_id', 'rev_text_id' ); 00235 $revLoadOptions = array(); 00236 } 00237 00238 # Don't work with current revisions 00239 # Don't lock the page table for update either -- TS 2006-04-04 00240 #$tables[] = 'page'; 00241 #$conds[] = 'page_id=rev_page AND rev_id != page_latest'; 00242 00243 for ( $pageId = $startId; $pageId <= $maxPageId; $pageId++ ) { 00244 wfWaitForSlaves(); 00245 00246 # Wake up 00247 $dbr->ping(); 00248 00249 # Get the page row 00250 $pageRes = $dbr->select( 'page', 00251 array('page_id', 'page_namespace', 'page_title','page_latest'), 00252 $pageConds + array('page_id' => $pageId), __METHOD__ ); 00253 if ( $dbr->numRows( $pageRes ) == 0 ) { 00254 continue; 00255 } 00256 $pageRow = $dbr->fetchObject( $pageRes ); 00257 00258 # Display progress 00259 $titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title ); 00260 $this->output( "$pageId\t" . $titleObj->getPrefixedDBkey() . " " ); 00261 00262 # Load revisions 00263 $revRes = $dbw->select( $tables, $fields, 00264 array_merge( array( 00265 'rev_page' => $pageRow->page_id, 00266 # Don't operate on the current revision 00267 # Use < instead of <> in case the current revision has changed 00268 # since the page select, which wasn't locking 00269 'rev_id < ' . $pageRow->page_latest 00270 ), $conds ), 00271 __METHOD__, 00272 $revLoadOptions 00273 ); 00274 $revs = array(); 00275 foreach ( $revRes as $revRow ) { 00276 $revs[] = $revRow; 00277 } 00278 00279 if ( count( $revs ) < 2) { 00280 # No revisions matching, no further processing 00281 $this->output( "\n" ); 00282 continue; 00283 } 00284 00285 # For each chunk 00286 $i = 0; 00287 while ( $i < count( $revs ) ) { 00288 if ( $i < count( $revs ) - $maxChunkSize ) { 00289 $thisChunkSize = $maxChunkSize; 00290 } else { 00291 $thisChunkSize = count( $revs ) - $i; 00292 } 00293 00294 $chunk = new ConcatenatedGzipHistoryBlob(); 00295 $stubs = array(); 00296 $dbw->begin(); 00297 $usedChunk = false; 00298 $primaryOldid = $revs[$i]->rev_text_id; 00299 00300 # Get the text of each revision and add it to the object 00301 for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++ ) { 00302 $oldid = $revs[$i + $j]->rev_text_id; 00303 00304 # Get text 00305 if ( $loadStyle == self::LS_INDIVIDUAL ) { 00306 $textRow = $dbw->selectRow( 'text', 00307 array( 'old_flags', 'old_text' ), 00308 array( 'old_id' => $oldid ), 00309 __METHOD__, 00310 'FOR UPDATE' 00311 ); 00312 $text = Revision::getRevisionText( $textRow ); 00313 } else { 00314 $text = Revision::getRevisionText( $revs[$i + $j] ); 00315 } 00316 00317 if ( $text === false ) { 00318 $this->error( "\nError, unable to get text in old_id $oldid" ); 00319 #$dbw->delete( 'old', array( 'old_id' => $oldid ) ); 00320 } 00321 00322 if ( $extdb == "" && $j == 0 ) { 00323 $chunk->setText( $text ); 00324 $this->output( '.' ); 00325 } else { 00326 # Don't make a stub if it's going to be longer than the article 00327 # Stubs are typically about 100 bytes 00328 if ( strlen( $text ) < 120 ) { 00329 $stub = false; 00330 $this->output( 'x' ); 00331 } else { 00332 $stub = new HistoryBlobStub( $chunk->addItem( $text ) ); 00333 $stub->setLocation( $primaryOldid ); 00334 $stub->setReferrer( $oldid ); 00335 $this->output( '.' ); 00336 $usedChunk = true; 00337 } 00338 $stubs[$j] = $stub; 00339 } 00340 } 00341 $thisChunkSize = $j; 00342 00343 # If we couldn't actually use any stubs because the pages were too small, do nothing 00344 if ( $usedChunk ) { 00345 if ( $extdb != "" ) { 00346 # Move blob objects to External Storage 00347 $stored = $storeObj->store( $extdb, serialize( $chunk )); 00348 if ($stored === false) { 00349 $this->error( "Unable to store object" ); 00350 return false; 00351 } 00352 # Store External Storage URLs instead of Stub placeholders 00353 foreach ($stubs as $stub) { 00354 if ($stub===false) 00355 continue; 00356 # $stored should provide base path to a BLOB 00357 $url = $stored."/".$stub->getHash(); 00358 $dbw->update( 'text', 00359 array( /* SET */ 00360 'old_text' => $url, 00361 'old_flags' => 'external,utf-8', 00362 ), array ( /* WHERE */ 00363 'old_id' => $stub->getReferrer(), 00364 ) 00365 ); 00366 } 00367 } else { 00368 # Store the main object locally 00369 $dbw->update( 'text', 00370 array( /* SET */ 00371 'old_text' => serialize( $chunk ), 00372 'old_flags' => 'object,utf-8', 00373 ), array( /* WHERE */ 00374 'old_id' => $primaryOldid 00375 ) 00376 ); 00377 00378 # Store the stub objects 00379 for ( $j = 1; $j < $thisChunkSize; $j++ ) { 00380 # Skip if not compressing and don't overwrite the first revision 00381 if ( $stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid ) { 00382 $dbw->update( 'text', 00383 array( /* SET */ 00384 'old_text' => serialize($stubs[$j]), 00385 'old_flags' => 'object,utf-8', 00386 ), array( /* WHERE */ 00387 'old_id' => $revs[$i + $j]->rev_text_id 00388 ) 00389 ); 00390 } 00391 } 00392 } 00393 } 00394 # Done, next 00395 $this->output( "/" ); 00396 $dbw->commit(); 00397 $i += $thisChunkSize; 00398 wfWaitForSlaves(); 00399 } 00400 $this->output( "\n" ); 00401 } 00402 return true; 00403 } 00404 00405 } 00406 00407 $maintClass = 'CompressOld'; 00408 require_once( RUN_MAINTENANCE_IF_MAIN );