MediaWiki REL1_24
FileOpBatch.php
<?php
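/**
 * Helper class for performing a batch of FileOp file operations,
 * with optional file journaling and limited per-backend concurrency.
 */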
class FileOpBatch {
    /* Batch size limit */
    const MAX_BATCH_SIZE = 1000; // integer

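    /**
     * Attempt to perform a series of file operations.
     * No locking is done here; callers are responsible for any needed file locking.
     *
     * $opts is an associative array of batch options:
     *   - force        : Don't abort the batch on precheck() failures; the failed
     *                    operations are skipped and the remaining ones attempted.
     *   - nonJournaled : Don't log this operation batch in the file journal.
     *   - concurrency  : Try to do this many operations in parallel when possible.
     *
     * @param array $performOps List of FileOp operations (at most MAX_BATCH_SIZE)
     * @param array $opts Batch operation options
     * @param FileJournal $journal Journal to log the operations to
     * @return Status
     */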
    public static function attempt( array $performOps, array $opts, FileJournal $journal ) {
        $section = new ProfileSection( __METHOD__ );
        $status = Status::newGood();

        $n = count( $performOps );
        if ( $n > self::MAX_BATCH_SIZE ) {
            $status->fatal( 'backend-fail-batchsize', $n, self::MAX_BATCH_SIZE );

            return $status;
        }

        $batchId = $journal->getTimestampedUUID();
        $ignoreErrors = !empty( $opts['force'] );
        $journaled = empty( $opts['nonJournaled'] );
        $maxConcurrency = isset( $opts['concurrency'] ) ? $opts['concurrency'] : 1;

        $entries = array(); // file journal entry list
        $predicates = FileOp::newPredicates(); // account for previous ops in prechecks
        $curBatch = array(); // concurrent FileOp sub-batch accumulation
        $curBatchDeps = FileOp::newDependencies(); // paths used in FileOp sub-batch
        $pPerformOps = array(); // ordered list of concurrent FileOp sub-batches
        $lastBackend = null; // last op backend name
        // Do pre-checks for each operation; abort on failure...
        foreach ( $performOps as $index => $fileOp ) {
            $backendName = $fileOp->getBackend()->getName();
            $fileOp->setBatchId( $batchId ); // transaction ID
            // Decide if this op can be done concurrently within this sub-batch
            // or if a new concurrent sub-batch must be started after this one...
            if ( $fileOp->dependsOn( $curBatchDeps )
                || count( $curBatch ) >= $maxConcurrency
                || ( $backendName !== $lastBackend && count( $curBatch ) )
            ) {
                $pPerformOps[] = $curBatch; // push this batch
                $curBatch = array(); // start a new sub-batch
                $curBatchDeps = FileOp::newDependencies();
            }
            $lastBackend = $backendName;
            $curBatch[$index] = $fileOp; // keep index
            // Update list of affected paths in this batch
            $curBatchDeps = $fileOp->applyDependencies( $curBatchDeps );
            // Simulate performing the operation...
            $oldPredicates = $predicates;
            $subStatus = $fileOp->precheck( $predicates ); // updates $predicates
            $status->merge( $subStatus );
            if ( $subStatus->isOK() ) {
                if ( $journaled ) { // journal log entries
                    $entries = array_merge( $entries,
                        $fileOp->getJournalEntries( $oldPredicates, $predicates ) );
                }
            } else { // operation failed?
                $status->success[$index] = false;
                ++$status->failCount;
                if ( !$ignoreErrors ) {
                    return $status; // abort
                }
            }
        }
        // Push the last sub-batch
        if ( count( $curBatch ) ) {
            $pPerformOps[] = $curBatch;
        }

        // Log the operations in the file journal...
        if ( count( $entries ) ) {
            $subStatus = $journal->logChangeBatch( $entries, $batchId );
            if ( !$subStatus->isOK() ) {
                return $subStatus; // abort
            }
        }

        if ( $ignoreErrors ) { // treat precheck() fatals as mere warnings
            $status->setResult( true, $status->value );
        }

        // Attempt each operation (in parallel if allowed and possible)...
        self::runParallelBatches( $pPerformOps, $status );

        return $status;
    }

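    /**
     * Attempt a list of FileOp sub-batches in series.
     *
     * The operations within each sub-batch are attempted in parallel where the
     * backend supports asynchronous operations; otherwise each one is attempted
     * synchronously. If any operation fails unexpectedly, the remaining sub-batches
     * are marked as failed and skipped, since the precheck() predicates would no
     * longer reflect the actual state of the backend.
     *
     * @param array $pPerformOps Ordered list of FileOp sub-batches (original indexes kept)
     * @param Status $status
     */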
    protected static function runParallelBatches( array $pPerformOps, Status $status ) {
        $aborted = false; // set to true on unexpected errors
        foreach ( $pPerformOps as $performOpsBatch ) {
            if ( $aborted ) { // check batch op abort flag...
                // We can't continue (even with $ignoreErrors) as $predicates is wrong.
                // Log the remaining ops as failed for recovery...
                foreach ( $performOpsBatch as $i => $fileOp ) {
                    $status->success[$i] = false;
                    ++$status->failCount;
                    $performOpsBatch[$i]->logFailure( 'attempt_aborted' );
                }
                continue;
            }
            $statuses = array();
            $opHandles = array();
            // Get the backend; all sub-batch ops belong to a single backend
            $backend = reset( $performOpsBatch )->getBackend();
            // Get the operation handles or actually do it if there is just one.
            // If attemptAsync() returns a Status, it was either due to an error
            // or the backend does not support async ops and did it synchronously.
            foreach ( $performOpsBatch as $i => $fileOp ) {
                if ( !isset( $status->success[$i] ) ) { // didn't already fail in precheck()
                    // Parallel ops may be disabled in config due to missing dependencies,
                    // (e.g. needing popen()). When they are, $performOpsBatch has size 1.
                    $subStatus = ( count( $performOpsBatch ) > 1 )
                        ? $fileOp->attemptAsync()
                        : $fileOp->attempt();
                    if ( $subStatus->value instanceof FileBackendStoreOpHandle ) {
                        $opHandles[$i] = $subStatus->value; // deferred
                    } else {
                        $statuses[$i] = $subStatus; // done already
                    }
                }
            }
            // Try to do all the operations concurrently...
            $statuses = $statuses + $backend->executeOpHandlesInternal( $opHandles );
            // Marshall and merge all the responses (blocking)...
            foreach ( $performOpsBatch as $i => $fileOp ) {
                if ( !isset( $status->success[$i] ) ) { // didn't already fail in precheck()
                    $subStatus = $statuses[$i];
                    $status->merge( $subStatus );
                    if ( $subStatus->isOK() ) {
                        $status->success[$i] = true;
                        ++$status->successCount;
                    } else {
                        $status->success[$i] = false;
                        ++$status->failCount;
                        $aborted = true; // set abort flag; we can't continue
                    }
                }
            }
        }
    }
}
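
In core, this helper is normally driven by FileBackendStore::doOperationsInternal() rather than called directly. As a rough sketch only (not taken from this file), the following shows how a batch might be assembled and attempted, assuming $backend is an already-configured FileBackendStore, $journal is a FileJournal, and that the CreateFileOp/DeleteFileOp constructor signatures and parameter keys shown here match this MediaWiki version:

// Illustrative sketch only; $backend and $journal are assumed to exist,
// and the FileOp parameter keys below are assumptions, not taken from this file.
$performOps = array(
    new CreateFileOp( $backend, array(
        'dst' => 'mwstore://local-backend/local-public/example.txt',
        'content' => 'hello world'
    ) ),
    new DeleteFileOp( $backend, array(
        'src' => 'mwstore://local-backend/local-public/old.txt',
        'ignoreMissingSource' => true
    ) )
);
$status = FileOpBatch::attempt(
    $performOps,
    array( 'nonJournaled' => true, 'concurrency' => 2 ),
    $journal
);
if ( !$status->isOK() ) {
    // Inspect $status->success, $status->successCount and $status->failCount
}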