This is a simple example you can reuse to split the processing of a data set across multiple processes for faster execution. Put all of the data into $set, fill in the process() function with whatever you want done to each item, and let 'er loose! I personally use it for telnet scripts, because the time spent waiting on a single telnet session is horrible, and this way I can run many sessions at once while I wait for the responses.
/**
 * Splits the given set into at most $count subsets of approximately equal size.
 *
 * @param array $set   Data to divide.
 * @param int   $count Desired number of subsets (values below 1 are treated as 1).
 *
 * @return array List of chunks; an empty array when $set is empty.
 */
function array_split($set, $count)
{
    if (count($set) === 0) {
        // array_chunk() rejects a chunk length of 0 (ValueError in PHP 8),
        // so short-circuit the empty-input case.
        return array();
    }
    // ceil() returns a float; cast explicitly for array_chunk()'s int length.
    // max(1, ...) guards against a division by zero when $count < 1.
    $subset_size = (int) ceil(count($set) / max(1, $count));
    return array_chunk($set, $subset_size);
}

/**
 * Forks into up to $process_count separate processes and executes the function
 * named in $job in each process, splitting the handling of the data in $set
 * across the processes. Blocks until every child has exited.
 *
 * @param array    $set           Data to process.
 * @param callable $job           Callback invoked once per child with its subset.
 * @param int      $process_count Maximum number of child processes to fork.
 *
 * @return void
 */
function fork_exec($set, $job, $process_count)
{
    $subsets  = array_split($set, $process_count);
    $children = array();

    // Launch all of the children and remember their PIDs.
    foreach ($subsets as $a_set) {
        $pid = pcntl_fork();
        if ($pid == -1) {
            die("Error forking");
        } elseif ($pid == 0) {
            // Child: process our slice, then exit so we never fall through
            // into the parent's remaining script (e.g. the timing code below).
            call_user_func($job, $a_set);
            exit(0);
        } else {
            $children[] = $pid;
        }
    }

    // Parent: reap each child so none are left as zombies.
    while (count($children) > 0) {
        $pid = array_shift($children);
        // pcntl_waitpid() fills $status by reference; initialize it first.
        $status = 0;
        pcntl_waitpid($pid, $status);
    }
}

// Example set to work on.
$set = array('a','b','c','d','e','f','g','h','i','j');

// Process the job with 3 child processes and time the full run.
$time = microtime(true);
fork_exec($set, 'process', 3);
$diff = microtime(true) - $time;
echo $diff . ' seconds for full run' . "\n";

// This is the job to run on the set. Make sure it is multi-process safe!
function process($set)
{
    foreach ($set as $item) {
        echo "Process [" . posix_getpid() . "] executing '" . $item . "'\n";
        sleep(1);
    }
}