Mirror of https://github.com/danog/byte-stream.git
Solve performance issues with very large chunks
Very large chunks need to be copied every time there is a partial write, which is a real performance problem. Instead of making a nearly full copy of the chunk on every partial write, this patch automatically splits very large chunks into multiple smaller chunks. Fixes #41.
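For illustration, here is a minimal, self-contained PHP sketch of the splitting idea. splitIntoChunks() and the top-level constant are hypothetical names for this example, not part of the byte-stream API; only the 128 KiB LARGE_CHUNK_SIZE value comes from the patch below.

<?php
// Hedged sketch, not library code: cap the size of any single queued chunk so
// that later partial-write handling only ever copies a bounded amount of data.
// LARGE_CHUNK_SIZE mirrors the 128 KiB constant introduced by this commit;
// splitIntoChunks() is a hypothetical helper used only for illustration.

const LARGE_CHUNK_SIZE = 128 * 1024;

/** @return string[] Pieces of at most LARGE_CHUNK_SIZE bytes, in order. */
function splitIntoChunks(string $data): array
{
    if (\strlen($data) <= LARGE_CHUNK_SIZE) {
        return [$data];
    }

    // \str_split() preserves order and yields a shorter final piece if needed.
    return \str_split($data, LARGE_CHUNK_SIZE);
}

// A 1 MiB payload becomes eight 128 KiB pieces, each queued as its own write.
$pieces = splitIntoChunks(\str_repeat("x", 1024 * 1024));
\assert(\count($pieces) === 8);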
Commit: 37b9ab16bb
Parent: 2dda482685
@@ -14,6 +14,7 @@ use Amp\Success;
 final class ResourceOutputStream implements OutputStream
 {
     const MAX_CONSECUTIVE_EMPTY_WRITES = 3;
+    const LARGE_CHUNK_SIZE = 128 * 1024;
 
     /** @var resource */
     private $resource;
@@ -203,6 +204,16 @@ final class ResourceOutputStream implements OutputStream
         }
 
         $deferred = new Deferred;
+
+        if ($length - $written > self::LARGE_CHUNK_SIZE) {
+            $chunks = \str_split($data, self::LARGE_CHUNK_SIZE);
+            $data = \array_pop($chunks);
+            foreach ($chunks as $chunk) {
+                $this->writes->push([$chunk, $written, new Deferred]);
+                $written += self::LARGE_CHUNK_SIZE;
+            }
+        }
+
         $this->writes->push([$data, $written, $deferred]);
         Loop::enable($this->watcher);
         $promise = $deferred->promise();
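To see why bounding the chunk size helps, a rough, self-contained PHP illustration follows. It assumes, as the commit message describes, that re-queueing the unwritten remainder after a partial write copies that remainder (for example via \substr()); the payload sizes are arbitrary.

<?php
// Rough illustration (not library code) of the copy cost being avoided.
// Assumption: after a partial write, the unwritten remainder is re-queued,
// which involves copying it (e.g. via \substr()).

$payload = \str_repeat("x", 8 * 1024 * 1024); // 8 MiB write

// Unsplit: a 64 KiB partial write leaves almost the whole 8 MiB to copy,
// and that near-full copy repeats on every subsequent partial write.
$remainderUnsplit = \substr($payload, 64 * 1024);
\assert(\strlen($remainderUnsplit) === 8 * 1024 * 1024 - 64 * 1024);

// Split into 128 KiB chunks: the same partial write only re-copies the rest
// of one small chunk; the other queued chunks are never touched.
$chunks = \str_split($payload, 128 * 1024);
$remainderSplit = \substr($chunks[0], 64 * 1024);
\assert(\strlen($remainderSplit) === 64 * 1024);

Note also what the second hunk shows about promises: the intermediate pieces are queued with fresh Deferred instances, while the caller still receives the promise of the $deferred that travels with the final piece, so the returned promise presumably resolves only after the queue has worked through the earlier pieces.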