1
0
mirror of https://github.com/danog/byte-stream.git synced 2024-11-29 20:09:07 +01:00

Solve performance issues with very large chunks

Very large chunks must be copied on every partial write, which is expensive. Instead of making a nearly full copy of the chunk on each write, this patch automatically splits very large chunks into multiple smaller chunks.

Fixes #41.
This commit is contained in:
Niklas Keller 2018-05-23 19:39:34 +02:00 committed by Aaron Piotrowski
parent 2dda482685
commit 37b9ab16bb
No known key found for this signature in database
GPG Key ID: ADD1EF783EDE9EEB

View File

@ -14,6 +14,7 @@ use Amp\Success;
final class ResourceOutputStream implements OutputStream
{
const MAX_CONSECUTIVE_EMPTY_WRITES = 3;
const LARGE_CHUNK_SIZE = 128 * 1024;
/** @var resource */
private $resource;
@ -203,6 +204,16 @@ final class ResourceOutputStream implements OutputStream
}
$deferred = new Deferred;
if ($length - $written > self::LARGE_CHUNK_SIZE) {
$chunks = \str_split($data, self::LARGE_CHUNK_SIZE);
$data = \array_pop($chunks);
foreach ($chunks as $chunk) {
$this->writes->push([$chunk, $written, new Deferred]);
$written += self::LARGE_CHUNK_SIZE;
}
}
$this->writes->push([$data, $written, $deferred]);
Loop::enable($this->watcher);
$promise = $deferred->promise();