From 37b9ab16bb8f69c825c3c4e553fe00da73dd6926 Mon Sep 17 00:00:00 2001
From: Niklas Keller
Date: Wed, 23 May 2018 19:39:34 +0200
Subject: [PATCH] Solve performance issues with very large chunks

Very large chunks need to be copied every time there's a partial write,
which is pretty problematic. Instead of doing an almost full copy of the
full chunk every time, this patch splits very large chunks into multiple
smaller chunks automatically.

Fixes #41.
---
 lib/ResourceOutputStream.php | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/lib/ResourceOutputStream.php b/lib/ResourceOutputStream.php
index 6e031fc..f34184b 100644
--- a/lib/ResourceOutputStream.php
+++ b/lib/ResourceOutputStream.php
@@ -14,6 +14,7 @@ use Amp\Success;
 final class ResourceOutputStream implements OutputStream
 {
     const MAX_CONSECUTIVE_EMPTY_WRITES = 3;
+    const LARGE_CHUNK_SIZE = 128 * 1024;
 
     /** @var resource */
     private $resource;
@@ -203,6 +204,16 @@ final class ResourceOutputStream implements OutputStream
         }
 
         $deferred = new Deferred;
+
+        if ($length - $written > self::LARGE_CHUNK_SIZE) {
+            $chunks = \str_split($data, self::LARGE_CHUNK_SIZE);
+            $data = \array_pop($chunks);
+            foreach ($chunks as $chunk) {
+                $this->writes->push([$chunk, $written, new Deferred]);
+                $written += self::LARGE_CHUNK_SIZE;
+            }
+        }
+
         $this->writes->push([$data, $written, $deferred]);
         Loop::enable($this->watcher);
         $promise = $deferred->promise();