author	Julius Knorr <jus@bitgrid.net>	2024-12-06 09:22:07 +0100
committer	GitHub <noreply@github.com>	2024-12-06 09:22:07 +0100
commit	3328cea2ea756bd91b445a5aaf988e60d86f64ed (patch)
tree	4ab2a3b39aa3c111be79f0dac6e3d6dd8a93d826 /lib
parent	9bd7304ad6da18d8ec32b5c37a03dc2cbc2e80fb (diff)
parent	6cf66f95ce9dadffd9fa25203e737917cef0cd8c (diff)
Merge pull request #49352 from nextcloud/s3-disable-multipart
improve handling of large single-part s3 uploads
Diffstat (limited to 'lib')
-rw-r--r--	lib/private/Files/ObjectStore/ObjectStoreStorage.php	8
-rw-r--r--	lib/private/Files/ObjectStore/S3ObjectTrait.php	33
2 files changed, 31 insertions, 10 deletions
diff --git a/lib/private/Files/ObjectStore/ObjectStoreStorage.php b/lib/private/Files/ObjectStore/ObjectStoreStorage.php
index 0963ffbb28f..78eb52a53d9 100644
--- a/lib/private/Files/ObjectStore/ObjectStoreStorage.php
+++ b/lib/private/Files/ObjectStore/ObjectStoreStorage.php
@@ -457,6 +457,14 @@ class ObjectStoreStorage extends \OC\Files\Storage\Common implements IChunkedFil
 	}
 
 	public function writeStream(string $path, $stream, ?int $size = null): int {
+		if ($size === null) {
+			$stats = fstat($stream);
+			if (is_array($stats) && isset($stats['size'])) {
+				$size = $stats['size'];
+				$this->logger->warning("stream size $size");
+			}
+		}
+
 		$stat = $this->stat($path);
 		if (empty($stat)) {
 			// create new file
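
The writeStream() change above probes the stream with fstat() to recover a size the caller did not pass in. Below is a minimal standalone sketch of that probe, not part of the patch: detectStreamSize() is a hypothetical helper name, and the "> 0" guard is an extra illustrative check the patch itself does not have. fstat() only yields a useful size for file- or memory-backed streams; pipes and network sockets typically report 0.

<?php

// Minimal sketch (not part of the patch) of the fstat()-based size probe used in
// writeStream() above. detectStreamSize() is a hypothetical helper name, and the
// "> 0" guard is an extra illustrative check.
function detectStreamSize($stream): ?int {
	$stats = fstat($stream);
	// fstat() returns a meaningful 'size' for file- or memory-backed streams;
	// pipes and sockets typically report 0, which we treat as "unknown" here.
	if (is_array($stats) && isset($stats['size']) && $stats['size'] > 0) {
		return $stats['size'];
	}
	return null;
}

// Usage: a regular file reports its size, a pipe does not.
$file = fopen(__FILE__, 'rb');
var_dump(detectStreamSize($file)); // int(<file size>)
fclose($file);
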
diff --git a/lib/private/Files/ObjectStore/S3ObjectTrait.php b/lib/private/Files/ObjectStore/S3ObjectTrait.php
index 2e625033751..9d7cfa644e6 100644
--- a/lib/private/Files/ObjectStore/S3ObjectTrait.php
+++ b/lib/private/Files/ObjectStore/S3ObjectTrait.php
@@ -140,20 +140,33 @@ trait S3ObjectTrait {
 	 * @since 7.0.0
 	 */
 	public function writeObject($urn, $stream, ?string $mimetype = null) {
+		$canSeek = fseek($stream, 0, SEEK_CUR) === 0;
 		$psrStream = Utils::streamFor($stream);
-		// ($psrStream->isSeekable() && $psrStream->getSize() !== null) evaluates to true for a On-Seekable stream
-		// so the optimisation does not apply
-		$buffer = new Psr7\Stream(fopen('php://memory', 'rwb+'));
-		Utils::copyToStream($psrStream, $buffer, $this->putSizeLimit);
-		$buffer->seek(0);
-		if ($buffer->getSize() < $this->putSizeLimit) {
-			// buffer is fully seekable, so use it directly for the small upload
-			$this->writeSingle($urn, $buffer, $mimetype);
+
+		$size = $psrStream->getSize();
+		if ($size === null || !$canSeek) {
+			// The s3 single-part upload requires the size to be known for the stream.
+			// So for input streams that don't have a known size, we need to copy (part of)
+			// the input into a temporary stream so the size can be determined
+			$buffer = new Psr7\Stream(fopen('php://temp', 'rw+'));
+			Utils::copyToStream($psrStream, $buffer, $this->putSizeLimit);
+			$buffer->seek(0);
+			if ($buffer->getSize() < $this->putSizeLimit) {
+				// buffer is fully seekable, so use it directly for the small upload
+				$this->writeSingle($urn, $buffer, $mimetype);
+			} else {
+				$loadStream = new Psr7\AppendStream([$buffer, $psrStream]);
+				$this->writeMultiPart($urn, $loadStream, $mimetype);
+			}
 		} else {
-			$loadStream = new Psr7\AppendStream([$buffer, $psrStream]);
-			$this->writeMultiPart($urn, $loadStream, $mimetype);
+			if ($size < $this->putSizeLimit) {
+				$this->writeSingle($urn, $psrStream, $mimetype);
+			} else {
+				$this->writeMultiPart($urn, $psrStream, $mimetype);
+			}
 		}
+		$psrStream->close();
 	}
 
 	/**
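
The rewritten writeObject() above picks between a single-part and a multipart upload depending on whether the input's size is known and the stream is seekable, spooling at most putSizeLimit bytes into php://temp when it is not. The sketch below reproduces that decision logic outside the trait, under stated assumptions: uploadSingle() and uploadMultipart() are placeholders for the real writeSingle()/writeMultiPart() S3 calls, writeObjectSketch() is a made-up name, and PUT_SIZE_LIMIT is an example value standing in for $this->putSizeLimit (which comes from configuration).

<?php

require 'vendor/autoload.php'; // guzzlehttp/psr7

use GuzzleHttp\Psr7;
use GuzzleHttp\Psr7\Utils;
use Psr\Http\Message\StreamInterface;

// Example value only; stands in for $this->putSizeLimit from the storage configuration.
const PUT_SIZE_LIMIT = 100 * 1024 * 1024;

// Placeholders for the trait's writeSingle()/writeMultiPart() S3 calls.
function uploadSingle(string $urn, StreamInterface $stream): void { /* single PutObject */ }
function uploadMultipart(string $urn, StreamInterface $stream): void { /* multipart upload */ }

function writeObjectSketch(string $urn, $resource): void {
	// A single-part upload needs a known size, which in turn needs a seekable stream.
	$canSeek = fseek($resource, 0, SEEK_CUR) === 0;
	$psrStream = Utils::streamFor($resource);

	$size = $psrStream->getSize();
	if ($size === null || !$canSeek) {
		// Size unknown: spool at most PUT_SIZE_LIMIT bytes into php://temp to measure it.
		$buffer = new Psr7\Stream(fopen('php://temp', 'rw+'));
		Utils::copyToStream($psrStream, $buffer, PUT_SIZE_LIMIT);
		$buffer->seek(0);
		if ($buffer->getSize() < PUT_SIZE_LIMIT) {
			// The whole input fit into the buffer: small object, single-part upload.
			uploadSingle($urn, $buffer);
		} else {
			// The buffer filled up: prepend it to the unread remainder and go multipart.
			uploadMultipart($urn, new Psr7\AppendStream([$buffer, $psrStream]));
		}
	} elseif ($size < PUT_SIZE_LIMIT) {
		// Known small size: stream straight into a single-part upload.
		uploadSingle($urn, $psrStream);
	} else {
		uploadMultipart($urn, $psrStream);
	}
	$psrStream->close();
}
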