Diffstat (limited to 'lib/private/Files/ObjectStore')
4 files changed, 49 insertions, 29 deletions
diff --git a/lib/private/Files/ObjectStore/ObjectStoreStorage.php b/lib/private/Files/ObjectStore/ObjectStoreStorage.php
index 36b1a7a1c95..752c3cf4fb7 100644
--- a/lib/private/Files/ObjectStore/ObjectStoreStorage.php
+++ b/lib/private/Files/ObjectStore/ObjectStoreStorage.php
@@ -413,16 +413,6 @@ class ObjectStoreStorage extends \OC\Files\Storage\Common implements IChunkedFil
 				//create a empty file, need to have at least on char to make it
 				// work with all object storage implementations
 				$this->file_put_contents($path, ' ');
-				$mimeType = \OC::$server->getMimeTypeDetector()->detectPath($path);
-				$stat = [
-					'etag' => $this->getETag($path),
-					'mimetype' => $mimeType,
-					'size' => 0,
-					'mtime' => $mtime,
-					'storage_mtime' => $mtime,
-					'permissions' => \OCP\Constants::PERMISSION_ALL - \OCP\Constants::PERMISSION_CREATE,
-				];
-				$this->getCache()->put($path, $stat);
 			} catch (\Exception $ex) {
 				$this->logger->error(
 					'Could not create object for ' . $path,
diff --git a/lib/private/Files/ObjectStore/S3ConfigTrait.php b/lib/private/Files/ObjectStore/S3ConfigTrait.php
index 63f14ac2d00..5b086db8f77 100644
--- a/lib/private/Files/ObjectStore/S3ConfigTrait.php
+++ b/lib/private/Files/ObjectStore/S3ConfigTrait.php
@@ -18,6 +18,10 @@ trait S3ConfigTrait {
 	/** Maximum number of concurrent multipart uploads */
 	protected int $concurrency;
 
+	/** Timeout, in seconds, for the connection to S3 server, not for the
+	 * request. */
+	protected float $connectTimeout;
+
 	protected int $timeout;
 
 	protected string|false $proxy;
diff --git a/lib/private/Files/ObjectStore/S3ConnectionTrait.php b/lib/private/Files/ObjectStore/S3ConnectionTrait.php
index b7017583dc2..062d2e4bde4 100644
--- a/lib/private/Files/ObjectStore/S3ConnectionTrait.php
+++ b/lib/private/Files/ObjectStore/S3ConnectionTrait.php
@@ -39,6 +39,7 @@ trait S3ConnectionTrait {
 		// Default to 5 like the S3 SDK does
 		$this->concurrency = $params['concurrency'] ?? 5;
 		$this->proxy = $params['proxy'] ?? false;
+		$this->connectTimeout = $params['connect_timeout'] ?? 5;
 		$this->timeout = $params['timeout'] ?? 15;
 		$this->storageClass = !empty($params['storageClass']) ? $params['storageClass'] : 'STANDARD';
 		$this->uploadPartSize = $params['uploadPartSize'] ?? 524288000;
@@ -102,8 +103,7 @@ trait S3ConnectionTrait {
 			'use_arn_region' => false,
 			'http' => [
 				'verify' => $this->getCertificateBundlePath(),
-				// Timeout for the connection to S3 server, not for the request.
-				'connect_timeout' => 5
+				'connect_timeout' => $this->connectTimeout,
 			],
 			'use_aws_shared_config_files' => false,
 			'retries' => [
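
The two hunks above make the S3 HTTP connect timeout configurable: S3ConnectionTrait now reads connect_timeout from the object store arguments (defaulting to the previously hard-coded 5 seconds) and hands it to the AWS client, while timeout stays the per-request timeout. A minimal config.php sketch, assuming the usual Nextcloud S3 primary object store layout; bucket, hostname and credential values are placeholders, not recommendations:

<?php
// Sketch only: 'connect_timeout' is the key introduced by this change; the
// other keys follow the existing S3 object store arguments.
$CONFIG = [
	'objectstore' => [
		'class' => '\\OC\\Files\\ObjectStore\\S3',
		'arguments' => [
			'bucket' => 'nextcloud',
			'hostname' => 's3.example.com',
			'key' => 'ACCESS_KEY',
			'secret' => 'SECRET_KEY',
			'use_ssl' => true,
			'concurrency' => 5,       // maximum concurrent multipart uploads
			'timeout' => 15,          // request timeout, in seconds
			'connect_timeout' => 5,   // new: connection timeout, in seconds (default 5)
		],
	],
];
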
diff --git a/lib/private/Files/ObjectStore/S3ObjectTrait.php b/lib/private/Files/ObjectStore/S3ObjectTrait.php
index 61e8158b863..5e6dcf88a42 100644
--- a/lib/private/Files/ObjectStore/S3ObjectTrait.php
+++ b/lib/private/Files/ObjectStore/S3ObjectTrait.php
@@ -119,28 +119,54 @@ trait S3ObjectTrait {
 	protected function writeMultiPart(string $urn, StreamInterface $stream, array $metaData): void {
 		$mimetype = $metaData['mimetype'] ?? null;
 		unset($metaData['mimetype']);
-		$uploader = new MultipartUploader($this->getConnection(), $stream, [
-			'bucket' => $this->bucket,
-			'concurrency' => $this->concurrency,
-			'key' => $urn,
-			'part_size' => $this->uploadPartSize,
-			'params' => [
-				'ContentType' => $mimetype,
-				'Metadata' => $this->buildS3Metadata($metaData),
-				'StorageClass' => $this->storageClass,
-			] + $this->getSSECParameters(),
-		]);
-		try {
-			$uploader->upload();
-		} catch (S3MultipartUploadException $e) {
+		$attempts = 0;
+		$uploaded = false;
+		$concurrency = $this->concurrency;
+		$exception = null;
+		$state = null;
+
+		// retry multipart upload once with concurrency at half on failure
+		while (!$uploaded && $attempts <= 1) {
+			$uploader = new MultipartUploader($this->getConnection(), $stream, [
+				'bucket' => $this->bucket,
+				'concurrency' => $concurrency,
+				'key' => $urn,
+				'part_size' => $this->uploadPartSize,
+				'state' => $state,
+				'params' => [
+					'ContentType' => $mimetype,
+					'Metadata' => $this->buildS3Metadata($metaData),
+					'StorageClass' => $this->storageClass,
+				] + $this->getSSECParameters(),
+			]);
+
+			try {
+				$uploader->upload();
+				$uploaded = true;
+			} catch (S3MultipartUploadException $e) {
+				$exception = $e;
+				$attempts++;
+
+				if ($concurrency > 1) {
+					$concurrency = round($concurrency / 2);
+				}
+
+				if ($stream->isSeekable()) {
+					$stream->rewind();
+				}
+			}
+		}
+
+		if (!$uploaded) {
 			// if anything goes wrong with multipart, make sure that you don't poison and
 			// slow down s3 bucket with orphaned fragments
-			$uploadInfo = $e->getState()->getId();
-			if ($e->getState()->isInitiated() && (array_key_exists('UploadId', $uploadInfo))) {
+			$uploadInfo = $exception->getState()->getId();
+			if ($exception->getState()->isInitiated() && (array_key_exists('UploadId', $uploadInfo))) {
 				$this->getConnection()->abortMultipartUpload($uploadInfo);
 			}
-			throw new \OCA\DAV\Connector\Sabre\Exception\BadGateway('Error while uploading to S3 bucket', 0, $e);
+
+			throw new \OCA\DAV\Connector\Sabre\Exception\BadGateway('Error while uploading to S3 bucket', 0, $exception);
 		}
 	}
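
The writeMultiPart() change retries a failed multipart upload once, halving the concurrency and rewinding the source stream before the second attempt, and only aborts the upload, to avoid leaving orphaned parts in the bucket, when both attempts have failed. A standalone sketch of the same retry pattern against the AWS SDK's MultipartUploader, assuming aws/aws-sdk-php and guzzlehttp/psr7 are installed via Composer; region, bucket, key and file name are placeholders and the wrapping BadGateway exception is left out:

<?php
// Sketch only: mirrors the retry/abort decision added to
// S3ObjectTrait::writeMultiPart(), outside Nextcloud's storage classes.
require 'vendor/autoload.php';

use Aws\Exception\MultipartUploadException;
use Aws\S3\MultipartUploader;
use Aws\S3\S3Client;
use GuzzleHttp\Psr7\Utils;

$client = new S3Client(['version' => 'latest', 'region' => 'us-east-1']);
$stream = Utils::streamFor(fopen('large-file.bin', 'rb'));

$attempts = 0;
$uploaded = false;
$concurrency = 5;
$exception = null;

while (!$uploaded && $attempts <= 1) {
	$uploader = new MultipartUploader($client, $stream, [
		'bucket' => 'example-bucket',
		'key' => 'example-object',
		'concurrency' => $concurrency,
	]);

	try {
		$uploader->upload();
		$uploaded = true;
	} catch (MultipartUploadException $e) {
		$exception = $e;
		$attempts++;

		// the second attempt runs with half as many parallel part uploads
		if ($concurrency > 1) {
			$concurrency = (int)round($concurrency / 2);
		}

		// the failed attempt consumed part of the stream; start over from the beginning
		if ($stream->isSeekable()) {
			$stream->rewind();
		}
	}
}

if (!$uploaded) {
	// abort the multipart upload so no orphaned parts are left in the bucket
	$uploadInfo = $exception->getState()->getId();
	if ($exception->getState()->isInitiated() && array_key_exists('UploadId', $uploadInfo)) {
		$client->abortMultipartUpload($uploadInfo);
	}
	throw $exception;
}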