author | Arthur Schiwon <blizzz@arthur-schiwon.de> | 2023-03-09 13:55:57 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-03-09 13:55:57 +0100 |
commit | 315510df8464a97656f614b6b4ba15a3fb11d337 (patch) | |
tree | 1e1c0513c82c6401b03937e55578b9982ea91189 /build | |
parent | 691aa8d0257e9361bbc5a64b09cd634435dbd40a (diff) | |
parent | d2a05716753cc9297fa38e88a43e39dec71f3b40 (diff) | |
download | nextcloud-server-315510df8464a97656f614b6b4ba15a3fb11d337.tar.gz nextcloud-server-315510df8464a97656f614b6b4ba15a3fb11d337.zip |
Merge pull request #27034 from nextcloud/enh/s3-multipart-upload-api
Use MultipartUpload for uploading chunks to s3
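This merge switches chunk assembly on S3-backed object storage over to the native S3 MultipartUpload API. For orientation only — this is not code from the PR — the flow on the S3 side looks roughly like the following sketch using the AWS SDK for PHP v3; bucket, key, endpoint and credentials are made-up placeholders:

```php
<?php
// Hypothetical illustration of the S3 MultipartUpload flow; all identifiers
// (bucket, key, endpoint, credentials) are placeholders, not values from this PR.
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client([
    'version' => 'latest',
    'region' => 'us-east-1',
    'endpoint' => 'http://localhost:9000', // e.g. a local MinIO instance
    'use_path_style_endpoint' => true,
    'credentials' => ['key' => 'minioadmin', 'secret' => 'minioadmin'],
]);

$bucket = 'nextcloud';
$key = 'urn:oid:1234';

// 1. Start the multipart upload and keep its UploadId.
$uploadId = $s3->createMultipartUpload([
    'Bucket' => $bucket,
    'Key' => $key,
])['UploadId'];

// 2. Upload each chunk as a numbered part; S3 requires every part except the
//    last one to be at least 5 MB.
$parts = [];
foreach ([1 => '/tmp/part-upload-1', 2 => '/tmp/part-upload-2'] as $number => $file) {
    $result = $s3->uploadPart([
        'Bucket' => $bucket,
        'Key' => $key,
        'UploadId' => $uploadId,
        'PartNumber' => $number,
        'Body' => fopen($file, 'rb'),
    ]);
    $parts[] = ['PartNumber' => $number, 'ETag' => $result['ETag']];
}

// 3. Complete the upload; S3 concatenates the parts into a single object.
$s3->completeMultipartUpload([
    'Bucket' => $bucket,
    'Key' => $key,
    'UploadId' => $uploadId,
    'MultipartUpload' => ['Parts' => $parts],
]);
```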
Diffstat (limited to 'build')
-rw-r--r-- | build/integration/features/bootstrap/BasicStructure.php | 4
-rw-r--r-- | build/integration/features/bootstrap/WebDav.php | 114
-rw-r--r-- | build/integration/features/webdav-related.feature | 104
3 files changed, 216 insertions, 6 deletions
diff --git a/build/integration/features/bootstrap/BasicStructure.php b/build/integration/features/bootstrap/BasicStructure.php
index 9060c85c756..e12a40ac6b4 100644
--- a/build/integration/features/bootstrap/BasicStructure.php
+++ b/build/integration/features/bootstrap/BasicStructure.php
@@ -179,7 +179,7 @@ trait BasicStructure {
             $options['auth'] = [$this->currentUser, $this->regularUser];
         }
         $options['headers'] = [
-            'OCS_APIREQUEST' => 'true'
+            'OCS-APIRequest' => 'true'
         ];
         if ($body instanceof TableNode) {
             $fd = $body->getRowsHash();
@@ -306,7 +306,7 @@ trait BasicStructure {
      * @param string $user
      */
     public function loggingInUsingWebAs($user) {
-        $loginUrl = substr($this->baseUrl, 0, -5) . '/login';
+        $loginUrl = substr($this->baseUrl, 0, -5) . '/index.php/login';
         // Request a new session and extract CSRF token
         $client = new Client();
         $response = $client->get(
diff --git a/build/integration/features/bootstrap/WebDav.php b/build/integration/features/bootstrap/WebDav.php
index 680db01a260..9c81a5817b4 100644
--- a/build/integration/features/bootstrap/WebDav.php
+++ b/build/integration/features/bootstrap/WebDav.php
@@ -54,6 +54,9 @@ trait WebDav {
     /** @var int */
     private $storedFileID = null;
 
+    private string $s3MultipartDestination;
+    private string $uploadId;
+
     /**
      * @Given /^using dav path "([^"]*)"$/
      */
@@ -751,6 +754,7 @@ trait WebDav {
      * @Given user :user creates a new chunking upload with id :id
      */
     public function userCreatesANewChunkingUploadWithId($user, $id) {
+        $this->parts = [];
         $destination = '/uploads/' . $user . '/' . $id;
         $this->makeDavRequest($user, 'MKCOL', $destination, [], null, "uploads");
     }
@@ -792,6 +796,60 @@ trait WebDav {
         }
     }
 
+
+    /**
+     * @Given user :user creates a new chunking v2 upload with id :id and destination :targetDestination
+     */
+    public function userCreatesANewChunkingv2UploadWithIdAndDestination($user, $id, $targetDestination) {
+        $this->s3MultipartDestination = $this->getTargetDestination($user, $targetDestination);
+        $this->newUploadId();
+        $destination = '/uploads/' . $user . '/' . $this->getUploadId($id);
+        $this->response = $this->makeDavRequest($user, 'MKCOL', $destination, [
+            'Destination' => $this->s3MultipartDestination,
+        ], null, "uploads");
+    }
+
+    /**
+     * @Given user :user uploads new chunk v2 file :num to id :id
+     */
+    public function userUploadsNewChunkv2FileToIdAndDestination($user, $num, $id) {
+        $data = \GuzzleHttp\Psr7\Utils::streamFor(fopen('/tmp/part-upload-' . $num, 'r'));
+        $destination = '/uploads/' . $user . '/' . $this->getUploadId($id) . '/' . $num;
+        $this->response = $this->makeDavRequest($user, 'PUT', $destination, [
+            'Destination' => $this->s3MultipartDestination
+        ], $data, "uploads");
+    }
+
+    /**
+     * @Given user :user moves new chunk v2 file with id :id
+     */
+    public function userMovesNewChunkv2FileWithIdToMychunkedfileAndDestination($user, $id) {
+        $source = '/uploads/' . $user . '/' . $this->getUploadId($id) . '/.file';
+        try {
+            $this->response = $this->makeDavRequest($user, 'MOVE', $source, [
+                'Destination' => $this->s3MultipartDestination,
+            ], null, "uploads");
+        } catch (\GuzzleHttp\Exception\ServerException $e) {
+            // 5xx responses cause a server exception
+            $this->response = $e->getResponse();
+        } catch (\GuzzleHttp\Exception\ClientException $e) {
+            // 4xx responses cause a client exception
+            $this->response = $e->getResponse();
+        }
+    }
+
+    private function getTargetDestination(string $user, string $destination): string {
+        return substr($this->baseUrl, 0, -4) . $this->getDavFilesPath($user) . $destination;
+    }
+
+    private function getUploadId(string $id): string {
+        return $id . '-' . $this->uploadId;
+    }
+
+    private function newUploadId() {
+        $this->uploadId = (string)time();
+    }
+
     /**
      * @Given /^Downloading file "([^"]*)" as "([^"]*)"$/
      */
@@ -980,4 +1038,60 @@ trait WebDav {
         $currentFileID = $this->getFileIdForPath($user, $path);
         Assert::assertEquals($currentFileID, $this->storedFileID);
     }
+
+    /**
+     * @Given /^user "([^"]*)" creates a file locally with "([^"]*)" x 5 MB chunks$/
+     */
+    public function userCreatesAFileLocallyWithChunks($arg1, $chunks) {
+        $this->parts = [];
+        for ($i = 1;$i <= (int)$chunks;$i++) {
+            $randomletter = substr(str_shuffle("abcdefghijklmnopqrstuvwxyz"), 0, 1);
+            file_put_contents('/tmp/part-upload-' . $i, str_repeat($randomletter, 5 * 1024 * 1024));
+            $this->parts[] = '/tmp/part-upload-' . $i;
+        }
+    }
+
+    /**
+     * @Given user :user creates the chunk :id with a size of :size MB
+     */
+    public function userCreatesAChunk($user, $id, $size) {
+        $randomletter = substr(str_shuffle("abcdefghijklmnopqrstuvwxyz"), 0, 1);
+        file_put_contents('/tmp/part-upload-' . $id, str_repeat($randomletter, (int)$size * 1024 * 1024));
+        $this->parts[] = '/tmp/part-upload-' . $id;
+    }
+
+    /**
+     * @Then /^Downloaded content should be the created file$/
+     */
+    public function downloadedContentShouldBeTheCreatedFile() {
+        $content = '';
+        sort($this->parts);
+        foreach ($this->parts as $part) {
+            $content .= file_get_contents($part);
+        }
+        Assert::assertEquals($content, (string)$this->response->getBody());
+    }
+
+    /**
+     * @Then /^the S3 multipart upload was successful with status "([^"]*)"$/
+     */
+    public function theSmultipartUploadWasSuccessful($status) {
+        Assert::assertEquals((int)$status, $this->response->getStatusCode());
+    }
+
+    /**
+     * @Then /^the upload should fail on object storage$/
+     */
+    public function theUploadShouldFailOnObjectStorage() {
+        $descriptor = [
+            0 => ['pipe', 'r'],
+            1 => ['pipe', 'w'],
+            2 => ['pipe', 'w'],
+        ];
+        $process = proc_open('php occ config:system:get objectstore --no-ansi', $descriptor, $pipes, '../../');
+        $lastCode = proc_close($process);
+        if ($lastCode === 0) {
+            $this->theHTTPStatusCodeShouldBe(500);
+        }
+    }
 }
diff --git a/build/integration/features/webdav-related.feature b/build/integration/features/webdav-related.feature
index 21e195af115..28a0cad619b 100644
--- a/build/integration/features/webdav-related.feature
+++ b/build/integration/features/webdav-related.feature
@@ -191,10 +191,10 @@ Feature: webdav-related
     And As an "user1"
     And user "user1" created a folder "/testquota"
     And as "user1" creating a share with
-      | path | testquota |
-      | shareType | 0 |
-      | permissions | 31 |
-      | shareWith | user0 |
+      | path        | testquota |
+      | shareType   | 0         |
+      | permissions | 31        |
+      | shareWith   | user0     |
     And user "user0" accepts last share
     And As an "user0"
     When User "user0" uploads file "data/textfile.txt" to "/testquota/asdf.txt"
@@ -630,3 +630,99 @@ Feature: webdav-related
     And As an "user1"
     And user "user1" created a folder "/testshare "
     Then the HTTP status code should be "400"
+
+  @s3-multipart
+  Scenario: Upload chunked file asc with new chunking v2
+    Given using new dav path
+    And user "user0" exists
+    And user "user0" creates a file locally with "3" x 5 MB chunks
+    And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/myChunkedFile1.txt"
+    And user "user0" uploads new chunk v2 file "1" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "2" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "3" to id "chunking-42"
+    And user "user0" moves new chunk v2 file with id "chunking-42"
+    Then the S3 multipart upload was successful with status "201"
+    When As an "user0"
+    And Downloading file "/myChunkedFile1.txt"
+    Then Downloaded content should be the created file
+
+  @s3-multipart
+  Scenario: Upload chunked file desc with new chunking v2
+    Given using new dav path
+    And user "user0" exists
+    And user "user0" creates a file locally with "3" x 5 MB chunks
+    And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/myChunkedFile.txt"
+    And user "user0" uploads new chunk v2 file "3" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "2" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "1" to id "chunking-42"
+    And user "user0" moves new chunk v2 file with id "chunking-42"
+    Then the S3 multipart upload was successful with status "201"
+    When As an "user0"
+    And Downloading file "/myChunkedFile.txt"
+    Then Downloaded content should be the created file
+
+  @s3-multipart
+  Scenario: Upload chunked file with random chunk sizes
+    Given using new dav path
+    And user "user0" exists
+    And user "user0" creates a new chunking v2 upload with id "chunking-random" and destination "/myChunkedFile.txt"
+    And user user0 creates the chunk 1 with a size of 5 MB
+    And user user0 creates the chunk 2 with a size of 7 MB
+    And user user0 creates the chunk 3 with a size of 9 MB
+    And user user0 creates the chunk 4 with a size of 1 MB
+    And user "user0" uploads new chunk v2 file "1" to id "chunking-random"
+    And user "user0" uploads new chunk v2 file "3" to id "chunking-random"
+    And user "user0" uploads new chunk v2 file "2" to id "chunking-random"
+    And user "user0" uploads new chunk v2 file "4" to id "chunking-random"
+    And user "user0" moves new chunk v2 file with id "chunking-random"
+    Then the S3 multipart upload was successful with status "201"
+    When As an "user0"
+    And Downloading file "/myChunkedFile.txt"
+    Then Downloaded content should be the created file
+
+  @s3-multipart
+  Scenario: Upload chunked file with too low chunk sizes
+    Given using new dav path
+    And user "user0" exists
+    And user "user0" creates a new chunking v2 upload with id "chunking-random" and destination "/myChunkedFile.txt"
+    And user user0 creates the chunk 1 with a size of 5 MB
+    And user user0 creates the chunk 2 with a size of 2 MB
+    And user user0 creates the chunk 3 with a size of 5 MB
+    And user user0 creates the chunk 4 with a size of 1 MB
+    And user "user0" uploads new chunk v2 file "1" to id "chunking-random"
+    And user "user0" uploads new chunk v2 file "3" to id "chunking-random"
+    And user "user0" uploads new chunk v2 file "2" to id "chunking-random"
+    And user "user0" uploads new chunk v2 file "4" to id "chunking-random"
+    And user "user0" moves new chunk v2 file with id "chunking-random"
+    Then the upload should fail on object storage
+
+  @s3-multipart
+  Scenario: Upload chunked file with special characters with new chunking v2
+    Given using new dav path
+    And user "user0" exists
+    And user "user0" creates a file locally with "3" x 5 MB chunks
+    And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/äöü.txt"
+    And user "user0" uploads new chunk v2 file "1" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "2" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "3" to id "chunking-42"
+    And user "user0" moves new chunk v2 file with id "chunking-42"
+    Then the S3 multipart upload was successful with status "201"
+    When As an "user0"
+    And Downloading file "/äöü.txt"
+    Then Downloaded content should be the created file
+
+  @s3-multipart
+  Scenario: Upload chunked file with special characters in path with new chunking v2
+    Given using new dav path
+    And user "user0" exists
+    And User "user0" created a folder "üäöé"
+    And user "user0" creates a file locally with "3" x 5 MB chunks
+    And user "user0" creates a new chunking v2 upload with id "chunking-42" and destination "/üäöé/äöü.txt"
+    And user "user0" uploads new chunk v2 file "1" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "2" to id "chunking-42"
+    And user "user0" uploads new chunk v2 file "3" to id "chunking-42"
+    And user "user0" moves new chunk v2 file with id "chunking-42"
+    Then the S3 multipart upload was successful with status "201"
+    When As an "user0"
+    And Downloading file "/üäöé/äöü.txt"
+    Then Downloaded content should be the created file
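The scenarios above drive the chunking v2 upload endpoints through the Behat steps added in WebDav.php: MKCOL on the upload collection, PUT for each numbered chunk, then MOVE of the virtual ".file". A minimal client-side sketch of that same flow with Guzzle follows; host, port, credentials, upload id and chunk file names are placeholders, not values taken from the PR:

```php
<?php
// Rough sketch of the chunking v2 upload flow exercised by the scenarios above.
// Host, credentials, upload id and file names are placeholders.
require 'vendor/autoload.php';

use GuzzleHttp\Client;

$client = new Client(['auth' => ['user0', '123456']]);

$dav = 'http://localhost:8080/remote.php/dav';
$uploadUrl = $dav . '/uploads/user0/chunking-42';
// Final target of the upload; the Behat steps send it as a Destination header
// on MKCOL and PUT as well as on the closing MOVE.
$destination = $dav . '/files/user0/myChunkedFile.txt';

// 1. Create the upload collection.
$client->request('MKCOL', $uploadUrl, ['headers' => ['Destination' => $destination]]);

// 2. Upload the chunks as numbered members of the collection (5 MB each in the tests).
foreach ([1, 2, 3] as $num) {
    $client->request('PUT', $uploadUrl . '/' . $num, [
        'headers' => ['Destination' => $destination],
        'body' => fopen('/tmp/part-upload-' . $num, 'rb'),
    ]);
}

// 3. Move the virtual ".file" onto the destination to finish the upload; the
//    scenarios expect a 201 response here.
$client->request('MOVE', $uploadUrl . '/.file', ['headers' => ['Destination' => $destination]]);
```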