only do multipart upload for large files
Signed-off-by: Robin Appelman <robin@icewind.nl>
parent e4e5e735db
commit 4ae46d8876

1 changed file with 24 additions and 0 deletions
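As the diff shows, writeObject previously sent every stream through the SDK's MultipartUploader. It now checks the stream size up front: a stream with a known size below S3_UPLOAD_PART_SIZE goes out as a single putObject call, and only larger streams (or streams whose size cannot be determined) take the multipart path.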
@@ -26,6 +26,8 @@ use Aws\S3\MultipartUploader;
 use Aws\S3\S3Client;
 use Psr\Http\Message\StreamInterface;
 
+const S3_UPLOAD_PART_SIZE = 5368709120;
+
 trait S3ObjectTrait {
 	/**
 	 * Returns the connection
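5368709120 bytes is 5 GiB, which matches S3's documented ceiling for a single PutObject request, so anything at or above that size has to be uploaded in parts anyway. For readability the same value could be written as a constant expression (an equivalent rewrite, not what the commit uses):

	// 5 GiB, spelled out instead of as a literal
	const S3_UPLOAD_PART_SIZE = 5 * 1024 * 1024 * 1024; // = 5368709120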
@@ -62,17 +64,39 @@ trait S3ObjectTrait {
 	 * @since 7.0.0
 	 */
 	function writeObject($urn, $stream) {
+		$stat = fstat($stream);
+
+		if ($stat['size'] && $stat['size'] < S3_UPLOAD_PART_SIZE) {
+			$this->singlePartUpload($urn, $stream);
+		} else {
+			$this->multiPartUpload($urn, $stream);
+		}
+
+	}
+
+	private function singlePartUpload($urn, $stream) {
+		$this->getConnection()->putObject([
+			'Bucket' => $this->bucket,
+			'Key' => $urn,
+			'Body' => $stream
+		]);
+	}
+
+	private function multiPartUpload($urn, $stream) {
 		$uploader = new MultipartUploader($this->getConnection(), $stream, [
 			'bucket' => $this->bucket,
 			'key' => $urn,
 		]);
 
 		$tries = 0;
 
 		do {
 			try {
 				$result = $uploader->upload();
 			} catch (MultipartUploadException $e) {
 				rewind($stream);
 				$tries++;
 
+				if ($tries < 5) {
+					$uploader = new MultipartUploader($this->getConnection(), $stream, [
+						'state' => $e->getState()
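The rendered diff is cut off here, inside the retry branch. The resume logic it is building follows the pattern documented for the AWS SDK's MultipartUploader: when an attempt fails, a new uploader is constructed from the state carried by the MultipartUploadException, so parts that already reached S3 are not uploaded again. A minimal, self-contained sketch of that documented pattern (the $s3 client, bucket, and key are placeholders; this is not the missing remainder of the commit):

	<?php
	use Aws\S3\MultipartUploader;
	use Aws\Exception\MultipartUploadException;

	// Assumes a configured Aws\S3\S3Client in $s3 and a seekable $stream.
	$uploader = new MultipartUploader($s3, $stream, [
		'bucket' => 'example-bucket', // placeholder
		'key'    => 'example-key',    // placeholder
	]);

	do {
		try {
			$result = $uploader->upload();
		} catch (MultipartUploadException $e) {
			// Rewind the source, then resume from where the failed
			// attempt stopped instead of starting over.
			rewind($stream);
			$uploader = new MultipartUploader($s3, $stream, [
				'state' => $e->getState(),
			]);
		}
	} while (!isset($result));

Unlike this unbounded loop from the SDK documentation, the commit counts attempts and only rebuilds the uploader while $tries < 5, rather than retrying indefinitely.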