-
Notifications
You must be signed in to change notification settings - Fork 588
HDDS-3979. Make bufferSize configurable for stream copy #1212
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
6449f66
635a874
ae006e9
9edbae5
ca7c9c0
9ec1c78
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -17,6 +17,8 @@ | |
| */ | ||
| package org.apache.hadoop.ozone.s3.endpoint; | ||
|
|
||
| import javax.annotation.PostConstruct; | ||
| import javax.inject.Inject; | ||
| import javax.ws.rs.Consumes; | ||
| import javax.ws.rs.DELETE; | ||
| import javax.ws.rs.DefaultValue; | ||
|
|
@@ -49,6 +51,8 @@ | |
|
|
||
| import org.apache.hadoop.hdds.client.ReplicationFactor; | ||
| import org.apache.hadoop.hdds.client.ReplicationType; | ||
| import org.apache.hadoop.hdds.conf.OzoneConfiguration; | ||
| import org.apache.hadoop.hdds.conf.StorageUnit; | ||
| import org.apache.hadoop.ozone.client.OzoneBucket; | ||
| import org.apache.hadoop.ozone.client.OzoneKeyDetails; | ||
| import org.apache.hadoop.ozone.client.OzoneMultipartUploadPartListParts; | ||
|
|
@@ -77,6 +81,9 @@ | |
| import org.apache.commons.io.IOUtils; | ||
|
|
||
| import org.apache.commons.lang3.tuple.Pair; | ||
|
|
||
| import static org.apache.hadoop.ozone.s3.S3GatewayConfigKeys.OZONE_S3G_CLIENT_BUFFER_SIZE_DEFAULT; | ||
| import static org.apache.hadoop.ozone.s3.S3GatewayConfigKeys.OZONE_S3G_CLIENT_BUFFER_SIZE_KEY; | ||
| import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.ENTITY_TOO_SMALL; | ||
| import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.INVALID_REQUEST; | ||
| import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.NO_SUCH_UPLOAD; | ||
|
|
@@ -104,6 +111,7 @@ public class ObjectEndpoint extends EndpointBase { | |
| private HttpHeaders headers; | ||
|
|
||
| private List<String> customizableGetHeaders = new ArrayList<>(); | ||
| private int bufferSize; | ||
|
|
||
| public ObjectEndpoint() { | ||
| customizableGetHeaders.add("Content-Type"); | ||
|
|
@@ -114,6 +122,16 @@ public ObjectEndpoint() { | |
| customizableGetHeaders.add("Content-Encoding"); | ||
| } | ||
|
|
||
| @Inject | ||
| private OzoneConfiguration ozoneConfiguration; | ||
|
|
||
| @PostConstruct | ||
| public void init() { | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. NIT: It can be slightly better to use [inline code omitted in extraction]. And you don't need to expose [inline code omitted in extraction]. (Didn't try, but it should work, IMHO.)
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Thank you for letting me know about this technique. |
||
| bufferSize = (int) ozoneConfiguration.getStorageSize( | ||
| OZONE_S3G_CLIENT_BUFFER_SIZE_KEY, | ||
| OZONE_S3G_CLIENT_BUFFER_SIZE_DEFAULT, StorageUnit.BYTES); | ||
| } | ||
|
|
||
| /** | ||
| * Rest endpoint to upload object to a bucket. | ||
| * <p> | ||
|
|
@@ -259,7 +277,8 @@ public Response get( | |
| try (S3WrapperInputStream s3WrapperInputStream = | ||
| new S3WrapperInputStream( | ||
| key.getInputStream())) { | ||
| s3WrapperInputStream.copyLarge(dest, startOffset, copyLength); | ||
| IOUtils.copyLarge(s3WrapperInputStream, dest, startOffset, | ||
| copyLength, new byte[bufferSize]); | ||
| } | ||
| }; | ||
| responseBuilder = Response | ||
|
|
@@ -400,7 +419,6 @@ public Response delete( | |
| return Response | ||
| .status(Status.NO_CONTENT) | ||
| .build(); | ||
|
|
||
| } | ||
|
|
||
| /** | ||
|
|
@@ -539,16 +557,9 @@ private Response createMultipartKey(String bucket, String key, long length, | |
| if (range != null) { | ||
| RangeHeader rangeHeader = | ||
| RangeHeaderParserUtil.parseRangeHeader(range, 0); | ||
|
|
||
| long copyLength = rangeHeader.getEndOffset() - | ||
| rangeHeader.getStartOffset(); | ||
|
|
||
| try (S3WrapperInputStream s3WrapperInputStream = | ||
| new S3WrapperInputStream( | ||
| sourceObject.getInputStream())) { | ||
| s3WrapperInputStream.copyLarge(ozoneOutputStream, | ||
| rangeHeader.getStartOffset(), copyLength); | ||
| } | ||
| IOUtils.copyLarge(sourceObject, ozoneOutputStream, | ||
| rangeHeader.getStartOffset(), | ||
| rangeHeader.getEndOffset() - rangeHeader.getStartOffset()); | ||
| } else { | ||
| IOUtils.copy(sourceObject, ozoneOutputStream); | ||
| } | ||
|
|
@@ -578,7 +589,6 @@ private Response createMultipartKey(String bucket, String key, long length, | |
| } | ||
| throw ex; | ||
| } | ||
|
|
||
| } | ||
|
|
||
| /** | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.