@@ -67,6 +67,7 @@
 
 import static org.apache.hadoop.ozone.OzoneAcl.AclScope.ACCESS;
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.NOT_IMPLEMENTED;
+import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.newError;
 import static org.apache.hadoop.ozone.s3.util.S3Consts.ENCODING_TYPE;
 
 /**
@@ -148,7 +149,7 @@ public Response get(
       }
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, bucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucketName, ex);
       } else {
         throw ex;
       }
@@ -242,8 +243,7 @@ public Response put(@PathParam("bucket") String bucketName,
           .build();
     } catch (OMException exception) {
       if (exception.getResult() == ResultCodes.INVALID_BUCKET_NAME) {
-        throw S3ErrorTable.newError(S3ErrorTable.INVALID_BUCKET_NAME,
-            bucketName);
+        throw newError(S3ErrorTable.INVALID_BUCKET_NAME, bucketName, exception);
       }
       LOG.error("Error in Create Bucket Request for bucket: {}", bucketName,
           exception);
Expand All @@ -263,8 +263,7 @@ public Response listMultipartUploads(
ozoneMultipartUploadList = bucket.listMultipartUploads(prefix);
} catch (OMException exception) {
if (exception.getResult() == ResultCodes.PERMISSION_DENIED) {
throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED,
prefix);
throw newError(S3ErrorTable.ACCESS_DENIED, prefix, exception);
}
throw exception;
}
@@ -309,13 +308,11 @@ public Response delete(@PathParam("bucket") String bucketName)
       deleteS3Bucket(bucketName);
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.BUCKET_NOT_EMPTY) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .BUCKET_NOT_EMPTY, bucketName);
+        throw newError(S3ErrorTable.BUCKET_NOT_EMPTY, bucketName, ex);
       } else if (ex.getResult() == ResultCodes.BUCKET_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .NO_SUCH_BUCKET, bucketName);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, bucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucketName, ex);
       } else {
         throw ex;
       }
@@ -402,14 +399,12 @@ public S3BucketAcl getAcl(String bucketName)
       return result;
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.BUCKET_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .NO_SUCH_BUCKET, bucketName);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .ACCESS_DENIED, bucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucketName, ex);
       } else {
         LOG.error("Failed to get acl of Bucket " + bucketName, ex);
-        throw S3ErrorTable.newError(S3ErrorTable.INTERNAL_ERROR, bucketName);
+        throw newError(S3ErrorTable.INTERNAL_ERROR, bucketName, ex);
       }
     }
   }
@@ -503,11 +498,9 @@ public Response putAcl(String bucketName, HttpHeaders httpHeaders,
       }
     } catch (OMException exception) {
       if (exception.getResult() == ResultCodes.BUCKET_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET,
-            bucketName);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName, exception);
       } else if (exception.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .ACCESS_DENIED, bucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucketName, exception);
       }
       LOG.error("Error in set ACL Request for bucket: {}", bucketName,
           exception);
@@ -531,13 +524,13 @@ private List<OzoneAcl> getAndConvertAclOnBucket(String value,
     for (String acl: subValues) {
       String[] part = acl.split("=");
       if (part.length != 2) {
-        throw S3ErrorTable.newError(S3ErrorTable.INVALID_ARGUMENT, acl);
+        throw newError(S3ErrorTable.INVALID_ARGUMENT, acl);
       }
       S3Acl.ACLIdentityType type =
           S3Acl.ACLIdentityType.getTypeFromHeaderType(part[0]);
       if (type == null || !type.isSupported()) {
         LOG.warn("S3 grantee {} is null or not supported", part[0]);
-        throw S3ErrorTable.newError(NOT_IMPLEMENTED, part[0]);
+        throw newError(NOT_IMPLEMENTED, part[0]);
       }
       // Build ACL on Bucket
       BitSet aclsOnBucket =
@@ -564,13 +557,13 @@ private List<OzoneAcl> getAndConvertAclOnVolume(String value,
     for (String acl: subValues) {
       String[] part = acl.split("=");
       if (part.length != 2) {
-        throw S3ErrorTable.newError(S3ErrorTable.INVALID_ARGUMENT, acl);
+        throw newError(S3ErrorTable.INVALID_ARGUMENT, acl);
       }
       S3Acl.ACLIdentityType type =
           S3Acl.ACLIdentityType.getTypeFromHeaderType(part[0]);
       if (type == null || !type.isSupported()) {
         LOG.warn("S3 grantee {} is null or not supported", part[0]);
-        throw S3ErrorTable.newError(NOT_IMPLEMENTED, part[0]);
+        throw newError(NOT_IMPLEMENTED, part[0]);
       }
       // Build ACL on Volume
       BitSet aclsOnVolume =
@@ -38,6 +38,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.newError;
+
 /**
  * Basic helpers for all the REST endpoints.
  */
@@ -57,7 +59,7 @@ protected OzoneBucket getBucket(OzoneVolume volume, String bucketName)
       bucket = volume.getBucket(bucketName);
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.KEY_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName, ex);
       } else {
         throw ex;
       }
@@ -88,9 +90,9 @@ protected OzoneBucket getBucket(String bucketName)
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.BUCKET_NOT_FOUND
           || ex.getResult() == ResultCodes.VOLUME_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, bucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucketName, ex);
       } else {
         throw ex;
       }
@@ -117,7 +119,7 @@ protected String createS3Bucket(String bucketName) throws
       client.getObjectStore().createS3Bucket(bucketName);
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, bucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucketName, ex);
       } else if (ex.getResult() != ResultCodes.BUCKET_ALREADY_EXISTS) {
         // S3 does not return error for bucket already exists, it just
         // returns the location.
@@ -138,8 +140,7 @@ public void deleteS3Bucket(String s3BucketName)
       client.getObjectStore().deleteS3Bucket(s3BucketName);
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED,
-            s3BucketName);
+        throw newError(S3ErrorTable.ACCESS_DENIED, s3BucketName, ex);
       }
       throw ex;
     }
@@ -182,8 +183,7 @@ private Iterator<? extends OzoneBucket> iterateBuckets(
       if (e.getResult() == ResultCodes.VOLUME_NOT_FOUND) {
         return Collections.emptyIterator();
       } else if (e.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED,
-            "listBuckets");
+        throw newError(S3ErrorTable.ACCESS_DENIED, "listBuckets", e);
       } else {
         throw e;
       }
@@ -95,6 +95,7 @@
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.INVALID_REQUEST;
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.NO_SUCH_UPLOAD;
 import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.PRECOND_FAILED;
+import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.newError;
 import static org.apache.hadoop.ozone.s3.util.S3Consts.ACCEPT_RANGE_HEADER;
 import static org.apache.hadoop.ozone.s3.util.S3Consts.CONTENT_RANGE_HEADER;
 import static org.apache.hadoop.ozone.s3.util.S3Consts.COPY_SOURCE_HEADER;
@@ -210,16 +211,15 @@ public Response put(
           .build();
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.NOT_A_FILE) {
-        OS3Exception os3Exception = S3ErrorTable.newError(INVALID_REQUEST,
-            keyPath);
+        OS3Exception os3Exception = newError(INVALID_REQUEST, keyPath, ex);
         os3Exception.setErrorMessage("An error occurred (InvalidRequest) " +
             "when calling the PutObject/MPU PartUpload operation: " +
             OZONE_OM_ENABLE_FILESYSTEM_PATHS + " is enabled Keys are" +
             " considered as Unix Paths. Path has Violated FS Semantics " +
             "which caused put operation to fail.");
         throw os3Exception;
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, keyPath);
+        throw newError(S3ErrorTable.ACCESS_DENIED, keyPath, ex);
       }
       LOG.error("Exception occurred in PutObject", ex);
       throw ex;
@@ -274,8 +274,7 @@ public Response get(
           length);
       LOG.debug("range Header provided: {}", rangeHeader);
       if (rangeHeader.isInValidRange()) {
-        throw S3ErrorTable.newError(
-            S3ErrorTable.INVALID_RANGE, rangeHeaderVal);
+        throw newError(S3ErrorTable.INVALID_RANGE, rangeHeaderVal);
       }
     }
     ResponseBuilder responseBuilder;
@@ -326,10 +325,9 @@ public Response get(
       return responseBuilder.build();
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.KEY_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .NO_SUCH_KEY, keyPath);
+        throw newError(S3ErrorTable.NO_SUCH_KEY, keyPath, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, keyPath);
+        throw newError(S3ErrorTable.ACCESS_DENIED, keyPath, ex);
       } else {
         throw ex;
       }
@@ -368,7 +366,7 @@ public Response head(
         // Just return 404 with no content
         return Response.status(Status.NOT_FOUND).build();
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, keyPath);
+        throw newError(S3ErrorTable.ACCESS_DENIED, keyPath, ex);
       } else {
         throw ex;
       }
@@ -398,7 +396,7 @@ private Response abortMultipartUpload(String bucket, String key, String
       ozoneBucket.abortMultipartUpload(key, uploadId);
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.NO_SUCH_MULTIPART_UPLOAD_ERROR) {
-        throw S3ErrorTable.newError(S3ErrorTable.NO_SUCH_UPLOAD, uploadId);
+        throw newError(S3ErrorTable.NO_SUCH_UPLOAD, uploadId, ex);
       }
       throw ex;
     }
@@ -433,8 +431,7 @@ public Response delete(
       bucket.deleteKey(keyPath);
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.BUCKET_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable
-            .NO_SUCH_BUCKET, bucketName);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, bucketName, ex);
       } else if (ex.getResult() == ResultCodes.KEY_NOT_FOUND) {
         //NOT_FOUND is not a problem, AWS doesn't throw exception for missing
         // keys. Just return 204
@@ -444,7 +441,7 @@ public Response delete(
         // NOT_FOUND is not a problem, AWS doesn't throw exception for missing
         // keys. Just return 204
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, keyPath);
+        throw newError(S3ErrorTable.ACCESS_DENIED, keyPath, ex);
       } else {
         throw ex;
       }
@@ -495,7 +492,7 @@ public Response initializeMultipartUpload(
           multipartUploadInitiateResponse).build();
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED, key);
+        throw newError(S3ErrorTable.ACCESS_DENIED, key, ex);
       }
       LOG.error("Error in Initiate Multipart Upload Request for bucket: {}, " +
           "key: {}", bucket, key, ex);
@@ -542,21 +539,21 @@ public Response completeMultipartUpload(@PathParam("bucket") String bucket,
           .build();
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.INVALID_PART) {
-        throw S3ErrorTable.newError(S3ErrorTable.INVALID_PART, key);
+        throw newError(S3ErrorTable.INVALID_PART, key, ex);
       } else if (ex.getResult() == ResultCodes.INVALID_PART_ORDER) {
-        throw S3ErrorTable.newError(S3ErrorTable.INVALID_PART_ORDER, key);
+        throw newError(S3ErrorTable.INVALID_PART_ORDER, key, ex);
       } else if (ex.getResult() == ResultCodes.NO_SUCH_MULTIPART_UPLOAD_ERROR) {
-        throw S3ErrorTable.newError(NO_SUCH_UPLOAD, uploadID);
+        throw newError(NO_SUCH_UPLOAD, uploadID, ex);
       } else if (ex.getResult() == ResultCodes.ENTITY_TOO_SMALL) {
-        throw S3ErrorTable.newError(ENTITY_TOO_SMALL, key);
+        throw newError(ENTITY_TOO_SMALL, key, ex);
      } else if (ex.getResult() == ResultCodes.INVALID_REQUEST) {
-        OS3Exception os3Exception = S3ErrorTable.newError(INVALID_REQUEST, key);
+        OS3Exception os3Exception = newError(INVALID_REQUEST, key, ex);
         os3Exception.setErrorMessage("An error occurred (InvalidRequest) " +
             "when calling the CompleteMultipartUpload operation: You must " +
             "specify at least one part");
         throw os3Exception;
       } else if (ex.getResult() == ResultCodes.NOT_A_FILE) {
-        OS3Exception os3Exception = S3ErrorTable.newError(INVALID_REQUEST, key);
+        OS3Exception os3Exception = newError(INVALID_REQUEST, key, ex);
         os3Exception.setErrorMessage("An error occurred (InvalidRequest) " +
             "when calling the CompleteMultipartUpload operation: " +
             OZONE_OM_ENABLE_FILESYSTEM_PATHS + " is enabled Keys are " +
@@ -602,8 +599,7 @@ private Response createMultipartKey(String bucket, String key, long length,
           headers.getHeaderString(COPY_SOURCE_IF_UNMODIFIED_SINCE);
       if (!checkCopySourceModificationTime(sourceKeyModificationTime,
           copySourceIfModifiedSince, copySourceIfUnmodifiedSince)) {
-        throw S3ErrorTable.newError(PRECOND_FAILED,
-            sourceBucket + "/" + sourceKey);
+        throw newError(PRECOND_FAILED, sourceBucket + "/" + sourceKey);
       }
 
       try (OzoneInputStream sourceObject =
@@ -651,11 +647,9 @@ private Response createMultipartKey(String bucket, String key, long length,
 
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.NO_SUCH_MULTIPART_UPLOAD_ERROR) {
-        throw S3ErrorTable.newError(NO_SUCH_UPLOAD,
-            uploadID);
+        throw newError(NO_SUCH_UPLOAD, uploadID, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED,
-            bucket + "/" + key);
+        throw newError(S3ErrorTable.ACCESS_DENIED, bucket + "/" + key, ex);
       }
       throw ex;
     }
@@ -710,11 +704,10 @@ private Response listParts(String bucket, String key, String uploadID,
 
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.NO_SUCH_MULTIPART_UPLOAD_ERROR) {
-        throw S3ErrorTable.newError(NO_SUCH_UPLOAD,
-            uploadID);
+        throw newError(NO_SUCH_UPLOAD, uploadID, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED,
-            bucket + "/" + key + "/" + uploadID);
+        throw newError(S3ErrorTable.ACCESS_DENIED,
+            bucket + "/" + key + "/" + uploadID, ex);
       }
       throw ex;
     }
@@ -750,8 +743,7 @@ private CopyObjectResponse copyObject(String copyHeader,
       // options like storage type are provided or not when source and
       // dest are given same
       if (storageTypeDefault) {
-        OS3Exception ex = S3ErrorTable.newError(S3ErrorTable
-            .INVALID_REQUEST, copyHeader);
+        OS3Exception ex = newError(S3ErrorTable.INVALID_REQUEST, copyHeader);
         ex.setErrorMessage("This copy request is illegal because it is " +
             "trying to copy an object to it self itself without changing " +
             "the object's metadata, storage class, website redirect " +
@@ -797,12 +789,12 @@ private CopyObjectResponse copyObject(String copyHeader,
       return copyObjectResponse;
     } catch (OMException ex) {
       if (ex.getResult() == ResultCodes.KEY_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable.NO_SUCH_KEY, sourceKey);
+        throw newError(S3ErrorTable.NO_SUCH_KEY, sourceKey, ex);
       } else if (ex.getResult() == ResultCodes.BUCKET_NOT_FOUND) {
-        throw S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, sourceBucket);
+        throw newError(S3ErrorTable.NO_SUCH_BUCKET, sourceBucket, ex);
       } else if (ex.getResult() == ResultCodes.PERMISSION_DENIED) {
-        throw S3ErrorTable.newError(S3ErrorTable.ACCESS_DENIED,
-            destBucket + "/" + destkey);
+        throw newError(S3ErrorTable.ACCESS_DENIED,
+            destBucket + "/" + destkey, ex);
       }
       throw ex;
     } finally {
@@ -829,7 +821,7 @@ public static Pair<String, String> parseSourceHeader(String copyHeader)
     }
     int pos = header.indexOf('/');
     if (pos == -1) {
-      OS3Exception ex = S3ErrorTable.newError(INVALID_ARGUMENT, header);
+      OS3Exception ex = newError(INVALID_ARGUMENT, header);
       ex.setErrorMessage("Copy Source must mention the source bucket and " +
           "key: sourcebucket/sourcekey");
       throw ex;
@@ -840,7 +832,7 @@ public static Pair<String, String> parseSourceHeader(String copyHeader)
       String key = urlDecode(header.substring(pos + 1));
       return Pair.of(bucket, key);
     } catch (UnsupportedEncodingException e) {
-      OS3Exception ex = S3ErrorTable.newError(INVALID_ARGUMENT, header);
+      OS3Exception ex = newError(INVALID_ARGUMENT, header, e);
       ex.setErrorMessage("Copy Source header could not be url-decoded");
       throw ex;
     }
@@ -851,8 +843,7 @@ private static S3StorageType toS3StorageType(String storageType)
     try {
       return S3StorageType.valueOf(storageType);
     } catch (IllegalArgumentException ex) {
-      throw S3ErrorTable.newError(INVALID_ARGUMENT,
-          storageType);
+      throw newError(INVALID_ARGUMENT, storageType, ex);
     }
   }
 
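Every hunk above applies the same pattern: the two-argument S3ErrorTable.newError(error, resource) call is replaced by a statically imported three-argument newError(error, resource, ex) that also receives the original OMException, so the root cause is recorded where the S3 error is built instead of being silently swallowed at each endpoint. A minimal sketch of what such an overload might look like is shown below; this is illustrative only, not the actual Ozone implementation, and the OS3Exception constructor and accessors it uses (getCode, getErrorMessage, getHttpCode, setResource) are assumptions.

import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative sketch of a cause-aware newError overload; the real
// S3ErrorTable in Ozone may differ in names and details.
public final class S3ErrorSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(S3ErrorSketch.class);

  private S3ErrorSketch() {
  }

  public static OS3Exception newError(OS3Exception template, String resource,
      Exception cause) {
    // Copy the static error definition so the shared template is not mutated.
    OS3Exception err = new OS3Exception(template.getCode(),
        template.getErrorMessage(), template.getHttpCode());
    err.setResource(resource);
    if (cause != null) {
      // Log the root cause once, at the point where the S3 error is created,
      // so individual endpoints no longer need their own LOG.error calls.
      LOG.error("Returning {} to client, resource: {}",
          template.getCode(), resource, cause);
    }
    return err;
  }
}

With a helper along these lines, a call site such as newError(S3ErrorTable.ACCESS_DENIED, bucketName, ex) keeps the client-facing S3 error unchanged while preserving the underlying OMException in the gateway log.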