Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions presto-docs/src/main/sphinx/connector/hive.rst
Original file line number Diff line number Diff line change
Expand Up @@ -327,6 +327,10 @@ Property Name Description
set this to the AWS region-specific endpoint
(e.g., ``http[s]://<bucket>.s3-<AWS-region>.amazonaws.com``).

``hive.s3.storage-class`` The S3 storage class to use when writing the data. Currently only
``STANDARD`` and ``INTELLIGENT_TIERING`` storage classes are supported.
The default storage class is ``STANDARD``.

``hive.s3.signer-type`` Specify a different signer type for S3-compatible storage.
Example: ``S3SignerType`` for v2 signer type

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ public class HiveS3Config
private String s3AwsAccessKey;
private String s3AwsSecretKey;
private String s3Endpoint;
private PrestoS3StorageClass s3StorageClass = PrestoS3StorageClass.STANDARD;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we directly import the enum ?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@Praveen2112 Importing Enum directly might look off since PrestoS3SignerType and PrestoS3AclType are used without importing Enum directly.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

if a static import reduces readability, it should not be used;

here it is OK to static import, except it would be inconsistent. Please keep as is (inconsistency decreases readability)

private PrestoS3SignerType s3SignerType;
private String s3SignerClass;
private boolean s3PathStyleAccess;
Expand Down Expand Up @@ -99,6 +100,20 @@ public HiveS3Config setS3Endpoint(String s3Endpoint)
return this;
}

@NotNull
// Returns the S3 storage class applied to data written by the connector.
// Never null: the field is initialized to PrestoS3StorageClass.STANDARD.
public PrestoS3StorageClass getS3StorageClass()
{
return s3StorageClass;
}

@Config("hive.s3.storage-class")
@ConfigDescription("AWS S3 storage class to use when writing the data")
// Bound from the "hive.s3.storage-class" config property; returns this
// for the fluent-setter chaining style used by the config framework.
public HiveS3Config setS3StorageClass(PrestoS3StorageClass s3StorageClass)
{
this.s3StorageClass = s3StorageClass;
return this;
}

public PrestoS3SignerType getS3SignerType()
{
return s3SignerType;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@
import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SSE_TYPE;
import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_SSL_ENABLED;
import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STAGING_DIRECTORY;
import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_STORAGE_CLASS;
import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USER_AGENT_PREFIX;
import static io.prestosql.plugin.hive.s3.PrestoS3FileSystem.S3_USE_INSTANCE_CREDENTIALS;

Expand All @@ -58,6 +59,7 @@ public class PrestoS3ConfigurationInitializer
private final String awsAccessKey;
private final String awsSecretKey;
private final String endpoint;
private final PrestoS3StorageClass s3StorageClass;
private final PrestoS3SignerType signerType;
private final boolean pathStyleAccess;
private final boolean useInstanceCredentials;
Expand Down Expand Up @@ -91,6 +93,7 @@ public PrestoS3ConfigurationInitializer(HiveS3Config config)
this.awsAccessKey = config.getS3AwsAccessKey();
this.awsSecretKey = config.getS3AwsSecretKey();
this.endpoint = config.getS3Endpoint();
this.s3StorageClass = config.getS3StorageClass();
this.signerType = config.getS3SignerType();
this.signerClass = config.getS3SignerClass();
this.pathStyleAccess = config.isS3PathStyleAccess();
Expand Down Expand Up @@ -136,6 +139,7 @@ public void initializeConfiguration(Configuration config)
if (endpoint != null) {
config.set(S3_ENDPOINT, endpoint);
}
config.set(S3_STORAGE_CLASS, s3StorageClass.name());
if (signerType != null) {
config.set(S3_SIGNER_TYPE, signerType.name());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import com.amazonaws.services.s3.model.SSEAwsKeyManagementParams;
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.transfer.Transfer;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
Expand Down Expand Up @@ -164,6 +165,7 @@ public class PrestoS3FileSystem
public static final String S3_ACL_TYPE = "presto.s3.upload-acl-type";
public static final String S3_SKIP_GLACIER_OBJECTS = "presto.s3.skip-glacier-objects";
public static final String S3_REQUESTER_PAYS_ENABLED = "presto.s3.requester-pays.enabled";
public static final String S3_STORAGE_CLASS = "presto.s3.storage-class";

static final String S3_DIRECTORY_OBJECT_CONTENT_TYPE = "application/x-directory";

Expand Down Expand Up @@ -198,6 +200,7 @@ public class PrestoS3FileSystem
private PrestoS3AclType s3AclType;
private boolean skipGlacierObjects;
private boolean requesterPaysEnabled;
private PrestoS3StorageClass s3StorageClass;

@Override
public void initialize(URI uri, Configuration conf)
Expand Down Expand Up @@ -243,6 +246,7 @@ public void initialize(URI uri, Configuration conf)
String userAgentPrefix = conf.get(S3_USER_AGENT_PREFIX, defaults.getS3UserAgentPrefix());
this.skipGlacierObjects = conf.getBoolean(S3_SKIP_GLACIER_OBJECTS, defaults.isSkipGlacierObjects());
this.requesterPaysEnabled = conf.getBoolean(S3_REQUESTER_PAYS_ENABLED, defaults.isRequesterPaysEnabled());
this.s3StorageClass = conf.getEnum(S3_STORAGE_CLASS, defaults.getS3StorageClass());

ClientConfiguration configuration = new ClientConfiguration()
.withMaxErrorRetry(maxErrorRetries)
Expand Down Expand Up @@ -404,7 +408,7 @@ public FSDataOutputStream create(Path path, FsPermission permission, boolean ove

String key = keyFromPath(qualifiedPath(path));
return new FSDataOutputStream(
new PrestoS3OutputStream(s3, getBucketName(uri), key, tempFile, sseEnabled, sseType, sseKmsKeyId, multiPartUploadMinFileSize, multiPartUploadMinPartSize, s3AclType, requesterPaysEnabled),
new PrestoS3OutputStream(s3, getBucketName(uri), key, tempFile, sseEnabled, sseType, sseKmsKeyId, multiPartUploadMinFileSize, multiPartUploadMinPartSize, s3AclType, requesterPaysEnabled, s3StorageClass),
statistics);
}

Expand Down Expand Up @@ -1171,6 +1175,7 @@ private static class PrestoS3OutputStream
private final String sseKmsKeyId;
private final CannedAccessControlList aclType;
private final boolean requesterPaysEnabled;
private final StorageClass s3StorageClass;

private boolean closed;

Expand All @@ -1185,7 +1190,8 @@ public PrestoS3OutputStream(
long multiPartUploadMinFileSize,
long multiPartUploadMinPartSize,
PrestoS3AclType aclType,
boolean requesterPaysEnabled)
boolean requesterPaysEnabled,
PrestoS3StorageClass s3StorageClass)
throws IOException
{
super(new BufferedOutputStream(new FileOutputStream(requireNonNull(tempFile, "tempFile is null"))));
Expand All @@ -1196,6 +1202,7 @@ public PrestoS3OutputStream(
.withMultipartUploadThreshold(multiPartUploadMinFileSize).build();

requireNonNull(aclType, "aclType is null");
requireNonNull(s3StorageClass, "s3StorageClass is null");
this.aclType = aclType.getCannedACL();
this.host = requireNonNull(host, "host is null");
this.key = requireNonNull(key, "key is null");
Expand All @@ -1204,6 +1211,7 @@ public PrestoS3OutputStream(
this.sseType = requireNonNull(sseType, "sseType is null");
this.sseKmsKeyId = sseKmsKeyId;
this.requesterPaysEnabled = requesterPaysEnabled;
this.s3StorageClass = s3StorageClass.getS3StorageClass();

log.debug("OutputStream for key '%s' using file: %s", key, tempFile);
}
Expand Down Expand Up @@ -1257,6 +1265,7 @@ private void uploadObject()
break;
}
}
request.withStorageClass(s3StorageClass);

request.withCannedAcl(aclType);

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive.s3;

import com.amazonaws.services.s3.model.StorageClass;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.StorageClass.Standard;
import com.amazonaws.services.s3.model.StorageClass.IntelligentTiering;

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@Praveen2112 These statement below might look confusing if I import the storage classes Standard and IntelligentTiering directly

   STANDARD(Standard),
   INTELLIGENT_TIERING(IntelligentTiering);

Should I still change this?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

#2686 (comment) -- ie do not import them


import static java.util.Objects.requireNonNull;

/**
 * S3 storage classes supported for data written by the Hive connector.
 * Wraps the AWS SDK {@link StorageClass} values that are currently allowed
 * via the {@code hive.s3.storage-class} configuration property.
 */
public enum PrestoS3StorageClass
{
    STANDARD(StorageClass.Standard),
    INTELLIGENT_TIERING(StorageClass.IntelligentTiering);

    // Immutable mapping to the AWS SDK storage class; final since enum
    // constants are shared singletons and must not be mutated.
    private final StorageClass s3StorageClass;

    PrestoS3StorageClass(StorageClass s3StorageClass)
    {
        this.s3StorageClass = requireNonNull(s3StorageClass, "s3StorageClass is null");
    }

    /**
     * @return the AWS SDK {@link StorageClass} to pass on upload requests
     */
    public StorageClass getS3StorageClass()
    {
        return s3StorageClass;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ public void testDefaults()
.setS3PathStyleAccess(false)
.setS3UseInstanceCredentials(true)
.setS3IamRole(null)
.setS3StorageClass(PrestoS3StorageClass.STANDARD)
.setS3SslEnabled(true)
.setS3SseEnabled(false)
.setS3SseType(PrestoS3SseType.S3)
Expand Down Expand Up @@ -77,6 +78,7 @@ public void testExplicitPropertyMappings()
.put("hive.s3.path-style-access", "true")
.put("hive.s3.use-instance-credentials", "false")
.put("hive.s3.iam-role", "roleArn")
.put("hive.s3.storage-class", "INTELLIGENT_TIERING")
.put("hive.s3.ssl.enabled", "false")
.put("hive.s3.sse.enabled", "true")
.put("hive.s3.sse.type", "KMS")
Expand Down Expand Up @@ -109,6 +111,7 @@ public void testExplicitPropertyMappings()
.setS3PathStyleAccess(true)
.setS3UseInstanceCredentials(false)
.setS3IamRole("roleArn")
.setS3StorageClass(PrestoS3StorageClass.INTELLIGENT_TIERING)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I guess we can import the enum directly

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@Praveen2112 Importing Enum directly might look off since PrestoS3SignerType and PrestoS3AclType are used without importing Enum directly.

.setS3SslEnabled(false)
.setS3SseEnabled(true)
.setS3SseType(PrestoS3SseType.KMS)
Expand Down