```scala
- import collection.JavaConverters._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.kinesis.KinesisInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kinesis.KinesisInitialPositions
- import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration
- import com.amazonaws.services.kinesis.metrics.interfaces.MetricsLevel
+ import software.amazon.kinesis.metrics.{MetricsLevel, MetricsUtil}
val kinesisStream = KinesisInputDStream.builder
.streamingContext(streamingContext)
@@ -138,21 +136,23 @@ A Kinesis stream can be set up at one of the valid Kinesis endpoints with 1 or m
.checkpointInterval([checkpoint interval])
.storageLevel(StorageLevel.MEMORY_AND_DISK_2)
.metricsLevel(MetricsLevel.DETAILED)
- .metricsEnabledDimensions(KinesisClientLibConfiguration.DEFAULT_METRICS_ENABLED_DIMENSIONS.asScala.toSet)
+ .metricsEnabledDimensions(
+ Set(MetricsUtil.OPERATION_DIMENSION_NAME, MetricsUtil.SHARD_ID_DIMENSION_NAME))
.buildWithMessageHandler([message handler])
```
```java
+ import java.util.Set;
+ import scala.jdk.javaapi.CollectionConverters;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.kinesis.KinesisInputDStream;
import org.apache.spark.streaming.Seconds;
import org.apache.spark.streaming.StreamingContext;
import org.apache.spark.streaming.kinesis.KinesisInitialPositions;
- import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
- import com.amazonaws.services.kinesis.metrics.interfaces.MetricsLevel;
- import scala.collection.JavaConverters;
+ import software.amazon.kinesis.metrics.MetricsLevel;
+ import software.amazon.kinesis.metrics.MetricsUtil;
KinesisInputDStream kinesisStream = KinesisInputDStream.builder()
.streamingContext(streamingContext)
@@ -165,11 +165,10 @@ A Kinesis stream can be set up at one of the valid Kinesis endpoints with 1 or m
.storageLevel(StorageLevel.MEMORY_AND_DISK_2)
.metricsLevel(MetricsLevel.DETAILED)
.metricsEnabledDimensions(
- JavaConverters.asScalaSetConverter(
- KinesisClientLibConfiguration.DEFAULT_METRICS_ENABLED_DIMENSIONS
- )
- .asScala().toSet()
- )
+ CollectionConverters.asScala(
+ Set.of(
+ MetricsUtil.OPERATION_DIMENSION_NAME,
+ MetricsUtil.SHARD_ID_DIMENSION_NAME)).toSet())
.buildWithMessageHandler([message handler]);
```
@@ -194,7 +193,7 @@ A Kinesis stream can be set up at one of the valid Kinesis endpoints with 1 or m
- `[initial position]`: Can be either `KinesisInitialPositions.TrimHorizon` or `KinesisInitialPositions.Latest` or `KinesisInitialPositions.AtTimestamp` (see [`Kinesis Checkpointing`](#kinesis-checkpointing) section and [`Amazon Kinesis API documentation`](http://docs.aws.amazon.com/streams/latest/dev/developing-consumers-with-sdk.html) for more details).
- - `[message handler]`: A function that takes a Kinesis `Record` and outputs generic `T`.
+ - `[message handler]`: A function that takes a Kinesis `KinesisClientRecord` and outputs generic `T`.
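As a minimal sketch of such a handler (the handler name and the UTF-8 decoding below are illustrative assumptions, not part of this change), a function of this shape can be passed to `buildWithMessageHandler`:

```scala
import java.nio.charset.StandardCharsets
import software.amazon.kinesis.retrieval.KinesisClientRecord

// Decode each record's payload to a UTF-8 string.
// In KCL 2.x, KinesisClientRecord.data() returns a ByteBuffer.
val messageHandler: KinesisClientRecord => String = record => {
  val buffer = record.data().duplicate() // duplicate so the shared buffer's position is untouched
  val bytes = new Array[Byte](buffer.remaining())
  buffer.get(bytes)
  new String(bytes, StandardCharsets.UTF_8)
}
```

Passing this as `.buildWithMessageHandler(messageHandler)` would yield a DStream of strings instead of raw records.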
In other versions of the API, you can also specify the AWS access key and secret key directly.
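A rough sketch of that option, assuming the existing `SparkAWSCredentials` builder in `spark-streaming-kinesis-asl` is left untouched by this patch:

```scala
import org.apache.spark.streaming.kinesis.{KinesisInputDStream, SparkAWSCredentials}

// Explicit keys are shown only for illustration; prefer the default
// AWS credential provider chain in real deployments.
val credentials = SparkAWSCredentials.builder
  .basicCredentials([AWS access key ID], [AWS secret key])
  .build()

val streamWithKeys = KinesisInputDStream.builder
  .streamingContext(streamingContext)
  .streamName([streamName])
  .checkpointAppName([Kinesis app name])
  .kinesisCredentials(credentials)
  .build()
```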
diff --git a/pom.xml b/pom.xml
index ae38b87c3f957..143808b488419 100644
--- a/pom.xml
+++ b/pom.xml
@@ -159,12 +159,11 @@
4.2.37
1.12.1
- 1.15.3
+ 2.7.2
- 1.12.681
2.35.4
- 1.0.5
+ 1.0.6
hadoop3-2.2.29
1.3.0