ReactorKafkaBinderConfiguration.java
@@ -18,7 +18,6 @@

import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.Binder;
@@ -28,12 +27,16 @@
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

/**
* Binder configuration for ReactorKafka.
*
* @author Gary Russell
* @author Chris Bono
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(Binder.class)
@Import({ KafkaAutoConfiguration.class })
@EnableConfigurationProperties({ KafkaExtendedBindingProperties.class })
@EnableConfigurationProperties({ KafkaProperties.class, KafkaExtendedBindingProperties.class })
public class ReactorKafkaBinderConfiguration {

@Bean
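The net effect of this change: the reactor Kafka binder no longer imports Spring Boot's KafkaAutoConfiguration and instead binds `spring.kafka.*` itself via `@EnableConfigurationProperties(KafkaProperties.class)`. A minimal sketch of the scenario this enables (the application class below is hypothetical, not part of this PR): an app that excludes the auto-configuration while still using the binder.

```java
// Hypothetical app, not part of this PR: because the binder now enables
// KafkaProperties itself, excluding KafkaAutoConfiguration no longer
// prevents the binder from seeing spring.kafka.* configuration.
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;

@SpringBootApplication(exclude = KafkaAutoConfiguration.class)
public class ReactorBinderWithoutBootKafkaApp {

	public static void main(String[] args) {
		new SpringApplicationBuilder(ReactorBinderWithoutBootKafkaApp.class)
				.web(WebApplicationType.NONE)
				.run(args);
	}
}
```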
ReactorKafkaBinderIntegrationTests.java
@@ -24,14 +24,16 @@
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import reactor.core.publisher.Flux;
import reactor.kafka.receiver.ReceiverRecord;

import org.springframework.boot.SpringApplication;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.integration.support.MessageBuilder;
@@ -42,76 +44,86 @@
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.condition.EmbeddedKafkaCondition;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.messaging.Message;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import static org.assertj.core.api.Assertions.assertThat;

/**
* Integration tests for {@link ReactorKafkaBinder}.
*
* @author Soby Chacko
* @author Gary Russell
* @author Chris Bono
*/
@ExtendWith(SpringExtension.class)
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
@EmbeddedKafka(topics = { "uppercased-words", "lowercased-words" })
public class ReactorKafkaBinderIntegrationTests {

private static final EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaCondition.getBroker();
class ReactorKafkaBinderIntegrationTests {

private static Consumer<String, String> consumer1;
@Autowired
private EmbeddedKafkaBroker embeddedKafka;

private static Consumer<String, String> consumer2;
@ParameterizedTest
@ValueSource(booleans = { false, true })
void endToEndReactorKafkaBinder(boolean excludeKafkaAutoConfig) {

@BeforeAll
public static void setUp() {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group", "false",
embeddedKafka);
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("group1", "false", embeddedKafka);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
consumer1 = cf.createConsumer();
Consumer<String, String> consumer1 = cf.createConsumer();
embeddedKafka.consumeFromEmbeddedTopics(consumer1, "uppercased-words");
consumer2 = cf.createConsumer("group2", null);
Consumer<String, String> consumer2 = cf.createConsumer("group2", null);
embeddedKafka.consumeFromEmbeddedTopics(consumer2, "lowercased-words");
}

@Test
void testEndtoEndReactorKafkaBinder() throws Exception {
SpringApplication app = new SpringApplication(ReactiveKafkaApplication.class);
app.setWebApplicationType(WebApplicationType.NONE);

try (ConfigurableApplicationContext context = app.run(
try (ConfigurableApplicationContext context = new SpringApplicationBuilder(ReactiveKafkaApplication.class)
.web(WebApplicationType.NONE).run(
"--server.port=0",
"--spring.jmx.enabled=false",
"--spring.cloud.function.definition=uppercase;lowercase",
"--spring.cloud.stream.function.reactive.uppercase=true",
"--spring.cloud.stream.function.reactive.lowercase=true",
"--spring.cloud.stream.bindings.uppercase-in-0.group=grp1",
"--spring.cloud.stream.bindings.uppercase-in-0.destination=words",
"--spring.cloud.stream.bindings.uppercase-in-0.destination=words1",
"--spring.cloud.stream.bindings.uppercase-out-0.destination=uppercased-words",
"--spring.cloud.stream.bindings.lowercase-in-0.group=grp2",
"--spring.cloud.stream.bindings.lowercase-in-0.destination=words1",
"--spring.cloud.stream.bindings.lowercase-in-0.destination=words2",
"--spring.cloud.stream.bindings.lowercase-out-0.destination=lowercased-words",
"--spring.cloud.stream.kafka.bindings.lowercase-in-0.consumer.converterBeanName=fullRR",
"--spring.cloud.stream.kafka.binder.brokers=" + embeddedKafka.getBrokersAsString())) {
"--spring.cloud.stream.kafka.binder.brokers=" + embeddedKafka.getBrokersAsString(),
excludeKafkaAutoConfigParam(excludeKafkaAutoConfig))) {

Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
try {
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.send("words", "foobar");
template.send("words1", "BAZQUX");

ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer1, "uppercased-words");
assertThat(cr.value()).isEqualTo("FOOBAR");
cr = KafkaTestUtils.getSingleRecord(consumer2, "lowercased-words");
assertThat(cr.value()).isEqualTo("bazqux");
template.send("words1", "foobar");
template.send("words2", "BAZQUX");

assertThat(KafkaTestUtils.getSingleRecord(consumer1, "uppercased-words"))
.isNotNull()
.extracting(ConsumerRecord::value)
.isEqualTo("FOOBAR");

assertThat(KafkaTestUtils.getSingleRecord(consumer2, "lowercased-words"))
.isNotNull()
.extracting(ConsumerRecord::value)
.isEqualTo("bazqux");
}
finally {
pf.destroy();
}
}
}

private String excludeKafkaAutoConfigParam(boolean excludeKafkaAutoConfig) {
return excludeKafkaAutoConfig ?
"--spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration" : "foo=bar";
}

@EnableAutoConfiguration
public static class ReactiveKafkaApplication {

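The rewritten test runs the full end-to-end flow twice, once with Boot's Kafka auto-configuration active and once with it excluded via `spring.autoconfigure.exclude`; the `foo=bar` fallback is just a harmless placeholder argument so both branches pass the same argument list. A stripped-down sketch of that toggle pattern (class names are illustrative; assumes JUnit 5, AssertJ, and Spring Boot on the test classpath, no Kafka broker involved):

```java
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.context.ConfigurableApplicationContext;

import static org.assertj.core.api.Assertions.assertThat;

class AutoConfigToggleSketchTests {

	@ParameterizedTest
	@ValueSource(booleans = { false, true })
	void contextStartsWithAndWithoutKafkaAutoConfig(boolean excludeKafkaAutoConfig) {
		// Harmless placeholder keeps the argument list uniform when nothing is excluded
		String toggle = excludeKafkaAutoConfig
				? "--spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration"
				: "--sketch.placeholder=true";
		try (ConfigurableApplicationContext context = new SpringApplicationBuilder(SketchApp.class)
				.web(WebApplicationType.NONE)
				.run("--spring.jmx.enabled=false", toggle)) {
			assertThat(context.isRunning()).isTrue();
		}
	}

	@SpringBootApplication
	static class SketchApp {
	}
}
```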
GlobalKTableBinderConfiguration.java
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,8 +22,8 @@
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.AdminClientConfigCustomizer;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
@@ -37,13 +37,14 @@
* Configuration for GlobalKTable binder.
*
* @author Soby Chacko
* @author Chris Bono
* @since 2.1.0
*/
@Configuration
@Import({ KafkaAutoConfiguration.class,
MultiBinderPropertiesConfiguration.class,
@Import({ MultiBinderPropertiesConfiguration.class,
KafkaStreamsBinderHealthIndicatorConfiguration.class,
KafkaStreamsJaasConfiguration.class})
@EnableConfigurationProperties(KafkaProperties.class)
public class GlobalKTableBinderConfiguration {

@Bean
KStreamBinderConfiguration.java
@@ -1,5 +1,5 @@
/*
* Copyright 2017-2021 the original author or authors.
* Copyright 2017-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,8 +19,8 @@
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.AdminClientConfigCustomizer;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
@@ -36,12 +36,13 @@
* @author Marius Bogoevici
* @author Gary Russell
* @author Soby Chacko
* @author Chris Bono
*/
@Configuration
@Import({ KafkaAutoConfiguration.class,
MultiBinderPropertiesConfiguration.class,
@Import({ MultiBinderPropertiesConfiguration.class,
KafkaStreamsBinderHealthIndicatorConfiguration.class,
KafkaStreamsJaasConfiguration.class})
@EnableConfigurationProperties(KafkaProperties.class)
public class KStreamBinderConfiguration {

@Bean
KTableBinderConfiguration.java
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,8 +22,8 @@
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.stream.binder.kafka.provisioning.AdminClientConfigCustomizer;
import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner;
import org.springframework.cloud.stream.binder.kafka.streams.properties.KafkaStreamsBinderConfigurationProperties;
@@ -37,13 +37,14 @@
* Configuration for KTable binder.
*
* @author Soby Chacko
* @author Chris Bono
*/
@SuppressWarnings("ALL")
@Configuration
@Import({ KafkaAutoConfiguration.class,
MultiBinderPropertiesConfiguration.class,
@Import({ MultiBinderPropertiesConfiguration.class,
KafkaStreamsBinderHealthIndicatorConfiguration.class,
KafkaStreamsJaasConfiguration.class})
@EnableConfigurationProperties(KafkaProperties.class)
public class KTableBinderConfiguration {

@Bean
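All three Kafka Streams binder configurations get the same treatment: KafkaAutoConfiguration is dropped from `@Import` and the binder enables the `KafkaProperties` binding directly, so `spring.kafka.*` values remain injectable even when the Boot auto-configuration is excluded. A minimal sketch of that annotation pattern (the configuration class and bean below are illustrative, not part of this PR):

```java
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(KafkaProperties.class)
public class BinderStyleConfigurationSketch {

	@Bean
	public String bootstrapServersSummary(KafkaProperties kafkaProperties) {
		// Bound from spring.kafka.* whether or not KafkaAutoConfiguration is active
		return String.join(",", kafkaProperties.getBootstrapServers());
	}
}
```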
KafkaStreamsBinderSupportAutoConfiguration.java
@@ -83,7 +83,7 @@
* @author Gary Russell
*/
@Configuration
@EnableConfigurationProperties(KafkaStreamsExtendedBindingProperties.class)
@EnableConfigurationProperties({ KafkaProperties.class, KafkaStreamsExtendedBindingProperties.class })
@ConditionalOnBean(BindingService.class)
@AutoConfigureAfter(BindingServiceConfiguration.class)
public class KafkaStreamsBinderSupportAutoConfiguration {
KafkaStreamsBinderBootstrapTest.java
@@ -29,6 +29,8 @@
import org.apache.kafka.streams.kstream.KTable;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@@ -44,11 +46,14 @@
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;

/**
* Integration tests to verify the bootstrap of a SpringBoot application using the KafkaStreams binder.
*
* @author Soby Chacko
* @author Eduard Domínguez
* @author Chris Bono
*/
@EmbeddedKafka
public class KafkaStreamsBinderBootstrapTest {
class KafkaStreamsBinderBootstrapTest {

private static final EmbeddedKafkaBroker embeddedKafka = EmbeddedKafkaCondition.getBroker();

@@ -57,54 +62,61 @@ public void before() {
System.clearProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM);
}

@Test
void testKStreamBinderWithCustomEnvironmentCanStart() {
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(
SimpleKafkaStreamsApplication.class).web(WebApplicationType.NONE).run(
"--spring.cloud.function.definition=input1;input2;input3",
"--spring.cloud.stream.kafka.streams.bindings.input1-in-0.consumer.application-id"
+ "=testKStreamBinderWithCustomEnvironmentCanStart",
@ParameterizedTest
@ValueSource(booleans = { false, true })
void kafkaStreamsBinderWithStandardConfigCanStart(boolean excludeKafkaAutoConfig) {
try (ConfigurableApplicationContext context = new SpringApplicationBuilder(SimpleKafkaStreamsApplication.class)
.web(WebApplicationType.NONE).run(
"--spring.cloud.function.definition=input1;input2;input3",
"--spring.cloud.stream.kafka.streams.bindings.input1-in-0.consumer.application-id"
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart",
"--spring.cloud.stream.kafka.streams.bindings.input2-in-0.consumer.application-id"
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart-foo",
"--spring.cloud.stream.kafka.streams.bindings.input3-in-0.consumer.application-id"
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart-foobar",
"--spring.cloud.stream.kafka.streams.binder.brokers="
+ embeddedKafka.getBrokersAsString(),
excludeKafkaAutoConfigParam(excludeKafkaAutoConfig))) { // @checkstyle:off
} // @checkstyle:on
}

@ParameterizedTest
@ValueSource(booleans = { false, true })
void kafkaStreamsBinderWithCustomConfigCanStart(boolean excludeKafkaAutoConfig) {
try (ConfigurableApplicationContext context = new SpringApplicationBuilder(SimpleKafkaStreamsApplication.class)
.web(WebApplicationType.NONE).run(
"--spring.cloud.function.definition=input1;input2;input3",
"--spring.cloud.stream.kafka.streams.bindings.input1-in-0.consumer.application-id"
+ "=testKStreamBinderWithCustomEnvironmentCanStart",
"--spring.cloud.stream.kafka.streams.bindings.input2-in-0.consumer.application-id"
+ "=testKStreamBinderWithCustomEnvironmentCanStart-foo",
+ "=testKStreamBinderWithCustomEnvironmentCanStart-foo",
"--spring.cloud.stream.kafka.streams.bindings.input3-in-0.consumer.application-id"
+ "=testKStreamBinderWithCustomEnvironmentCanStart-foobar",
"--spring.cloud.stream.bindings.input1-in-0.destination=foo",
"--spring.cloud.stream.bindings.input1-in-0.binder=kstreamBinder",
"--spring.cloud.stream.binders.kstreamBinder.type=kstream",
"--spring.cloud.stream.binders.kstreamBinder.environment"
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
+ "=" + embeddedKafka.getBrokersAsString(),
+ "=testKStreamBinderWithCustomEnvironmentCanStart-foobar",
"--spring.cloud.stream.bindings.input1-in-0.destination=foo",
"--spring.cloud.stream.bindings.input1-in-0.binder=kstreamBinder",
"--spring.cloud.stream.binders.kstreamBinder.type=kstream",
"--spring.cloud.stream.binders.kstreamBinder.environment"
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
+ "=" + embeddedKafka.getBrokersAsString(),
"--spring.cloud.stream.bindings.input2-in-0.destination=bar",
"--spring.cloud.stream.bindings.input2-in-0.binder=ktableBinder",
"--spring.cloud.stream.binders.ktableBinder.type=ktable",
"--spring.cloud.stream.binders.ktableBinder.environment"
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
+ "=" + embeddedKafka.getBrokersAsString(),
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
+ "=" + embeddedKafka.getBrokersAsString(),
"--spring.cloud.stream.bindings.input3-in-0.destination=foobar",
"--spring.cloud.stream.bindings.input3-in-0.binder=globalktableBinder",
"--spring.cloud.stream.binders.globalktableBinder.type=globalktable",
"--spring.cloud.stream.binders.globalktableBinder.environment"
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
+ "=" + embeddedKafka.getBrokersAsString());

applicationContext.close();
+ ".spring.cloud.stream.kafka.streams.binder.brokers"
+ "=" + embeddedKafka.getBrokersAsString(),
excludeKafkaAutoConfigParam(excludeKafkaAutoConfig))) { // @checkstyle:off
} // @checkstyle:on
}

@Test
void testKafkaStreamsBinderWithStandardConfigurationCanStart() {
ConfigurableApplicationContext applicationContext = new SpringApplicationBuilder(
SimpleKafkaStreamsApplication.class).web(WebApplicationType.NONE).run(
"--spring.cloud.function.definition=input1;input2;input3",
"--spring.cloud.stream.kafka.streams.bindings.input1-in-0.consumer.application-id"
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart",
"--spring.cloud.stream.kafka.streams.bindings.input2-in-0.consumer.application-id"
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart-foo",
"--spring.cloud.stream.kafka.streams.bindings.input3-in-0.consumer.application-id"
+ "=testKafkaStreamsBinderWithStandardConfigurationCanStart-foobar",
"--spring.cloud.stream.kafka.streams.binder.brokers="
+ embeddedKafka.getBrokersAsString());

applicationContext.close();
private String excludeKafkaAutoConfigParam(boolean excludeKafkaAutoConfig) {
return excludeKafkaAutoConfig ?
"--spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration" : "foo=bar";
}

@Test