Polish #14271 (Closed)

@@ -57,7 +57,7 @@ public class KafkaProperties {

/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Applies to all components unless overridden.
* connections to the Kafka cluster. Applies to all components unless overridden.
*/
private List<String> bootstrapServers = new ArrayList<>(
Collections.singletonList("localhost:9092"));
@@ -230,7 +230,7 @@ public static class Consumer {

/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Overrides the global property, for consumers.
* connections to the Kafka cluster. Overrides the global property, for consumers.
*/
private List<String> bootstrapServers;

@@ -440,7 +440,7 @@ public static class Producer {

/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Overrides the global property, for producers.
* connections to the Kafka cluster. Overrides the global property, for producers.
*/
private List<String> bootstrapServers;

@@ -669,7 +669,7 @@ public static class Streams {

/**
* Comma-delimited list of host:port pairs to use for establishing the initial
* connection to the Kafka cluster. Overrides the global property, for streams.
* connections to the Kafka cluster. Overrides the global property, for streams.
*/
private List<String> bootstrapServers;
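
Since the Consumer, Producer and Streams sections each repeat this property, the per-component list overrides the global `spring.kafka.bootstrap-servers` when one is set. A minimal sketch of that fallback, assuming an injected `KafkaProperties` bean; the resolver class and its method are hypothetical, not part of the auto-configuration:

[source,java,indent=0]
----
	import java.util.List;

	import org.springframework.boot.autoconfigure.kafka.KafkaProperties;

	class ConsumerBootstrapServersResolver {

		private final KafkaProperties properties;

		ConsumerBootstrapServersResolver(KafkaProperties properties) {
			this.properties = properties;
		}

		// Consumers use their own list when one is configured, otherwise they
		// inherit the global spring.kafka.bootstrap-servers value.
		List<String> resolve() {
			List<String> consumerServers = this.properties.getConsumer().getBootstrapServers();
			return (consumerServers != null) ? consumerServers
					: this.properties.getBootstrapServers();
		}

	}
----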

@@ -41,7 +41,7 @@ public class ClientsConfiguredCondition extends SpringBootCondition {
private static final Bindable<Map<String, OAuth2ClientProperties.LoginClientRegistration>> STRING_LOGIN_REGISTRATION_MAP = Bindable
.mapOf(String.class, OAuth2ClientProperties.LoginClientRegistration.class);

private static final Bindable<Map<String, OAuth2ClientProperties.AuthorizationCodeClientRegistration>> STRING_AUTHORIZATIONCODE_REGISTRATION_MAP = Bindable
private static final Bindable<Map<String, OAuth2ClientProperties.AuthorizationCodeClientRegistration>> STRING_AUTHORIZATION_CODE_REGISTRATION_MAP = Bindable
.mapOf(String.class,
OAuth2ClientProperties.AuthorizationCodeClientRegistration.class);

@@ -71,7 +71,7 @@ private Map<String, OAuth2ClientProperties.BaseClientRegistration> getRegistrati
Map<String, OAuth2ClientProperties.AuthorizationCodeClientRegistration> authCodeClientRegistrations = Binder
.get(environment)
.bind("spring.security.oauth2.client.registration.authorizationcode",
STRING_AUTHORIZATIONCODE_REGISTRATION_MAP)
STRING_AUTHORIZATION_CODE_REGISTRATION_MAP)
.orElse(Collections.emptyMap());
registrations.putAll(loginClientRegistrations);
registrations.putAll(authCodeClientRegistrations);
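
The renamed constant aside, the `Binder` pattern used above is easy to miss in a diff. A condensed, standalone sketch of the same binding calls, with a hypothetical prefix and a plain `Map<String, String>` target in place of the registration type:

[source,java,indent=0]
----
	import java.util.Collections;
	import java.util.Map;

	import org.springframework.boot.context.properties.bind.Bindable;
	import org.springframework.boot.context.properties.bind.Binder;
	import org.springframework.core.env.Environment;

	class RegistrationLookup {

		// Reusable Bindable describing the Map target of the bind operation.
		private static final Bindable<Map<String, String>> STRING_MAP = Bindable
				.mapOf(String.class, String.class);

		// Bind everything under the given prefix, falling back to an empty map,
		// mirroring how the condition collects the registration properties.
		Map<String, String> bind(Environment environment, String prefix) {
			return Binder.get(environment).bind(prefix, STRING_MAP)
					.orElse(Collections.emptyMap());
		}

	}
----
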
@@ -307,7 +307,7 @@ public void streamsProperties() {
Properties configs = context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME,
KafkaStreamsConfiguration.class).asProperties();
assertThat(configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
assertThat(configs.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG))
.isEqualTo("localhost:9092, localhost:9093");
assertThat(
configs.get(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG))
@@ -357,7 +357,7 @@ public void streamsApplicationIdUsesMainApplicationNameByDefault() {
Properties configs = context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME,
KafkaStreamsConfiguration.class).asProperties();
assertThat(configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
assertThat(configs.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG))
.isEqualTo("localhost:9092, localhost:9093");
assertThat(configs.get(StreamsConfig.APPLICATION_ID_CONFIG))
.isEqualTo("my-test-app");
@@ -376,7 +376,7 @@ public void streamsWithCustomKafkaConfiguration() {
Properties configs = context.getBean(
KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME,
KafkaStreamsConfiguration.class).asProperties();
assertThat(configs.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
assertThat(configs.get(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG))
.isEqualTo("localhost:9094, localhost:9095");
assertThat(configs.get(StreamsConfig.APPLICATION_ID_CONFIG))
.isEqualTo("test-id");
@@ -628,7 +628,7 @@ protected static class TestKafkaStreamsConfiguration {
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration kafkaStreamsConfiguration() {
Map<String, Object> streamsProperties = new HashMap<>();
streamsProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG,
"localhost:9094, localhost:9095");
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-id");

@@ -236,7 +236,7 @@ public void getClientRegistrationsWhenProviderNotSpecifiedShouldUseRegistrationI
}

@Test
public void getClientRegistrationsWhenAuhtorizationCodeClientShouldAdapt() {
public void getClientRegistrationsWhenAuthorizationCodeClientShouldAdapt() {
OAuth2ClientProperties properties = new OAuth2ClientProperties();
OAuth2ClientProperties.AuthorizationCodeClientRegistration registration = new OAuth2ClientProperties.AuthorizationCodeClientRegistration();
registration.setClientId("clientId");
@@ -26,7 +26,6 @@
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -96,25 +95,24 @@ public void testErrorForMachineClient() {

@Test
public void testErrorForMachineClientTraceParamTrue() {
errorForMachineClientOnTraceParam(() -> createUrl("?trace=true"), true);
errorForMachineClientOnTraceParam("?trace=true", true);
}

@Test
public void testErrorForMachineClientTraceParamFalse() {
errorForMachineClientOnTraceParam(() -> createUrl("?trace=false"), false);
errorForMachineClientOnTraceParam("?trace=false", false);
}

@Test
public void testErrorForMachineClientTraceParamAbsent() {
errorForMachineClientOnTraceParam(() -> createUrl(""), false);
errorForMachineClientOnTraceParam("", false);
}

@SuppressWarnings("rawtypes")
private void errorForMachineClientOnTraceParam(Supplier<String> url,
boolean expectedTrace) {
private void errorForMachineClientOnTraceParam(String path, boolean expectedTrace) {
load("--server.error.include-exception=true",
"--server.error.include-stacktrace=on-trace-param");
ResponseEntity<Map> entity = new TestRestTemplate().getForEntity(url.get(),
ResponseEntity<Map> entity = new TestRestTemplate().getForEntity(createUrl(path),
Map.class);
assertErrorAttributes(entity.getBody(), "500", "Internal Server Error",
IllegalStateException.class, "Expected!", "/");
@@ -549,7 +549,7 @@ content into your application. Rather, pick only the properties that you need.
spring.flyway.baseline-description= #
spring.flyway.baseline-on-migrate= #
spring.flyway.baseline-version=1 # Version to start migration
spring.flyway.batch = #
spring.flyway.batch= #
spring.flyway.check-location=true # Whether to check that migration scripts location exists.
spring.flyway.clean-disabled= #
spring.flyway.clean-on-validation-error= #
@@ -1040,11 +1040,11 @@ content into your application. Rather, pick only the properties that you need.
spring.kafka.admin.ssl.trust-store-location= # Location of the trust store file.
spring.kafka.admin.ssl.trust-store-password= # Store password for the trust store file.
spring.kafka.admin.ssl.trust-store-type= # Type of the trust store.
spring.kafka.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Applies to all components unless overridden.
spring.kafka.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connections to the Kafka cluster. Applies to all components unless overridden.
spring.kafka.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.consumer.auto-commit-interval= # Frequency with which the consumer offsets are auto-committed to Kafka if 'enable.auto.commit' is set to true.
spring.kafka.consumer.auto-offset-reset= # What to do when there is no initial offset in Kafka or if the current offset no longer exists on the server.
spring.kafka.consumer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Overrides the global property, for consumers.
spring.kafka.consumer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connections to the Kafka cluster. Overrides the global property, for consumers.
spring.kafka.consumer.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.consumer.enable-auto-commit= # Whether the consumer's offset is periodically committed in the background.
spring.kafka.consumer.fetch-max-wait= # Maximum amount of time the server blocks before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by "fetch.min.bytes".
@@ -1080,7 +1080,7 @@ content into your application. Rather, pick only the properties that you need.
spring.kafka.listener.type=single # Listener type.
spring.kafka.producer.acks= # Number of acknowledgments the producer requires the leader to have received before considering a request complete.
spring.kafka.producer.batch-size= # Default batch size in bytes.
spring.kafka.producer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Overrides the global property, for producers.
spring.kafka.producer.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connections to the Kafka cluster. Overrides the global property, for producers.
spring.kafka.producer.buffer-memory= # Total bytes of memory the producer can use to buffer records waiting to be sent to the server.
spring.kafka.producer.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.producer.compression-type= # Compression type for all data generated by the producer.
@@ -1106,9 +1106,9 @@ content into your application. Rather, pick only the properties that you need.
spring.kafka.ssl.trust-store-location= # Location of the trust store file.
spring.kafka.ssl.trust-store-password= # Store password for the trust store file.
spring.kafka.ssl.trust-store-type= # Type of the trust store.
spring.kafka.streams.application-id = # Kafka streams application.id property; default spring.application.name.
spring.kafka.streams.application-id= # Kafka streams application.id property; default spring.application.name.
spring.kafka.streams.auto-startup=true # Whether or not to auto-start the streams factory bean.
spring.kafka.streams.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connection to the Kafka cluster. Overrides the global property, for streams.
spring.kafka.streams.bootstrap-servers= # Comma-delimited list of host:port pairs to use for establishing the initial connections to the Kafka cluster. Overrides the global property, for streams.
spring.kafka.streams.cache-max-bytes-buffering= # Maximum number of memory bytes to be used for buffering across all threads.
spring.kafka.streams.client-id= # ID to pass to the server when making requests. Used for server-side logging.
spring.kafka.streams.properties.*= # Additional Kafka properties used to configure the streams.
@@ -1326,15 +1326,15 @@ source for more details.

[[howto-switch-off-the-spring-mvc-dispatcherservlet]]
=== Switch Off the Spring MVC DispatcherServlet
By default, All content is served from the root of your application (`/`) down. If you
By default, all content is served from the root of your application (`/`). If you
would rather map to a different path, you can configure one as follows:

[source,properties,indent=0,subs="verbatim"]
----
spring.mvc.servlet.path=/acme
----

If you have additional servlets you can declare a `@Bean` of type `Servlet` or
If you have additional servlets you can declare a `@Bean` of type `Servlet` or
`ServletRegistrationBean` for each and Spring Boot will register them transparently to the
container. Because servlets are registered that way, they can be mapped to a sub-context
of the `DispatcherServlet` without invoking it.
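
For example, a minimal sketch of such a registration; the servlet body and the `/acme-extra/*` mapping are illustrative only:

[source,java,indent=0]
----
	@Configuration
	public class AdditionalServletConfiguration {

		@Bean
		public ServletRegistrationBean<HttpServlet> acmeExtraServlet() {
			// Additional servlet mapped outside the DispatcherServlet
			HttpServlet servlet = new HttpServlet() {

				@Override
				protected void doGet(HttpServletRequest request, HttpServletResponse response)
						throws IOException {
					response.getWriter().write("Hello from an additional servlet");
				}

			};
			return new ServletRegistrationBean<>(servlet, "/acme-extra/*");
		}

	}
----
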
@@ -5701,12 +5701,12 @@ reference the auto-configured `KafkaTransactionManager` bean.
==== Kafka Streams
Spring for Apache Kafka provides a factory bean to create a `StreamsBuilder` object and
manage the lifecycle of its streams. Spring Boot auto-configures the required
`KafkaStreamsConfiguration` bean as long as `kafka-streams` in on the classpath and kafka
streams is enabled via the `@EnableKafkaStreams` annotation.
`KafkaStreamsConfiguration` bean as long as `kafka-streams` is on the classpath and Kafka
Streams is enabled via the `@EnableKafkaStreams` annotation.

Enabling Kafka Streams means that the application id and bootstrap servers must be set.
The former can be configured using `spring.kafka.streams.application-id`, defaulting to
`spring.application.name` if not set. The later can be set globally or
`spring.application.name` if not set. The latter can be set globally or
specifically overridden just for streams.
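
For instance, a minimal sketch of a topology that relies on this auto-configuration; the topic names and topology are illustrative, only `@EnableKafkaStreams` and the injected `StreamsBuilder` come from the feature described above:

[source,java,indent=0]
----
	@Configuration
	@EnableKafkaStreams
	public class KafkaStreamsExampleConfiguration {

		@Bean
		public KStream<String, String> exampleStream(StreamsBuilder streamsBuilder) {
			// Spring Boot supplies the StreamsBuilder once @EnableKafkaStreams is
			// present and the application id / bootstrap servers are configured.
			KStream<String, String> stream = streamsBuilder.stream("example-input");
			stream.to("example-output");
			return stream;
		}

	}
----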

Several additional properties are available using dedicated properties; other arbitrary