Bump kafka-client.version from 2.8.0 to 3.0.0 (#163)
* Bump kafka-client.version from 2.8.0 to 3.0.0

Bumps `kafka-client.version` from 2.8.0 to 3.0.0.

Updates `kafka-clients` from 2.8.0 to 3.0.0

Updates `kafka-clients` (test scope) from 2.8.0 to 3.0.0

Updates `kafka-streams` from 2.8.0 to 3.0.0

Updates `kafka-streams-test-utils` from 2.8.0 to 3.0.0

Updates `kafka_2.13` from 2.8.0 to 3.0.0

Updates `kafka_2.13` (test scope) from 2.8.0 to 3.0.0

---
updated-dependencies:
- dependency-name: org.apache.kafka:kafka-clients
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.apache.kafka:kafka-clients:test
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.apache.kafka:kafka-streams
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.apache.kafka:kafka-streams-test-utils
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.apache.kafka:kafka_2.13
  dependency-type: direct:production
  update-type: version-update:semver-major
- dependency-name: org.apache.kafka:kafka_2.13:test
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <[email protected]>

* Update KafkaAdminClientFactory and replace deprecated classes/methods

* Bump spring-kafka-test from 2.7.8 to 2.8.0

Bumps [spring-kafka-test](https://github.com/spring-projects/spring-kafka) from 2.7.8 to 2.8.0.
- [Release notes](https://github.com/spring-projects/spring-kafka/releases)
- [Commits](https://github.com/spring-projects/spring-kafka/compare/v2.7.8...v2.8.0)

---
updated-dependencies:
- dependency-name: org.springframework.kafka:spring-kafka-test
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <[email protected]>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jochen Schalanda <[email protected]>
dependabot[bot] and joschi authored Nov 16, 2021
1 parent 2cfc018 commit 59fc97c
Showing 6 changed files with 10 additions and 9 deletions.
4 changes: 2 additions & 2 deletions pom.xml
@@ -74,8 +74,8 @@

<brave.version>5.13.3</brave.version>
<dropwizard.version>2.0.25</dropwizard.version>
-<kafka-client.version>2.8.1</kafka-client.version>
-<spring-kafka.version>2.7.8</spring-kafka.version>
+<kafka-client.version>3.0.0</kafka-client.version>
+<spring-kafka.version>2.8.0</spring-kafka.version>
</properties>

<dependencyManagement>
@@ -42,7 +42,7 @@ public abstract class KafkaAdminClientFactory {

@NotNull
@JsonProperty
-protected ClientDnsLookup clientDnsLookup = ClientDnsLookup.DEFAULT;
+protected ClientDnsLookup clientDnsLookup = ClientDnsLookup.USE_ALL_DNS_IPS;

@NotNull
@JsonProperty
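The `ClientDnsLookup.DEFAULT` constant was deprecated (its behaviour changed with KIP-602 in Kafka 2.6) and is no longer available in the 3.0 clients, which is why the factory's default moves to `USE_ALL_DNS_IPS`. Below is a minimal, hedged sketch of how such a value is typically applied when building an admin client; the class name, property map, and bootstrap address are illustrative and not taken from the factory itself.

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.ClientDnsLookup;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class AdminClientDnsLookupSketch {
    public static void main(String[] args) {
        final Map<String, Object> config = new HashMap<>();
        // Illustrative bootstrap address, not taken from the factory.
        config.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // USE_ALL_DNS_IPS matches the default behaviour of the 3.0 clients;
        // the deprecated ClientDnsLookup.DEFAULT constant is gone.
        config.put(AdminClientConfig.CLIENT_DNS_LOOKUP_CONFIG,
                ClientDnsLookup.USE_ALL_DNS_IPS.toString());

        try (AdminClient adminClient = AdminClient.create(config)) {
            // Use the client here, e.g. adminClient.listTopics().
        }
    }
}
```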
2 changes: 1 addition & 1 deletion src/main/java/io/dropwizard/kafka/KafkaClientFactory.java
@@ -5,12 +5,12 @@
import io.dropwizard.kafka.security.SecurityFactory;
import io.dropwizard.kafka.tracing.TracingFactory;
import io.dropwizard.validation.ValidationMethod;
-import org.hibernate.validator.constraints.NotEmpty;

import java.util.Optional;
import java.util.Set;

import javax.validation.Valid;
+import javax.validation.constraints.NotEmpty;

public abstract class KafkaClientFactory {
@NotEmpty
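Here the long-deprecated `org.hibernate.validator.constraints.NotEmpty` import is swapped for the Bean Validation standard `javax.validation.constraints.NotEmpty`; the same import swap recurs in the next file's diff. A minimal sketch of the resulting usage follows, with a purely illustrative class and field name.

```java
import javax.validation.constraints.NotEmpty;

public class ClientNameConfigSketch {
    // Same constraint semantics as the deprecated Hibernate Validator annotation:
    // the value must not be null and must not be empty.
    @NotEmpty
    private String name = "example-client";

    public String getName() {
        return name;
    }
}
```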
@@ -18,11 +18,11 @@
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.record.CompressionType;
-import org.hibernate.validator.constraints.NotEmpty;

import javax.annotation.Nullable;
import javax.validation.Valid;
import javax.validation.constraints.Min;
+import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import java.util.Collection;
import java.util.Collections;
@@ -3,13 +3,13 @@
import com.google.common.collect.ImmutableList;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.errors.InterruptException;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.Collections;
import java.util.List;

+import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
@@ -30,13 +30,13 @@ public void setUp() {
public void shouldReturnHealthyWhenClusterReportsTopicMetadata() {
topics.forEach(topic -> when(producerMock.partitionsFor(topic)).thenReturn(Collections.emptyList()));

-Assert.assertThat(healthCheck.check().isHealthy(), is(true));
+assertThat(healthCheck.check().isHealthy(), is(true));
}

@Test
public void shouldReturnUnhealthyWhenClusterFailsToReportTopicMetadata() {
topics.forEach(topic -> when(producerMock.partitionsFor(topic)).thenThrow(new InterruptException("timed out waiting")));

-Assert.assertThat(healthCheck.check().isHealthy(), is(false));
+assertThat(healthCheck.check().isHealthy(), is(false));
}
}
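The health-check test drops JUnit 4's deprecated `Assert.assertThat` in favour of a static import of Hamcrest's `MatcherAssert.assertThat`, which accepts the same matcher arguments. A minimal standalone sketch of the migrated assertion style is shown below; the class name and the `healthy` flag are illustrative.

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;

public class AssertionStyleSketch {
    public static void main(String[] args) {
        final boolean healthy = true;
        // Identical matcher-based assertion, without the deprecated JUnit helper.
        assertThat(healthy, is(true));
    }
}
```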
@@ -28,6 +28,7 @@
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;

import java.io.File;
+import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -99,7 +100,7 @@ public void basicConsumerShouldConnectToKafka() throws Exception {

try (final Consumer consumer = factory.build(lifecycle, healthChecks, null, null)) {
consumer.subscribe(ImmutableList.of(CONSUMER_TOPIC));
-final ConsumerRecords<String, String> foundRecords = consumer.poll(10L);
+final ConsumerRecords<String, String> foundRecords = consumer.poll(Duration.ofMillis(10L));

assertThat(foundRecords)
.isEmpty();
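`Consumer.poll(long)` was deprecated by KIP-266 and is no longer present in the 3.0 clients, so the integration test switches to the `poll(Duration)` overload. The following is a minimal, self-contained sketch of the new call; the bootstrap address, group id, and topic name are illustrative and not taken from the test.

```java
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class PollDurationSketch {
    public static void main(String[] args) {
        final Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "example-group");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("example-topic"));
            // poll(Duration) is the only poll overload available in the 3.x clients.
            final ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(10L));
            System.out.println("Fetched " + records.count() + " records");
        }
    }
}
```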
