Skip to content

Commit 1a01a47

Browse files
authored
GEOMESA-3528 Switch to Kafka native image for testcontainers tests (#3435)
* Bump Kafka to 3.9.1
1 parent 282d9f9 commit 1a01a47

File tree

6 files changed

+82
-101
lines changed

6 files changed

+82
-101
lines changed

build/test/resources/log4j.xml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,10 @@
8585
<category name="org.apache.kafka">
8686
<priority value="error"/>
8787
</category>
88+
<!-- testcontainers output -->
89+
<category name="kafka">
90+
<priority value="error"/>
91+
</category>
8892
<category name="io.confluent">
8993
<priority value="error"/>
9094
</category>

docs/user/upgrade.rst

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,16 @@ Compatibility Matrix
9090
| Dependencies | N | N | Y |
9191
+--------------+-------+-------+-------+
9292

93+
Version 5.5.0 Upgrade Guide
94+
+++++++++++++++++++++++++++
95+
96+
Dependency Version Upgrades
97+
---------------------------
98+
99+
The following dependencies have been upgraded:
100+
101+
* kafka ``3.9.0`` -> ``3.9.1``
102+
93103
Version 5.4.0 Upgrade Guide
94104
+++++++++++++++++++++++++++
95105

geomesa-kafka/geomesa-kafka-confluent/src/test/scala/org/locationtech/geomesa/kafka/confluent/ConfluentContainerTest.scala

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -10,31 +10,29 @@ package org.locationtech.geomesa.kafka.confluent
1010

1111
import org.locationtech.geomesa.kafka.KafkaContainerTest
1212
import org.locationtech.geomesa.kafka.confluent.ConfluentContainerTest.SchemaRegistryContainer
13+
import org.locationtech.geomesa.utils.io.CloseWithLogging
1314
import org.slf4j.LoggerFactory
1415
import org.testcontainers.containers.GenericContainer
1516
import org.testcontainers.containers.output.Slf4jLogConsumer
1617
import org.testcontainers.utility.DockerImageName
1718

1819
class ConfluentContainerTest extends KafkaContainerTest {
1920

20-
private var container: SchemaRegistryContainer = _
21+
private val container =
22+
new SchemaRegistryContainer(dockerNetworkBrokers)
23+
.withNetwork(network)
24+
.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("schema-registry")))
2125

2226
lazy val schemaRegistryUrl: String = s"http://${container.getHost}:${container.getFirstMappedPort}"
2327

2428
override def beforeAll(): Unit = {
2529
super.beforeAll()
26-
container =
27-
new SchemaRegistryContainer("kafka:9092")
28-
.withNetwork(network)
29-
.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("schema-registry")))
3030
container.start()
3131
}
3232

3333
override def afterAll(): Unit = {
3434
try {
35-
if (container != null) {
36-
container.stop()
37-
}
35+
CloseWithLogging(container)
3836
} finally {
3937
super.afterAll()
4038
}

geomesa-kafka/geomesa-kafka-datastore/src/test/java/org/locationtech/geomesa/kafka/jstreams/GeoMesaStreamsBuilderTest.java

Lines changed: 24 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -13,107 +13,55 @@
1313
import org.apache.kafka.clients.consumer.KafkaConsumer;
1414
import org.apache.kafka.clients.producer.Producer;
1515
import org.apache.kafka.clients.producer.ProducerRecord;
16-
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
17-
import org.apache.kafka.common.serialization.ByteArraySerializer;
18-
import org.apache.kafka.common.serialization.LongDeserializer;
19-
import org.apache.kafka.common.serialization.Serdes;
16+
import org.apache.kafka.common.serialization.*;
2017
import org.apache.kafka.common.serialization.Serdes.StringSerde;
21-
import org.apache.kafka.common.serialization.StringDeserializer;
22-
import org.apache.kafka.streams.KeyValue;
23-
import org.apache.kafka.streams.StreamsConfig;
24-
import org.apache.kafka.streams.TestInputTopic;
25-
import org.apache.kafka.streams.TestOutputTopic;
26-
import org.apache.kafka.streams.TopologyTestDriver;
27-
import org.apache.kafka.streams.kstream.Consumed;
28-
import org.apache.kafka.streams.kstream.KStream;
29-
import org.apache.kafka.streams.kstream.KTable;
30-
import org.apache.kafka.streams.kstream.Materialized;
31-
import org.apache.kafka.streams.kstream.Produced;
32-
import org.apache.kafka.streams.kstream.Transformer;
18+
import org.apache.kafka.streams.*;
19+
import org.apache.kafka.streams.kstream.*;
3320
import org.apache.kafka.streams.processor.ProcessorContext;
3421
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
3522
import org.apache.kafka.streams.test.TestRecord;
36-
import org.geotools.api.data.DataStoreFinder;
37-
import org.geotools.api.data.Query;
38-
import org.geotools.api.data.SimpleFeatureReader;
39-
import org.geotools.api.data.SimpleFeatureWriter;
40-
import org.geotools.api.data.Transaction;
23+
import org.geotools.api.data.*;
4124
import org.geotools.api.feature.simple.SimpleFeature;
4225
import org.geotools.api.feature.simple.SimpleFeatureType;
43-
import org.junit.AfterClass;
4426
import org.junit.Assert;
45-
import org.junit.BeforeClass;
27+
import org.junit.ClassRule;
4628
import org.junit.Test;
4729
import org.locationtech.geomesa.features.ScalaSimpleFeature;
30+
import org.locationtech.geomesa.kafka.KafkaContainerTest;
4831
import org.locationtech.geomesa.kafka.data.KafkaDataStore;
4932
import org.locationtech.geomesa.kafka.streams.GeoMesaMessage;
5033
import org.locationtech.geomesa.utils.geotools.FeatureUtils;
5134
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes;
5235
import org.locationtech.geomesa.utils.geotools.converters.FastConverter;
53-
import org.slf4j.Logger;
5436
import org.slf4j.LoggerFactory;
55-
import org.testcontainers.containers.KafkaContainer;
5637
import org.testcontainers.containers.output.Slf4jLogConsumer;
57-
import org.testcontainers.utility.DockerImageName;
38+
import org.testcontainers.kafka.KafkaContainer;
5839

5940
import java.nio.charset.StandardCharsets;
6041
import java.time.Duration;
61-
import java.util.ArrayList;
62-
import java.util.Arrays;
63-
import java.util.Collections;
64-
import java.util.Comparator;
65-
import java.util.Date;
66-
import java.util.HashMap;
67-
import java.util.List;
68-
import java.util.Map;
69-
import java.util.Properties;
70-
import java.util.Set;
42+
import java.util.*;
7143
import java.util.concurrent.ConcurrentHashMap;
7244
import java.util.stream.Collectors;
7345

7446
public class GeoMesaStreamsBuilderTest {
7547

76-
private static final Logger logger = LoggerFactory.getLogger(GeoMesaStreamsBuilderTest.class);
77-
78-
static KafkaContainer container = null;
48+
@ClassRule
49+
public static final KafkaContainer container =
50+
new KafkaContainer(KafkaContainerTest.KafkaImage())
51+
.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("kafka")));
7952

8053
static final SimpleFeatureType sft =
8154
SimpleFeatureTypes.createImmutableType("streams", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326");
8255

8356
static final List<SimpleFeature> features = new ArrayList<>();
8457

85-
static final Set<String> zkPaths = Collections.newSetFromMap(new ConcurrentHashMap<>());
58+
static final Set<String> catalogs = Collections.newSetFromMap(new ConcurrentHashMap<>());
8659

87-
static String zookeepers() {
88-
return String.format("%s:%s", container.getHost(), container.getMappedPort(KafkaContainer.ZOOKEEPER_PORT));
89-
}
9060
static String brokers() {
9161
return container.getBootstrapServers();
9262
}
9363

94-
public Map<String, String> getParams(String zkPath) {
95-
if (!zkPaths.add(zkPath)) {
96-
throw new IllegalArgumentException("zk path '" + zkPath + "' is reused between tests, may cause conflicts");
97-
}
98-
Map<String, String> params = new HashMap<>();
99-
params.put("kafka.brokers", brokers());
100-
params.put("kafka.zookeepers", zookeepers());
101-
params.put("kafka.topic.partitions", "1");
102-
params.put("kafka.topic.replication", "1");
103-
params.put("kafka.consumer.read-back", "Inf");
104-
params.put("kafka.zk.path", zkPath);
105-
return params;
106-
}
107-
108-
@BeforeClass
109-
public static void init() {
110-
DockerImageName image =
111-
DockerImageName.parse("confluentinc/cp-kafka")
112-
.withTag(System.getProperty("confluent.docker.tag", "7.3.1"));
113-
container = new KafkaContainer(image);
114-
container.start();
115-
container.followOutput(new Slf4jLogConsumer(logger));
116-
64+
static {
11765
for (int i = 0; i < 10; i ++) {
11866
ScalaSimpleFeature sf = new ScalaSimpleFeature(sft, "id" + i, null, null);
11967
sf.setAttribute(0, "name" + i);
@@ -124,11 +72,17 @@ public static void init() {
12472
}
12573
}
12674

127-
@AfterClass
128-
public static void destroy() {
129-
if (container != null) {
130-
container.stop();
75+
public Map<String, String> getParams(String catalog) {
76+
if (!catalogs.add(catalog)) {
77+
throw new IllegalArgumentException("catalog '" + catalog + "' is reused between tests, may cause conflicts");
13178
}
79+
Map<String, String> params = new HashMap<>();
80+
params.put("kafka.brokers", brokers());
81+
params.put("kafka.catalog.topic", catalog);
82+
params.put("kafka.topic.partitions", "1");
83+
params.put("kafka.topic.replication", "1");
84+
params.put("kafka.consumer.read-back", "Inf");
85+
return params;
13286
}
13387

13488
@Test

geomesa-kafka/geomesa-kafka-datastore/src/test/scala/org/locationtech/geomesa/kafka/KafkaContainerTest.scala

Lines changed: 34 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -9,40 +9,52 @@
99
package org.locationtech.geomesa.kafka
1010

1111
import com.typesafe.scalalogging.LazyLogging
12+
import org.locationtech.geomesa.kafka.KafkaContainerTest.ZookeeperContainer
13+
import org.locationtech.geomesa.utils.io.CloseWithLogging
1214
import org.slf4j.LoggerFactory
1315
import org.specs2.mutable.Specification
1416
import org.specs2.specification.BeforeAfterAll
15-
import org.testcontainers.containers.{KafkaContainer, Network}
1617
import org.testcontainers.containers.output.Slf4jLogConsumer
18+
import org.testcontainers.containers.{GenericContainer, Network}
19+
import org.testcontainers.kafka.KafkaContainer
20+
import org.testcontainers.lifecycle.Startables
1721
import org.testcontainers.utility.DockerImageName
1822

1923
class KafkaContainerTest extends Specification with BeforeAfterAll with LazyLogging {
2024

21-
private var container: KafkaContainer = _
22-
2325
protected val network = Network.newNetwork()
2426

25-
lazy val zookeepers = s"${container.getHost}:${container.getMappedPort(KafkaContainer.ZOOKEEPER_PORT)}"
26-
lazy val brokers = container.getBootstrapServers
27-
28-
override def beforeAll(): Unit = {
29-
container =
30-
new KafkaContainer(KafkaContainerTest.KafkaImage)
31-
.withNetwork(network)
32-
.withNetworkAliases("kafka")
33-
.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("kafka")))
34-
container.start()
35-
}
36-
37-
override def afterAll(): Unit = {
38-
if (container != null) {
39-
container.stop()
40-
}
41-
}
27+
// listener for other containers in the docker network
28+
val dockerNetworkBrokers = "kafka:19092"
29+
30+
private val kafka =
31+
new KafkaContainer(KafkaContainerTest.KafkaImage)
32+
.withNetwork(network)
33+
.withNetworkAliases("kafka")
34+
.withListener(dockerNetworkBrokers)
35+
.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("kafka")))
36+
37+
private val zookeeper =
38+
new ZookeeperContainer(KafkaContainerTest.ZookeeperImage)
39+
.withExposedPorts(2181)
40+
41+
lazy val brokers = kafka.getBootstrapServers
42+
lazy val zookeepers = s"${zookeeper.getHost}:${zookeeper.getMappedPort(2181)}"
43+
44+
override def beforeAll(): Unit = Startables.deepStart(kafka, zookeeper).get()
45+
46+
override def afterAll(): Unit = CloseWithLogging(Seq(zookeeper, kafka))
4247
}
4348

4449
object KafkaContainerTest {
50+
4551
val KafkaImage =
46-
DockerImageName.parse("confluentinc/cp-kafka")
47-
.withTag(sys.props.getOrElse("confluent.docker.tag", "7.6.0"))
52+
DockerImageName.parse("apache/kafka-native")
53+
.withTag(sys.props.getOrElse("kafka.docker.tag", "3.9.1"))
54+
55+
val ZookeeperImage =
56+
DockerImageName.parse("zookeeper")
57+
.withTag(sys.props.getOrElse("zookeeper.docker.tag", "3.9.2"))
58+
59+
class ZookeeperContainer(image: DockerImageName) extends GenericContainer[ZookeeperContainer](image)
4860
}

pom.xml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@
142142
<protobuf.version>3.25.6</protobuf.version> <!-- see also confluent.protobuf.version, hbase.protobuf.version -->
143143
<cassandra.driver.version>3.11.5</cassandra.driver.version>
144144
<cassandra.version>3.11.19</cassandra.version>
145-
<kafka.version>3.9.0</kafka.version> <!-- needs to align with confluent version -->
145+
<kafka.version>3.9.1</kafka.version> <!-- needs to align with confluent version -->
146146
<confluent.version>7.8.0</confluent.version> <!-- confluent 7.8.x corresponds to kafka 3.8.x -->
147147
<confluent.protobuf.version>3.25.6</confluent.protobuf.version>
148148
<sedona.version>1.8.0</sedona.version>
@@ -216,6 +216,7 @@
216216
<test.cassandra.docker.tag>3.11.19</test.cassandra.docker.tag>
217217
<test.confluent.docker.tag>7.8.0</test.confluent.docker.tag> <!-- confluent 7.8.x corresponds to kafka 3.8.x -->
218218
<test.hbase.docker.tag>2.6.2</test.hbase.docker.tag>
219+
<test.kafka.docker.tag>3.9.1</test.kafka.docker.tag>
219220
<test.minio.docker.tag>RELEASE.2024-10-29T16-01-48Z</test.minio.docker.tag>
220221
<test.postgis.docker.tag>15-3.4</test.postgis.docker.tag>
221222
<test.postgres.docker.tag>15.1</test.postgres.docker.tag>
@@ -3231,6 +3232,7 @@
32313232
<accumulo.docker.tag>${test.accumulo.docker.tag}</accumulo.docker.tag>
32323233
<cassandra.docker.tag>${test.cassandra.docker.tag}</cassandra.docker.tag>
32333234
<confluent.docker.tag>${test.confluent.docker.tag}</confluent.docker.tag>
3235+
<kafka.docker.tag>${test.kafka.docker.tag}</kafka.docker.tag>
32343236
<hbase.docker.tag>${test.hbase.docker.tag}</hbase.docker.tag>
32353237
<minio.docker.tag>${test.minio.docker.tag}</minio.docker.tag>
32363238
<postgis.docker.tag>${test.postgis.docker.tag}</postgis.docker.tag>
@@ -3260,6 +3262,7 @@
32603262
<accumulo.docker.tag>${test.accumulo.docker.tag}</accumulo.docker.tag>
32613263
<cassandra.docker.tag>${test.cassandra.docker.tag}</cassandra.docker.tag>
32623264
<confluent.docker.tag>${test.confluent.docker.tag}</confluent.docker.tag>
3265+
<kafka.docker.tag>${test.kafka.docker.tag}</kafka.docker.tag>
32633266
<hbase.docker.tag>${test.hbase.docker.tag}</hbase.docker.tag>
32643267
<minio.docker.tag>${test.minio.docker.tag}</minio.docker.tag>
32653268
<postgis.docker.tag>${test.postgis.docker.tag}</postgis.docker.tag>

0 commit comments

Comments
 (0)