Skip to content

Commit 2674632

Browse files
committed
deps: Upgraded spring batch redis
1 parent cf819e6 commit 2674632

13 files changed

+85
-69
lines changed

build.gradle

+1
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ config {
4242
}
4343

4444
people {
45+
clearDomainSet()
4546
person {
4647
id = 'jruaux'
4748
name = 'Julien Ruaux'

core/redis-kafka-connect/src/main/java/com/redis/kafka/connect/sink/RedisSinkTask.java

+6-5
Original file line numberDiff line numberDiff line change
@@ -162,12 +162,13 @@ public ConditionalDel(Operation<byte[], byte[], SinkRecord, Object> delegate,
162162

163163
@Override
164164
public List<RedisFuture<Object>> execute(RedisAsyncCommands<byte[], byte[]> commands,
165-
List<? extends SinkRecord> items) {
165+
Chunk<? extends SinkRecord> items) {
166166
List<RedisFuture<Object>> futures = new ArrayList<>();
167-
List<SinkRecord> toRemove = items.stream().filter(delPredicate).collect(Collectors.toList());
168-
futures.addAll(del.execute(commands, toRemove));
169-
List<SinkRecord> toWrite = items.stream().filter(delPredicate.negate()).collect(Collectors.toList());
170-
futures.addAll(write.execute(commands, toWrite));
167+
List<SinkRecord> toRemove = items.getItems().stream().filter(delPredicate).collect(Collectors.toList());
168+
futures.addAll(del.execute(commands, new Chunk<>(toRemove)));
169+
List<SinkRecord> toWrite = items.getItems().stream().filter(delPredicate.negate())
170+
.collect(Collectors.toList());
171+
futures.addAll(write.execute(commands, new Chunk<>(toWrite)));
171172
return futures;
172173
}
173174

core/redis-kafka-connect/src/main/java/com/redis/kafka/connect/source/RedisKeysSourceTask.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ public class RedisKeysSourceTask extends SourceTask {
4949
private final Clock clock;
5050

5151
private AbstractRedisClient client;
52-
private RedisItemReader<String, String, Object> reader;
52+
private RedisItemReader<String, String> reader;
5353
private int batchSize;
5454
private String topic;
5555

@@ -66,7 +66,7 @@ public String version() {
6666
return ManifestVersionProvider.getVersion();
6767
}
6868

69-
public RedisItemReader<String, String, Object> getReader() {
69+
public RedisItemReader<String, String> getReader() {
7070
return reader;
7171
}
7272

@@ -124,7 +124,7 @@ public void stop() {
124124
}
125125
}
126126

127-
private SourceRecord convert(KeyValue<String, Object> input) {
127+
private SourceRecord convert(KeyValue<String> input) {
128128
Map<String, ?> partition = new HashMap<>();
129129
Map<String, ?> offset = new HashMap<>();
130130
String key = input.getKey();
@@ -136,7 +136,7 @@ private SourceRecord convert(KeyValue<String, Object> input) {
136136
@Override
137137
public List<SourceRecord> poll() {
138138
List<SourceRecord> records = new ArrayList<>();
139-
KeyValue<String, Object> item;
139+
KeyValue<String> item;
140140
try {
141141
while (records.size() < batchSize && (item = reader.read()) != null) {
142142
records.add(convert(item));

core/redis-kafka-connect/src/main/java/com/redis/kafka/connect/source/ToStructFunction.java

+27-26
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,11 @@
1111
import org.apache.kafka.connect.data.Struct;
1212

1313
import com.redis.lettucemod.timeseries.Sample;
14-
import com.redis.spring.batch.item.redis.common.DataType;
1514
import com.redis.spring.batch.item.redis.common.KeyValue;
1615

1716
import io.lettuce.core.ScoredValue;
1817

19-
public class ToStructFunction implements Function<KeyValue<String, Object>, Struct> {
18+
public class ToStructFunction implements Function<KeyValue<String>, Struct> {
2019

2120
public static final String FIELD_KEY = "key";
2221

@@ -60,43 +59,45 @@ public class ToStructFunction implements Function<KeyValue<String, Object>, Stru
6059
.field(FIELD_SET, SET_SCHEMA).field(FIELD_ZSET, ZSET_SCHEMA).name(VALUE_SCHEMA_NAME).build();
6160

6261
@Override
63-
public Struct apply(KeyValue<String, Object> input) {
62+
public Struct apply(KeyValue<String> input) {
6463
Struct struct = new Struct(VALUE_SCHEMA);
6564
struct.put(FIELD_KEY, input.getKey());
6665
struct.put(FIELD_TTL, input.getTtl());
6766
struct.put(FIELD_TYPE, input.getType());
68-
switch (DataType.of(input.getType())) {
69-
case HASH:
70-
struct.put(FIELD_HASH, input.getValue());
71-
break;
72-
case JSON:
73-
struct.put(FIELD_JSON, input.getValue());
74-
break;
75-
case LIST:
76-
struct.put(FIELD_LIST, input.getValue());
77-
break;
78-
case SET:
79-
struct.put(FIELD_SET, list(input));
80-
break;
81-
case STRING:
82-
struct.put(FIELD_STRING, input.getValue());
83-
break;
84-
case ZSET:
85-
struct.put(FIELD_ZSET, zsetMap(input));
86-
break;
87-
default:
88-
break;
67+
if (input.getType() != null) {
68+
switch (input.getType()) {
69+
case KeyValue.TYPE_HASH:
70+
struct.put(FIELD_HASH, input.getValue());
71+
break;
72+
case KeyValue.TYPE_JSON:
73+
struct.put(FIELD_JSON, input.getValue());
74+
break;
75+
case KeyValue.TYPE_LIST:
76+
struct.put(FIELD_LIST, input.getValue());
77+
break;
78+
case KeyValue.TYPE_SET:
79+
struct.put(FIELD_SET, list(input));
80+
break;
81+
case KeyValue.TYPE_STRING:
82+
struct.put(FIELD_STRING, input.getValue());
83+
break;
84+
case KeyValue.TYPE_ZSET:
85+
struct.put(FIELD_ZSET, zsetMap(input));
86+
break;
87+
default:
88+
break;
89+
}
8990
}
9091
return struct;
9192
}
9293

9394
@SuppressWarnings("unchecked")
94-
private Object list(KeyValue<String, Object> input) {
95+
private Object list(KeyValue<String> input) {
9596
return new ArrayList<>((Collection<String>) input.getValue());
9697
}
9798

9899
@SuppressWarnings("unchecked")
99-
public static Map<Double, String> zsetMap(KeyValue<String, Object> input) {
100+
public static Map<Double, String> zsetMap(KeyValue<String> input) {
100101
Collection<ScoredValue<String>> value = (Collection<ScoredValue<String>>) input.getValue();
101102
return zsetMap(value);
102103
}

core/redis-kafka-connect/src/test/java/com/redis/kafka/connect/AbstractSinkIntegrationTests.java

-1
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
import java.nio.charset.StandardCharsets;
88
import java.util.ArrayList;
99
import java.util.Arrays;
10-
import java.util.Collections;
1110
import java.util.Comparator;
1211
import java.util.HashSet;
1312
import java.util.LinkedHashMap;

core/redis-kafka-connect/src/test/java/com/redis/kafka/connect/AbstractSourceIntegrationTests.java

+9-11
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
package com.redis.kafka.connect;
22

3-
import java.time.Duration;
43
import java.util.ArrayList;
54
import java.util.Arrays;
65
import java.util.Collection;
@@ -37,7 +36,6 @@
3736
import com.redis.kafka.connect.source.ToStructFunction;
3837
import com.redis.lettucemod.api.sync.RedisModulesCommands;
3938
import com.redis.spring.batch.item.redis.RedisItemWriter;
40-
import com.redis.spring.batch.item.redis.common.DataType;
4139
import com.redis.spring.batch.item.redis.common.KeyValue;
4240
import com.redis.spring.batch.item.redis.gen.GeneratorItemReader;
4341
import com.redis.spring.batch.test.AbstractTestBase;
@@ -417,7 +415,7 @@ void pollKeys(TestInfo info) throws Exception {
417415
final List<SourceRecord> sourceRecords = new ArrayList<>();
418416
Executors.newSingleThreadScheduledExecutor().execute(() -> {
419417
GeneratorItemReader generator = generator(count);
420-
RedisItemWriter<String, String, KeyValue<String, Object>> writer = RedisItemWriter.struct();
418+
RedisItemWriter<String, String, KeyValue<String>> writer = RedisItemWriter.struct();
421419
writer.setClient(redisClient);
422420
try {
423421
run(info, step(info, 1, generator, null, writer));
@@ -455,21 +453,21 @@ public Compare(Object expected, Object actual) {
455453

456454
private Compare values(Struct struct) {
457455
String key = struct.getString(ToStructFunction.FIELD_KEY);
458-
DataType type = DataType.of(struct.getString(ToStructFunction.FIELD_TYPE));
459-
Assertions.assertEquals(redisConnection.sync().type(key), type.getString());
456+
String type = struct.getString(ToStructFunction.FIELD_TYPE);
457+
Assertions.assertEquals(redisConnection.sync().type(key), type);
460458
RedisModulesCommands<String, String> commands = redisConnection.sync();
461459
switch (type) {
462-
case HASH:
460+
case KeyValue.TYPE_HASH:
463461
return compare(commands.hgetall(key), struct.getMap(ToStructFunction.FIELD_HASH));
464-
case JSON:
462+
case KeyValue.TYPE_JSON:
465463
return compare(commands.jsonGet(key, "."), struct.getString(ToStructFunction.FIELD_JSON));
466-
case LIST:
464+
case KeyValue.TYPE_LIST:
467465
return compare(commands.lrange(key, 0, -1), struct.getArray(ToStructFunction.FIELD_LIST));
468-
case SET:
466+
case KeyValue.TYPE_SET:
469467
return compare(commands.smembers(key), new HashSet<>(struct.getArray(ToStructFunction.FIELD_SET)));
470-
case STRING:
468+
case KeyValue.TYPE_STRING:
471469
return compare(commands.get(key), struct.getString(ToStructFunction.FIELD_STRING));
472-
case ZSET:
470+
case KeyValue.TYPE_ZSET:
473471
return compare(ToStructFunction.zsetMap(commands.zrangeWithScores(key, 0, -1)),
474472
struct.getMap(ToStructFunction.FIELD_ZSET));
475473
default:

core/redis-kafka-connect/src/test/java/com/redis/kafka/connect/EnterpriseSinkIntegrationTests.java

+1-7
Original file line numberDiff line numberDiff line change
@@ -2,20 +2,14 @@
22

33
import org.junit.jupiter.api.condition.EnabledOnOs;
44
import org.junit.jupiter.api.condition.OS;
5-
import org.springframework.util.unit.DataSize;
65

7-
import com.redis.enterprise.Database;
8-
import com.redis.enterprise.RedisModule;
96
import com.redis.enterprise.testcontainers.RedisEnterpriseContainer;
107
import com.redis.testcontainers.RedisServer;
118

129
@EnabledOnOs(value = OS.LINUX)
1310
class EnterpriseSinkIntegrationTests extends AbstractSinkIntegrationTests {
1411

15-
private static final RedisEnterpriseContainer container = new RedisEnterpriseContainer(
16-
RedisEnterpriseContainer.DEFAULT_IMAGE_NAME.withTag(RedisEnterpriseContainer.DEFAULT_TAG))
17-
.withDatabase(Database.builder().name("BatchTests").memory(DataSize.ofMegabytes(50).toBytes())
18-
.ossCluster(true).modules(RedisModule.JSON, RedisModule.TIMESERIES, RedisModule.SEARCH).build());
12+
private static final RedisEnterpriseContainer container = RedisContainerFactory.enterprise();
1913

2014
@Override
2115
protected RedisServer getRedisServer() {

core/redis-kafka-connect/src/test/java/com/redis/kafka/connect/EnterpriseSourceIntegrationTests.java

+1-7
Original file line numberDiff line numberDiff line change
@@ -2,20 +2,14 @@
22

33
import org.junit.jupiter.api.condition.EnabledOnOs;
44
import org.junit.jupiter.api.condition.OS;
5-
import org.springframework.util.unit.DataSize;
65

7-
import com.redis.enterprise.Database;
8-
import com.redis.enterprise.RedisModule;
96
import com.redis.enterprise.testcontainers.RedisEnterpriseContainer;
107
import com.redis.testcontainers.RedisServer;
118

129
@EnabledOnOs(value = OS.LINUX)
1310
class EnterpriseSourceIntegrationTests extends AbstractSourceIntegrationTests {
1411

15-
private static final RedisEnterpriseContainer container = new RedisEnterpriseContainer(
16-
RedisEnterpriseContainer.DEFAULT_IMAGE_NAME.withTag(RedisEnterpriseContainer.DEFAULT_TAG))
17-
.withDatabase(Database.builder().name("BatchTests").memory(DataSize.ofMegabytes(50).toBytes())
18-
.ossCluster(true).modules(RedisModule.JSON, RedisModule.TIMESERIES, RedisModule.SEARCH).build());
12+
private static final RedisEnterpriseContainer container = RedisContainerFactory.enterprise();
1913

2014
@Override
2115
protected RedisServer getRedisServer() {
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
package com.redis.kafka.connect;
2+
3+
import com.redis.enterprise.Database;
4+
import com.redis.enterprise.RedisModule;
5+
import com.redis.enterprise.testcontainers.RedisEnterpriseContainer;
6+
import com.redis.enterprise.testcontainers.RedisEnterpriseServer;
7+
import com.redis.testcontainers.RedisStackContainer;
8+
9+
public interface RedisContainerFactory {
10+
11+
String ENTERPRISE_TAG = "7.4.6-102";
12+
String STACK_TAG = "7.2.0-v13";
13+
14+
static RedisStackContainer stack() {
15+
return new RedisStackContainer(RedisStackContainer.DEFAULT_IMAGE_NAME.withTag(STACK_TAG));
16+
}
17+
18+
@SuppressWarnings("resource")
19+
static RedisEnterpriseContainer enterprise() {
20+
return new RedisEnterpriseContainer(RedisEnterpriseContainer.DEFAULT_IMAGE_NAME.withTag(ENTERPRISE_TAG))
21+
.withDatabase(Database.builder().name("ConnectorTests").memoryMB(50).ossCluster(true)
22+
.modules(RedisModule.TIMESERIES, RedisModule.JSON, RedisModule.SEARCH).build());
23+
}
24+
25+
static RedisEnterpriseServer enterpriseServer() {
26+
RedisEnterpriseServer server = new RedisEnterpriseServer();
27+
server.withDatabase(Database.builder().shardCount(2).port(12001).ossCluster(true)
28+
.modules(RedisModule.JSON, RedisModule.SEARCH, RedisModule.TIMESERIES).build());
29+
return server;
30+
}
31+
32+
}

core/redis-kafka-connect/src/test/java/com/redis/kafka/connect/StackSinkIntegrationTests.java

+1-2
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,7 @@
55

66
class StackSinkIntegrationTests extends AbstractSinkIntegrationTests {
77

8-
private static final RedisStackContainer container = new RedisStackContainer(
9-
RedisStackContainer.DEFAULT_IMAGE_NAME.withTag(RedisStackContainer.DEFAULT_TAG));
8+
private static final RedisStackContainer container = RedisContainerFactory.stack();
109

1110
@Override
1211
protected RedisServer getRedisServer() {

core/redis-kafka-connect/src/test/java/com/redis/kafka/connect/StackSourceIntegrationTests.java

+1-2
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,7 @@
55

66
class StackSourceIntegrationTests extends AbstractSourceIntegrationTests {
77

8-
private static final RedisStackContainer container = new RedisStackContainer(
9-
RedisStackContainer.DEFAULT_IMAGE_NAME.withTag(RedisStackContainer.DEFAULT_TAG));
8+
private static final RedisStackContainer container = RedisContainerFactory.stack();
109

1110
@Override
1211
protected RedisServer getRedisServer() {

docker-compose.yml

-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
---
2-
version: '2'
31
services:
42
zookeeper:
53
image: confluentinc/cp-zookeeper:7.2.0

gradle.properties

+2-2
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ targetCompatibility = 17
2121
reproducibleBuild = true
2222

2323
asciidoctorVersion = 3.3.2
24-
bootPluginVersion = 3.3.4
24+
bootPluginVersion = 3.3.5
2525
dependencyPluginVersion = 1.1.6
2626
gitPluginVersion = 3.0.0
2727
jacocoPluginVersion = 0.8.12
@@ -30,7 +30,7 @@ kordampPluginVersion = 0.54.0
3030
shadowPluginVersion = 8.1.8
3131

3232
lettucemodVersion = 4.1.0
33-
springBatchRedisVersion = 4.4.9-SNAPSHOT
33+
springBatchRedisVersion = 4.5.1-SNAPSHOT
3434
testcontainersRedisVersion = 2.2.2
3535

3636
org.gradle.daemon = false

0 commit comments

Comments (0)