
Commit a17dda0

Committed by Julien Ruaux
feat: Added support for Redis cluster mode
1 parent e065f7d commit a17dda0

29 files changed: +556 −420 lines

.github/workflows/early-access.yml

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ on:
 jobs:
   earlyaccess:
     name: EarlyAccess
-    if: github.repository == 'redis-field-engineering/redis-enterprise-kafka' && startsWith(github.event.head_commit.message, 'Releasing version') != true
+    if: startsWith(github.event.head_commit.message, 'Releasing version') != true
     runs-on: ubuntu-18.04
     steps:
       - name: Checkout

README.adoc

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-= Redis Enterprise Kafka Connector
+= Redis Kafka Connector
 :linkattrs:
 :project-owner: redis-field-engineering
 :project-name: redis-enterprise-kafka
@@ -8,7 +8,7 @@
 image:https://github.com/{project-owner}/{project-name}/actions/workflows/early-access.yml/badge.svg["Build Status", link="https://github.com/{project-owner}/{project-name}/actions"]
 image:https://codecov.io/gh/{project-owner}/{project-name}/branch/master/graph/badge.svg?token=MTMRRGEWBD["Coverage", link="https://codecov.io/gh/{project-owner}/{project-name}"]
 
-Kafka Connect source and sink connectors for https://redis.com/redis-enterprise-software/overview/[Redis Enterprise]
+Kafka Connect source and sink connectors for Redis
 
 == Documentation

jreleaser.yml

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 project:
   name: redis-enterprise-kafka
-  description: Kafka Connector for Redis Enterprise
-  longDescription: Kafka Connect source and sink connectors for Redis Enterprise
+  description: Kafka Connector for Redis
+  longDescription: Kafka Connect source and sink connectors for Redis
   website: https://github.com/redis-field-engineering/redis-enterprise-kafka
   authors:
     - Julien Ruaux

pom.xml

Lines changed: 16 additions & 18 deletions
@@ -7,8 +7,8 @@
   <groupId>com.redis</groupId>
   <artifactId>redis-enterprise-kafka</artifactId>
   <version>6.8-SNAPSHOT</version>
-  <name>Redis Enterprise Kafka Connector</name>
-  <description>Kafka Connect source and sink connectors for Redis Enterprise</description>
+  <name>Redis Kafka Connector</name>
+  <description>Kafka Connect source and sink connectors for Redis</description>
   <url>https://github.com/${github.owner}/${github.repo}</url>
   <inceptionYear>2021</inceptionYear>
   <properties>
@@ -83,12 +83,12 @@
       <version>${kafka.version}</version>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>connect-json</artifactId>
-      <version>${kafka.version}</version>
-      <scope>provided</scope>
-    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>connect-json</artifactId>
+      <version>${kafka.version}</version>
+      <scope>provided</scope>
+    </dependency>
     <dependency>
       <groupId>com.redis</groupId>
       <artifactId>lettucemod</artifactId>
@@ -273,7 +273,7 @@
       <artifactId>kafka-connect-maven-plugin</artifactId>
       <version>0.12.0</version>
       <configuration>
-        <title>Redis Enterprise Connector</title>
+        <title>Redis Connector (Source and Sink) by Redis</title>
         <ownerUsername>redis</ownerUsername>
         <documentationUrl>https://${github.owner}.github.io/${github.repo}/</documentationUrl>
         <ownerLogo>src/docs/asciidoc/images/redis_logo.png</ownerLogo>
@@ -282,18 +282,16 @@
         <sourceUrl>${project.scm.url}</sourceUrl>
         <supportProviderName>Redis</supportProviderName>
         <supportSummary>
-          <![CDATA[This connector is <a href="https://redis.com/company/support/">supported by Redis</a> as part of a
-          <a href="https://redis.com/redis-enterprise">Redis Enterprise</a> license.]]>
+          <![CDATA[Contact us on the <a href="https://forum.redis.com">Redis Forum</a> or create an issue on <a href="https://github.com/redis-field-engineering/redis-enterprise-kafka">Github</a> where we provide support on a good faith effort basis.]]>
         </supportSummary>
         <supportUrl>${project.issueManagement.url}</supportUrl>
         <confluentControlCenterIntegration>true</confluentControlCenterIntegration>
         <componentTypes>
-          <componentType>sink</componentType>
           <componentType>source</componentType>
+          <componentType>sink</componentType>
         </componentTypes>
         <tags>
           <tag>redis</tag>
-          <tag>enterprise</tag>
         </tags>
       </configuration>
       <executions>
@@ -305,11 +303,11 @@
           </goals>
           <configuration>
             <description>Kafka Connect source and sink connectors for Redis
-              Enterprise. It is for Confluent Plaform 5.0 and above</description>
+              (Confluent Plaform 5.x)</description>
             <version>5.${project.version}</version>
             <requirements>
-              <requirement>Confluent Platform 5.0+</requirement>
-              <requirement>Redis Enterprise 6.0+</requirement>
+              <requirement>Confluent Platform 5.x</requirement>
+              <requirement>Redis</requirement>
             </requirements>
           </configuration>
         </execution>
@@ -321,11 +319,11 @@
           </goals>
           <configuration>
             <description>Kafka Connect source and sink connectors for Redis
-              Enterprise. It is for Confluent Plaform 6.0 and above</description>
+              (Confluent Plaform 6.0 and above)</description>
             <version>6.${project.version}</version>
             <requirements>
               <requirement>Confluent Platform 6.0+</requirement>
-              <requirement>Redis Enterprise 6.0+</requirement>
+              <requirement>Redis</requirement>
             </requirements>
             <excludes>
               <exclude>io.netty:*</exclude>

run.sh

Lines changed: 11 additions & 11 deletions
@@ -8,7 +8,7 @@ if lsof -Pi :6379 -sTCP:LISTEN -t >/dev/null ; then
 fi
 )
 
-echo "Building the Redis Enterprise Kafka Connector"
+echo "Building the Redis Kafka Connector"
 (
 ./mvnw clean package -DskipTests
 mv target/components/packages/redis-redis-enterprise-kafka-6.*.zip target/components/packages/redis-enterprise-kafka.zip
@@ -20,8 +20,8 @@ docker-compose up -d --build
 function clean_up {
   echo -e "\n\nSHUTTING DOWN\n\n"
   curl --output /dev/null -X DELETE http://localhost:8083/connectors/datagen-pageviews || true
-  curl --output /dev/null -X DELETE http://localhost:8083/connectors/redis-enterprise-sink || true
-  curl --output /dev/null -X DELETE http://localhost:8083/connectors/redis-enterprise-source || true
+  curl --output /dev/null -X DELETE http://localhost:8083/connectors/redis-sink || true
+  curl --output /dev/null -X DELETE http://localhost:8083/connectors/redis-source || true
   docker-compose down
   if [ -z "$1" ]
   then
@@ -84,9 +84,9 @@ sleep 5
 
 echo -e "\nAdding Redis Enteprise Kafka Sink Connector for the 'pageviews' topic into the 'pageviews' stream:"
 curl -X POST -H "Content-Type: application/json" --data '
-{"name": "redis-enterprise-sink",
+{"name": "redis-sink",
 "config": {
-"connector.class":"com.redis.kafka.connect.RedisEnterpriseSinkConnector",
+"connector.class":"com.redis.kafka.connect.RedisSinkConnector",
 "tasks.max":"1",
 "topics":"pageviews",
 "redis.uri":"redis://redis:6379",
@@ -96,11 +96,11 @@ curl -X POST -H "Content-Type: application/json" --data '
 }}' http://localhost:8083/connectors -w "\n"
 
 sleep 2
-echo -e "\nAdding Redis Enterprise Kafka Sink Connector for the 'pageviews' topic into RedisJSON:"
+echo -e "\nAdding Redis Kafka Sink Connector for the 'pageviews' topic into RedisJSON:"
 curl -X POST -H "Content-Type: application/json" --data '
-{"name": "redis-enterprise-sink-json",
+{"name": "redis-sink-json",
 "config": {
-"connector.class":"com.redis.kafka.connect.RedisEnterpriseSinkConnector",
+"connector.class":"com.redis.kafka.connect.RedisSinkConnector",
 "tasks.max":"1",
 "topics":"pageviews",
 "redis.uri":"redis://redis:6379",
@@ -114,12 +114,12 @@ curl -X POST -H "Content-Type: application/json" --data '
 }}' http://localhost:8083/connectors -w "\n"
 
 sleep 2
-echo -e "\nAdding Redis Enterprise Kafka Source Connector for the 'mystream' stream:"
+echo -e "\nAdding Redis Kafka Source Connector for the 'mystream' stream:"
 curl -X POST -H "Content-Type: application/json" --data '
-{"name": "redis-enterprise-source",
+{"name": "redis-source",
 "config": {
 "tasks.max":"1",
-"connector.class":"com.redis.kafka.connect.RedisEnterpriseSourceConnector",
+"connector.class":"com.redis.kafka.connect.RedisSourceConnector",
 "redis.uri":"redis://redis:6379",
 "redis.stream.name":"mystream",
 "topic": "mystream"

src/docs/asciidoc/_connect.adoc

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 [[connect]]
 = Connect to Redis
 
-This section provides information on configuring the Redis Enterprise Kafka Source or Sink Connector to connect to Redis Enterprise.
+This section provides information on configuring the Redis Kafka Source or Sink Connector.
 
 == Configuration
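
Both connectors reach Redis through the `redis.uri` property that run.sh passes in its registration payloads. A quick sketch for reading that setting back from a running Connect worker, assuming the worker listens on localhost:8083 as in run.sh:

[source,console]
----
# List registered connectors, then read back the sink's Redis connection setting.
curl -s http://localhost:8083/connectors
curl -s http://localhost:8083/connectors/redis-sink/config
# The config is expected to include "redis.uri": "redis://redis:6379" (the value used in run.sh).
----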

src/docs/asciidoc/_docker.adoc

Lines changed: 5 additions & 6 deletions
@@ -1,10 +1,10 @@
 [[docker]]
 = Quick Start with Docker
 
-This guide provides a hands-on look at the functionality of the Redis Enterprise Kafka Source and Sink Connectors:
+This guide provides a hands-on look at the functionality of the Redis Kafka Source and Sink Connectors:
 
-* The *redis-enterprise-sink* connector reads data from a Kafka topic and writes it to a Redis stream
-* The *redis-enterprise-source* connector reads data from a Redis stream and writes it to a Kafka topic
+* The *redis-sink* connector reads data from a Kafka topic and writes it to a Redis stream
+* The *redis-source* connector reads data from a Redis stream and writes it to a Kafka topic
 
 == Requirements
 
@@ -17,7 +17,6 @@ Clone the https://github.com/{github-owner}/{github-repo}.git[{github-repo}] rep
 [source,console,subs="attributes"]
 ----
 git clone https://github.com/{github-owner}/{github-repo}.git
-cd redis-enterprise-kafka/docker
 ./run.sh
 ----
 
@@ -26,8 +25,8 @@ This will:
 * Run `docker-compose up`
 * Wait for Redis, Kafka, and Kafka Connect to be ready
 * Register the Confluent Datagen Connector
-* Register the Redis Enterprise Kafka Sink Connector
-* Register the Redis Enterprise Kafka Source Connector
+* Register the Redis Kafka Sink Connector
+* Register the Redis Kafka Source Connector
 * Publish some events to Kafka via the Datagen connector
 * Write the events to Redis
 * Send messages to a Redis stream
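
Once run.sh has registered everything, the data flow can be spot-checked from the Redis side. A minimal sketch, assuming the demo stream name `pageviews` and a Redis instance on localhost:6379 as used by run.sh:

[source,console]
----
# Count and peek at the entries the sink connector has written to the 'pageviews' stream.
redis-cli XLEN pageviews
redis-cli XRANGE pageviews - + COUNT 3
# run.sh also sends demo messages to the 'mystream' stream for the source connector to pick up.
----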

src/docs/asciidoc/_sink.adoc

Lines changed: 2 additions & 2 deletions
@@ -1,8 +1,8 @@
 [[sink]]
 = Sink Connector Guide
-:name: Redis Enterprise Kafka Sink Connector
+:name: Redis Kafka Sink Connector
 
-The {name} consumes records from a Kafka topic and writes the data to a Redis Enterprise database.
+The {name} consumes records from a Kafka topic and writes the data to Redis.
 
 == Features
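
A trimmed-down registration sketch based on the payload run.sh posts for the renamed sink class; the script sets additional fields not shown in this diff, and the connector name, topic, and Redis URI below are its demo values:

[source,console]
----
curl -X POST -H "Content-Type: application/json" --data '
{"name": "redis-sink",
 "config": {
   "connector.class": "com.redis.kafka.connect.RedisSinkConnector",
   "tasks.max": "1",
   "topics": "pageviews",
   "redis.uri": "redis://redis:6379"
}}' http://localhost:8083/connectors -w "\n"
----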

src/docs/asciidoc/_source.adoc

Lines changed: 2 additions & 2 deletions
@@ -1,8 +1,8 @@
 [[source]]
 = Source Connector Guide
-:name: Redis Enterprise Kafka Source Connector
+:name: Redis Kafka Source Connector
 
-The {name} reads from a Redis Enterprise stream and publishes messages to a Kafka topic.
+The {name} reads from a Redis stream and publishes messages to a Kafka topic.
 
 == Features
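
run.sh registers the renamed source class with a payload along the lines below; the stream and topic names are its demo values:

[source,console]
----
curl -X POST -H "Content-Type: application/json" --data '
{"name": "redis-source",
 "config": {
   "tasks.max": "1",
   "connector.class": "com.redis.kafka.connect.RedisSourceConnector",
   "redis.uri": "redis://redis:6379",
   "redis.stream.name": "mystream",
   "topic": "mystream"
}}' http://localhost:8083/connectors -w "\n"
----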

src/docs/asciidoc/images/redis-enterprise-kafka-connector.svg renamed to src/docs/asciidoc/images/redis-kafka-connector.svg

Lines changed: 1 addition & 1 deletion

src/docs/asciidoc/index.adoc

Lines changed: 3 additions & 3 deletions
@@ -5,14 +5,14 @@
 
 == Introduction
 
-The {project-name} is used to import and export data between Apache Kafka and Redis Enterprise.
+The {project-name} is used to import and export data between Apache Kafka and Redis.
 
-image:images/redis-enterprise-kafka-connector.svg[]
+image:images/redis-kafka-connector.svg[]
 
 This guide provides documentation and usage information across the following topics:
 
 * <<install,Install>>
-* <<connect,Connect to Redis Enterprise>>
+* <<connect,Connect to Redis>>
 * <<sink,Sink Connector>>
 * <<source,Source Connector>>
 * <<docker,Docker Example>>

src/main/java/com/redis/kafka/connect/RedisEnterpriseSinkConnector.java renamed to src/main/java/com/redis/kafka/connect/RedisSinkConnector.java

Lines changed: 5 additions & 5 deletions
@@ -15,8 +15,8 @@
  */
 package com.redis.kafka.connect;
 
-import com.redis.kafka.connect.sink.RedisEnterpriseSinkConfig;
-import com.redis.kafka.connect.sink.RedisEnterpriseSinkTask;
+import com.redis.kafka.connect.sink.RedisSinkConfig;
+import com.redis.kafka.connect.sink.RedisSinkTask;
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.common.utils.AppInfoParser;
 import org.apache.kafka.connect.connector.Task;
@@ -26,7 +26,7 @@
 import java.util.List;
 import java.util.Map;
 
-public class RedisEnterpriseSinkConnector extends SinkConnector {
+public class RedisSinkConnector extends SinkConnector {
 
     private Map<String, String> props;
 
@@ -37,7 +37,7 @@ public void start(Map<String, String> props) {
 
     @Override
     public Class<? extends Task> taskClass() {
-        return RedisEnterpriseSinkTask.class;
+        return RedisSinkTask.class;
     }
 
     @Override
@@ -51,7 +51,7 @@ public void stop() {
 
     @Override
     public ConfigDef config() {
-        return new RedisEnterpriseSinkConfig.RedisEnterpriseSinkConfigDef();
+        return new RedisSinkConfig.RedisSinkConfigDef();
     }
 

src/main/java/com/redis/kafka/connect/RedisEnterpriseSourceConnector.java renamed to src/main/java/com/redis/kafka/connect/RedisSourceConnector.java

Lines changed: 10 additions & 10 deletions
@@ -12,8 +12,8 @@
  */
 package com.redis.kafka.connect;
 
-import com.redis.kafka.connect.source.RedisEnterpriseSourceConfig;
-import com.redis.kafka.connect.source.RedisEnterpriseSourceTask;
+import com.redis.kafka.connect.source.RedisSourceConfig;
+import com.redis.kafka.connect.source.RedisSourceTask;
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.common.config.ConfigException;
 import org.apache.kafka.common.utils.AppInfoParser;
@@ -28,29 +28,29 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 
-public class RedisEnterpriseSourceConnector extends SourceConnector {
+public class RedisSourceConnector extends SourceConnector {
 
     private Map<String, String> props;
-    private RedisEnterpriseSourceConfig config;
+    private RedisSourceConfig config;
 
     @Override
     public void start(Map<String, String> props) {
         this.props = props;
         try {
-            this.config = new RedisEnterpriseSourceConfig(props);
+            this.config = new RedisSourceConfig(props);
         } catch (ConfigException configException) {
             throw new ConnectException(configException);
         }
     }
 
     @Override
     public Class<? extends Task> taskClass() {
-        return RedisEnterpriseSourceTask.class;
+        return RedisSourceTask.class;
     }
 
     @Override
     public List<Map<String, String>> taskConfigs(int maxTasks) {
-        if (this.config.getReaderType() == RedisEnterpriseSourceConfig.ReaderType.KEYS) {
+        if (this.config.getReaderType() == RedisSourceConfig.ReaderType.KEYS) {
             // Partition the configs based on channels
             final List<List<String>> partitionedPatterns = ConnectorUtils
                     .groupPartitions(this.config.getKeyPatterns(), Math.min(this.config.getKeyPatterns().size(), maxTasks));
@@ -61,15 +61,15 @@ public List<Map<String, String>> taskConfigs(int maxTasks) {
         List<Map<String, String>> taskConfigs = new ArrayList<>();
         for (int i = 0; i < maxTasks; i++) {
             Map<String, String> taskConfig = new HashMap<>(this.props);
-            taskConfig.put(RedisEnterpriseSourceTask.TASK_ID, Integer.toString(i));
+            taskConfig.put(RedisSourceTask.TASK_ID, Integer.toString(i));
             taskConfigs.add(taskConfig);
         }
         return taskConfigs;
     }
 
     private Map<String, String> taskConfig(List<String> patterns) {
         final Map<String, String> taskConfig = new HashMap<>(this.config.originalsStrings());
-        taskConfig.put(RedisEnterpriseSourceConfig.KEY_PATTERNS_CONFIG, String.join(",", patterns));
+        taskConfig.put(RedisSourceConfig.KEY_PATTERNS_CONFIG, String.join(",", patterns));
         return taskConfig;
     }
 
@@ -79,7 +79,7 @@ public void stop() {
 
     @Override
     public ConfigDef config() {
-        return new RedisEnterpriseSourceConfig.RedisEnterpriseSourceConfigDef();
+        return new RedisSourceConfig.RedisSourceConfigDef();
     }
 
     @Override
