@@ -18,6 +18,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -28,11 +29,14 @@
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.config.ConfigException;
 import org.apache.kafka.connect.data.Field;
+import org.apache.kafka.connect.data.Schema;
 import org.apache.kafka.connect.data.Struct;
 import org.apache.kafka.connect.errors.ConnectException;
 import org.apache.kafka.connect.errors.DataException;
+import org.apache.kafka.connect.json.JsonConverter;
 import org.apache.kafka.connect.sink.SinkRecord;
 import org.apache.kafka.connect.sink.SinkTask;
+import org.apache.kafka.connect.storage.Converter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.batch.item.ExecutionContext;
@@ -73,6 +77,7 @@ public class RedisEnterpriseSinkTask extends SinkTask {
     private RedisEnterpriseSinkConfig config;
     private RedisItemWriter<byte[], byte[], SinkRecord> writer;
     private StatefulRedisConnection<String, String> connection;
+    private Converter jsonConverter;

     @Override
     public String version() {
@@ -84,6 +89,8 @@ public void start(final Map<String, String> props) {
         config = new RedisEnterpriseSinkConfig(props);
         client = RedisModulesClient.create(config.getRedisURI());
         connection = client.connect();
+        jsonConverter = new JsonConverter();
+        jsonConverter.configure(Collections.singletonMap("schemas.enable", "false"), false);
         writer = writer(client).build();
         writer.open(new ExecutionContext());
         final java.util.Set<TopicPartition> assignment = this.context.assignment();
@@ -139,7 +146,7 @@ private RedisOperation<byte[], byte[], SinkRecord> operation() {
             return Hset.<byte[], byte[], SinkRecord>key(this::key).map(this::map).del(this::isDelete).build();
         case JSON:
             return JsonSet.<byte[], byte[], SinkRecord>key(this::key).path(".".getBytes(config.getCharset()))
-                    .value(this::value).del(this::isDelete).build();
+                    .value(this::jsonValue).del(this::isDelete).build();
         case STRING:
             return Set.<byte[], byte[], SinkRecord>key(this::key).value(this::value).del(this::isDelete).build();
         case STREAM:
@@ -166,6 +173,16 @@ private byte[] value(SinkRecord record) {
         return bytes("value", record.value());
     }

+    private byte[] jsonValue(SinkRecord record) {
+        if (record.value() == null) {
+            return null;
+        }
+        Schema schema = record.valueSchema();
+        Object value = record.value();
+
+        return jsonConverter.fromConnectData(record.topic(), schema, value);
+    }
+
     private Long longMember(SinkRecord record) {
         Object key = record.key();
         if (key == null) {
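
For reference, below is a minimal sketch of what the converter configured in start() produces. It assumes the Kafka connect-api and connect-json artifacts are on the classpath; the JsonValueDemo class name, the topic name, and the schema fields are illustrative only, not part of this commit. With schemas.enable set to false, fromConnectData serializes the value as plain JSON bytes, without the {"schema": ..., "payload": ...} envelope, which is the shape JSON.SET can store directly.

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.json.JsonConverter;

public class JsonValueDemo {

    public static void main(String[] args) {
        // Same configuration as in start(): plain payloads (no schema envelope),
        // registered as a value converter (isKey = false).
        JsonConverter converter = new JsonConverter();
        converter.configure(Collections.singletonMap("schemas.enable", "false"), false);

        // Hypothetical record value: a Struct with two fields.
        Schema schema = SchemaBuilder.struct()
                .field("id", Schema.INT64_SCHEMA)
                .field("name", Schema.STRING_SCHEMA)
                .build();
        Struct value = new Struct(schema).put("id", 1L).put("name", "redis");

        // fromConnectData is the call jsonValue() delegates to for each SinkRecord.
        byte[] json = converter.fromConnectData("my-topic", schema, value);

        // Prints {"id":1,"name":"redis"}
        System.out.println(new String(json, StandardCharsets.UTF_8));
    }
}

Note that jsonValue returns null for a null record value; this appears to line up with the del(this::isDelete) predicate on the JsonSet builder, so tombstone records are routed to a delete rather than a write.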