@@ -1,12 +1,9 @@
 package com.taosdata.kafka.connect.source;
 
-import com.alibaba.fastjson.JSON;
 import com.taosdata.jdbc.tmq.ConsumerRecord;
 import com.taosdata.jdbc.tmq.ConsumerRecords;
 import com.taosdata.kafka.connect.db.Processor;
 import com.taosdata.kafka.connect.enums.OutputFormatEnum;
-import org.apache.kafka.connect.data.Schema;
-import org.apache.kafka.connect.data.Struct;
 import org.apache.kafka.connect.source.SourceRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -17,13 +14,15 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 public class JsonMapper extends TableMapper {
     private static final Logger log = LoggerFactory.getLogger(JsonMapper.class);
     public static int count;
-    public JsonMapper(String topic, String tableName, int batchMaxRows, Processor processor) throws SQLException {
+
+    private boolean outFormatJsonNoArray = true;
+    public JsonMapper(String topic, String tableName, int batchMaxRows, Processor processor, boolean outFormatJsonNoArray) throws SQLException {
         super(topic, tableName, batchMaxRows, processor, OutputFormatEnum.JSON);
+        this.outFormatJsonNoArray = outFormatJsonNoArray;
     }
 
     @Override
@@ -70,21 +69,26 @@ public List<SourceRecord> process(List<ConsumerRecords<Map<String, Object>>> rec
 
             long ts = (Long) value.get(timestampColumn);
             for (String tag : tags) {
-                tagStruct.put(tag, value.get(tag));
+                tagStruct.put(tag, getValue(value.get(tag), columnType.get(tag)));
             }
             TDStruct valueStruct = new TDStruct(valueSchema);
             valueStruct.put(timestampColumn, ts);
             for (String column : columns) {
-                valueStruct.put(column, value.get(column));
+                valueStruct.put(column, getValue(value.get(column), columnType.get(column)));
             }
             if (!tags.isEmpty()) {
                 valueStruct.put("tags", tagStruct);
            }
 
             structs.add(valueStruct);
 
-            pendingRecords.add(new SourceRecord(
-                    partition, offset.toMap(), topic, valueSchema, structs));
+            if (outFormatJsonNoArray){
+                pendingRecords.add(new SourceRecord(
+                        partition, offset.toMap(), topic, valueSchema, structs.get(0)));
+            }else {
+                pendingRecords.add(new SourceRecord(
+                        partition, offset.toMap(), topic, valueSchema, structs));
+            }
         }
     }
 
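In short, this change does two things: tag and column values are now routed through getValue(...) together with their declared column type before being written into the structs, and a new outFormatJsonNoArray flag, passed in through the constructor, controls whether each SourceRecord carries a single struct or the whole list of structs. Below is a minimal, self-contained sketch of that payload-shape decision; the class, method, and field names are invented for illustration and are not connector code — it only mirrors the branching added in this diff, under the assumption that a JSON converter serializes a single map as one object and a list of maps as an array.

import java.util.List;
import java.util.Map;

// Hypothetical demo, not connector code: shows the object-vs-array choice
// that the outFormatJsonNoArray flag makes for the record value.
public class PayloadShapeDemo {

    // true  -> emit one row per record (serialized as {...})
    // false -> emit the accumulated batch (serialized as [{...}, {...}])
    static Object recordValue(List<Map<String, Object>> rows, boolean outFormatJsonNoArray) {
        return outFormatJsonNoArray ? rows.get(0) : rows;
    }

    public static void main(String[] args) {
        List<Map<String, Object>> rows = List.of(
                Map.of("ts", 1700000000000L, "val", 3.14),
                Map.of("ts", 1700000001000L, "val", 2.72));
        // Prints a single row map, e.g. {ts=..., val=3.14} (key order not guaranteed)
        System.out.println(recordValue(rows, true));
        // Prints the whole batch, e.g. [{...}, {...}]
        System.out.println(recordValue(rows, false));
    }
}

When the flag is true, each Kafka message carries one JSON object, so consumers that expect a plain object no longer need to unwrap a one-element array; when it is false, the previous array-shaped output is preserved.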