
 import com.dtstack.flink.sql.format.FormatType;
 import com.dtstack.flink.sql.format.SerializationMetricWrapper;
+import com.dtstack.flink.sql.sink.kafka.serialization.AvroCRowSerializationSchema;
+import com.dtstack.flink.sql.sink.kafka.serialization.CsvCRowSerializationSchema;
+import com.dtstack.flink.sql.sink.kafka.serialization.JsonCRowSerializationSchema;
 import com.dtstack.flink.sql.sink.kafka.table.KafkaSinkTableInfo;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.serialization.SerializationSchema;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.formats.avro.AvroRowSerializationSchema;
-import org.apache.flink.formats.csv.CsvRowSerializationSchema;
-import org.apache.flink.formats.json.JsonRowSerializationSchema;
 import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
 import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
-import org.apache.flink.types.Row;
+import org.apache.flink.table.runtime.types.CRow;

 import java.util.Optional;
 import java.util.Properties;
@@ -51,42 +51,37 @@ public abstract class AbstractKafkaProducerFactory {
      * @param partitioner
      * @return
      */
-    public abstract RichSinkFunction<Row> createKafkaProducer(KafkaSinkTableInfo kafkaSinkTableInfo, TypeInformation<Row> typeInformation, Properties properties, Optional<FlinkKafkaPartitioner<Row>> partitioner, String[] partitionKeys);
+    public abstract RichSinkFunction<CRow> createKafkaProducer(KafkaSinkTableInfo kafkaSinkTableInfo, TypeInformation<CRow> typeInformation, Properties properties, Optional<FlinkKafkaPartitioner<CRow>> partitioner, String[] partitionKeys);

-    protected SerializationMetricWrapper createSerializationMetricWrapper(KafkaSinkTableInfo kafkaSinkTableInfo, TypeInformation<Row> typeInformation) {
-        return new SerializationMetricWrapper(createSerializationSchema(kafkaSinkTableInfo, typeInformation));
+    protected SerializationMetricWrapper createSerializationMetricWrapper(KafkaSinkTableInfo kafkaSinkTableInfo, TypeInformation<CRow> typeInformation) {
+        SerializationSchema<CRow> serializationSchema = createSerializationSchema(kafkaSinkTableInfo, typeInformation);
+        return new SerializationMetricWrapper(serializationSchema);
     }

-    private SerializationSchema<Row> createSerializationSchema(KafkaSinkTableInfo kafkaSinkTableInfo, TypeInformation<Row> typeInformation) {
-        SerializationSchema<Row> serializationSchema = null;
+    private SerializationSchema<CRow> createSerializationSchema(KafkaSinkTableInfo kafkaSinkTableInfo, TypeInformation<CRow> typeInformation) {
+        SerializationSchema<CRow> serializationSchema = null;
         if (FormatType.JSON.name().equalsIgnoreCase(kafkaSinkTableInfo.getSinkDataType())) {
-
             if (StringUtils.isNotBlank(kafkaSinkTableInfo.getSchemaString())) {
-                serializationSchema = new JsonRowSerializationSchema(kafkaSinkTableInfo.getSchemaString());
+                serializationSchema = new JsonCRowSerializationSchema(kafkaSinkTableInfo.getSchemaString(), kafkaSinkTableInfo.getUpdateMode());
             } else if (typeInformation != null && typeInformation.getArity() != 0) {
-                serializationSchema = new JsonRowSerializationSchema(typeInformation);
+                serializationSchema = new JsonCRowSerializationSchema(typeInformation, kafkaSinkTableInfo.getUpdateMode());
             } else {
                 throw new IllegalArgumentException("sinkDataType:" + FormatType.JSON.name() + " must set schemaString(JSON Schema)or TypeInformation<Row>");
             }
-
         } else if (FormatType.CSV.name().equalsIgnoreCase(kafkaSinkTableInfo.getSinkDataType())) {
-
             if (StringUtils.isBlank(kafkaSinkTableInfo.getFieldDelimiter())) {
                 throw new IllegalArgumentException("sinkDataType:" + FormatType.CSV.name() + " must set fieldDelimiter");
             }
-
-            final CsvRowSerializationSchema.Builder serSchemaBuilder = new CsvRowSerializationSchema.Builder(typeInformation);
+            final CsvCRowSerializationSchema.Builder serSchemaBuilder = new CsvCRowSerializationSchema.Builder(typeInformation);
             serSchemaBuilder.setFieldDelimiter(kafkaSinkTableInfo.getFieldDelimiter().toCharArray()[0]);
-            serializationSchema = serSchemaBuilder.build();
+            serSchemaBuilder.setUpdateMode(kafkaSinkTableInfo.getUpdateMode());

+            serializationSchema = serSchemaBuilder.build();
         } else if (FormatType.AVRO.name().equalsIgnoreCase(kafkaSinkTableInfo.getSinkDataType())) {
-
             if (StringUtils.isBlank(kafkaSinkTableInfo.getSchemaString())) {
                 throw new IllegalArgumentException("sinkDataType:" + FormatType.AVRO.name() + " must set schemaString");
             }
-
-            serializationSchema = new AvroRowSerializationSchema(kafkaSinkTableInfo.getSchemaString());
-
+            serializationSchema = new AvroCRowSerializationSchema(kafkaSinkTableInfo.getSchemaString(), kafkaSinkTableInfo.getUpdateMode());
         }

         if (null == serializationSchema) {
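
Note on the Row -> CRow switch: a CRow (org.apache.flink.table.runtime.types.CRow) pairs the Row payload with a boolean change flag, and each new *CRowSerializationSchema also receives the updateMode from KafkaSinkTableInfo, presumably so retract/upsert messages can be serialized according to that mode. The snippet below is only a minimal illustrative sketch of the CRow type itself; it is not taken from this PR, and how the serializers actually interpret the flag is not shown in this diff.

// Hypothetical standalone example, not part of this change set.
import org.apache.flink.table.runtime.types.CRow;
import org.apache.flink.types.Row;

public class CRowExample {
    public static void main(String[] args) {
        Row payload = Row.of("user_1", 100L);

        // A CRow wraps the Row together with a change flag:
        // true  -> accumulate/insert message, false -> retract message.
        CRow insert = new CRow(payload, true);
        CRow retract = new CRow(payload, false);

        System.out.println(insert.row() + " change=" + insert.change());
        System.out.println(retract.row() + " change=" + retract.change());
    }
}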