Commit c5470ee1 authored by shezaixing

Configurable logical (soft) delete

Configurable CDC log record writing
parent 0153cd84
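Both features are configuration-driven. A minimal sketch of the corresponding application.properties entries, assuming the key names match the EnvProperties fields added further down in this commit (the values are illustrative):

# assumed keys, matching the new EnvProperties fields
logical_delete=true
log_enable=true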
package com.dsk.flink.dsc.common.function;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
@@ -80,6 +81,12 @@ public class AsyncMysqlDataTransferFunction extends RichAsyncFunction<JSONObject
            }
            JSONObject dataObj = dataList.getJSONObject(0);
            Boolean logicalDelete = MapUtil.getBool(dbInfoMap, "logical_delete", false);
            if (logicalDelete) {
                // Logical delete: carry an is_del flag column instead of physically removing rows.
                mysqlType.put("is_del", "int");
                dataObj.put("is_del", "DELETE".equals(type) ? 1 : 0);
            }
            if ("INSERT".equals(type)) {
                excueteSql = tranferInsertSql(table, dataObj, mysqlType);
            }
@@ -90,8 +97,17 @@ public class AsyncMysqlDataTransferFunction extends RichAsyncFunction<JSONObject
            }
            if ("DELETE".equals(type)) {
                // With logical delete enabled, a DELETE event is routed through the insert
                // path so the row is written with is_del = 1 rather than deleted.
                excueteSql = logicalDelete ? tranferInsertSql(table, dataObj, mysqlType) : transferDeleteSql(table, dataObj, mysqlType, pkNameSet);
            }
            // Ordering: build a unique groupKey from the table name plus the primary key
            // values of this row, so statements for the same row can be sequenced downstream.
            String groupKey = table;
            for (String pk : pkNameSet) {
                String pkValue = getValueString(dataObj, pk, mysqlType.getString(pk));
                groupKey = groupKey.concat("-").concat(pkValue);
            }
Long ts = value.getLong("ts");
            resultFuture.complete(Collections.singleton(excueteSql));
        } catch (Exception e) {
......
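To make the logical-delete branch concrete, here is a minimal, self-contained sketch of the rewrite it performs. The SQL shapes are assumptions (the project's tranferInsertSql presumably emits an upsert); the table and column values are illustrative, not taken from the repository.

import com.alibaba.fastjson.JSONObject;

// Sketch only: a canal DELETE event becomes an is_del flag write when
// logical_delete is enabled, instead of a physical DELETE.
public class LogicalDeleteSketch {
    public static void main(String[] args) {
        boolean logicalDelete = true;   // from the logical_delete flag
        String type = "DELETE";         // canal event type
        JSONObject dataObj = new JSONObject();
        dataObj.put("id", 1001L);
        if (logicalDelete) {
            dataObj.put("is_del", "DELETE".equals(type) ? 1 : 0);
        }
        // Assumed SQL shapes; the real ones come from tranferInsertSql/transferDeleteSql.
        String sql = logicalDelete
                ? "REPLACE INTO t_order (id, is_del) VALUES (" + dataObj.get("id") + ", " + dataObj.get("is_del") + ")"
                : "DELETE FROM t_order WHERE id = " + dataObj.get("id");
        System.out.println(sql); // REPLACE INTO t_order (id, is_del) VALUES (1001, 1)
    }
}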
package com.dsk.flink.dsc.sync;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSONObject;
import com.dsk.flink.dsc.common.function.AsyncMysqlDataTransferFunctionNew;
import com.dsk.flink.dsc.common.sink.MysqlDataTransferSink;
import com.dsk.flink.dsc.utils.EnvProperties;
import com.dsk.flink.dsc.utils.EnvPropertiesUtil;
@@ -12,15 +13,26 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
 * @author shezaixing
@@ -55,7 +67,9 @@ public class SyncCustomerDataSource {
        //TODO Change this later to the official consumer group
        FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<String>(envProps.getKafka_topic(), new SimpleStringSchema(), EtlUtils.getKafkaConfig(envProps.getKafka_brokers(), EtlUtils.getKafkaGroup(envProps), envProps.getKafka_username(), envProps.getKafka_password()));
        //System.out.println(envProps.getKafka_topic());
        // Default start position: replay from five minutes ago.
        long defaultOffset = LocalDateTime.now().minusMinutes(5).atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
        kafkaConsumer.setStartFromTimestamp(defaultOffset);
        //kafkaConsumer.setStartFromLatest();
        // Offset: an explicit timestamp argument overrides the default.
        if (StrUtil.isNotBlank(offsetTimestamp)) {
            kafkaConsumer.setStartFromTimestamp(Long.parseLong(offsetTimestamp));
@@ -72,7 +86,8 @@ public class SyncCustomerDataSource {
                .filter(new FilterFunction<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject value) throws Exception {
                        return !value.getBoolean("isDdl") && !"TIDB_WATERMARK".equals(value.getString("type"));
                    }
                })
                .name("canalJsonStream")
@@ -80,19 +95,54 @@ public class SyncCustomerDataSource {
        //canalJsonStream.print("canal stream");
//        SingleOutputStreamOperator<String> sqlResultStream = AsyncDataStream.orderedWait(canalJsonStream, new AsyncMysqlDataTransferFunction(envProps), 1200L, TimeUnit.SECONDS, 20)
//                .filter(new FilterFunction<String>() {
//                    @Override
//                    public boolean filter(String value) throws Exception {
//                        return StrUtil.isNotBlank(value) && !"err".equals(value);
//                    }
//                })
//                .name("sqlResultStream")
//                .uid("sqlResultStream");
//
//        //sqlResultStream.print("sql result");
//
//        sqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");

        // New pipeline: emit (sql, groupKey, ts) tuples so results can be keyed and de-duplicated.
        SingleOutputStreamOperator<Tuple3<String, String, Long>> sqlResultStream1 = AsyncDataStream.orderedWait(canalJsonStream,
                new AsyncMysqlDataTransferFunctionNew(envProps), 1200L, TimeUnit.SECONDS, 20)
                .filter(new FilterFunction<Tuple3<String, String, Long>>() {
                    @Override
                    public boolean filter(Tuple3<String, String, Long> value) throws Exception {
                        return StrUtil.isNotBlank(value.f0) && !"err".equals(value.f0);
                    }
                })
                .name("sqlResultStream")
                .uid("sqlResultStream");
        // Key by groupKey and collapse each 3-second window: ordinary tables keep only the
        // statement with the largest ts; the dsc_cdc_log table emits every statement
        // (ordered by ts descending) so the CDC log write stays complete.
        SingleOutputStreamOperator<String> groupWindowSqlResultStream = sqlResultStream1.keyBy(value -> value.f1)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(3)))
                .process(new ProcessWindowFunction<Tuple3<String, String, Long>, String, String, TimeWindow>() {
                    @Override
                    public void process(String s, ProcessWindowFunction<Tuple3<String, String, Long>, String, String,
                            TimeWindow>.Context context, Iterable<Tuple3<String, String, Long>> elements,
                            Collector<String> out) throws Exception {
                        List<Tuple3<String, String, Long>> list = CollUtil.list(false, elements);
                        if ("dsc_cdc_log".equals(list.get(0).f1)) {
                            list = list.stream().sorted(Comparator.comparing(x -> x.f2, Comparator.reverseOrder())).collect(Collectors.toList());
                            list.forEach(x -> out.collect(x.f0));
                            return;
                        }
                        Tuple3<String, String, Long> maxTsElement =
                                list.stream().max(Comparator.comparing(x -> x.f2)).get();
                        out.collect(maxTsElement.f0);
                    }
                })
                .name("groupWindowSqlResultStream")
                .uid("groupWindowSqlResultStream");

        groupWindowSqlResultStream.print("sql result");

        groupWindowSqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");
        env.execute();
    }
}
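The window above is the de-duplication heart of the change. A minimal sketch of the same keep-latest rule with plain collections (no Flink runtime), assuming the Tuple3 fields are f0 = SQL, f1 = groupKey (table plus primary key), f2 = binlog timestamp; dsc_cdc_log rows bypass this rule in the job so every CDC log statement is written.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import org.apache.flink.api.java.tuple.Tuple3;

// Sketch: within one window, only the statement with the largest ts per key survives.
public class WindowDedupSketch {
    public static void main(String[] args) {
        List<Tuple3<String, String, Long>> window = Arrays.asList(
                Tuple3.of("UPDATE t SET v = 1 WHERE id = 7", "t-7", 100L),
                Tuple3.of("UPDATE t SET v = 2 WHERE id = 7", "t-7", 200L));
        Tuple3<String, String, Long> latest =
                window.stream().max(Comparator.comparing(x -> x.f2)).get();
        System.out.println(latest.f0); // UPDATE t SET v = 2 WHERE id = 7
    }
}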
@@ -113,6 +113,26 @@ public class EnvProperties extends Properties {
    String solr_urls;
    String solr_zk_hosts;
    String logical_delete;
    String log_enable;

    public String getLog_enable() {
        return log_enable == null ? this.getProperty("log_enable") : log_enable;
    }
    public void setLog_enable(String log_enable) {
        this.log_enable = log_enable;
    }

    public String getLogical_delete() {
        return logical_delete == null ? this.getProperty("logical_delete") : logical_delete;
    }

    public void setLogical_delete(String logical_delete) {
        this.logical_delete = logical_delete;
    }

    public String getEnv() {
        return env == null ? this.getProperty("env") : env;
    }
......
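A hedged sketch of how a caller might resolve the two new flags (the getter names come from this diff; the blank-to-false defaulting is an assumption, not the project's confirmed behavior):

import cn.hutool.core.util.StrUtil;
import com.dsk.flink.dsc.utils.EnvProperties;

// Hypothetical helper: read the new flags with a safe default of false.
class ConfigFlagSketch {
    static boolean logicalDelete(EnvProperties envProps) {
        return Boolean.parseBoolean(StrUtil.blankToDefault(envProps.getLogical_delete(), "false"));
    }
    static boolean logEnable(EnvProperties envProps) {
        return Boolean.parseBoolean(StrUtil.blankToDefault(envProps.getLog_enable(), "false"));
    }
}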
@@ -145,7 +145,7 @@ public class EnvPropertiesUtil {
        EnvProperties envProperties = new EnvProperties();
        if (StrUtil.isBlank(filePath)) {
            filePath = System.getProperties().getProperty("os.name").contains("Windows") ? "D:\\Env\\application.properties" : "/home/module/flink-job/application.properties";
        }
        File file = new File(filePath);
        if (!file.exists()) {
......