Commit 5502068c authored by shezaixing

init commit

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.dsk.flink.dsc</groupId>
<artifactId>dsk-dsc-flink</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<flink-version>1.13.6</flink-version>
<flink-cdc-version>2.3.0</flink-cdc-version>
<hutool-version>5.8.4</hutool-version>
<lombok-version>1.18.24</lombok-version>
<fast-json-version>1.2.75</fast-json-version>
<kafka-client-version>2.7.0</kafka-client-version>
<mysql-connector-version>8.0.17</mysql-connector-version>
<flink-jdbc-version>1.10.0</flink-jdbc-version>
<redisson-version>3.17.7</redisson-version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>com.belerweb</groupId>
<artifactId>pinyin4j</artifactId>
<version>2.5.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-csv</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-avro</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-jdbc-client</artifactId>
<version>3.5.4</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-core</artifactId>
<version>3.5.4</version>
<exclusions>
<exclusion>
<artifactId>netty-buffer</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>netty-codec</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<artifactId>netty-common</artifactId>
<groupId>io.netty</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java-bridge_2.12</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-statebackend-rocksdb_2.12</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.2.11</version>
</dependency>
<!-- redis -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>4.2.3</version>
</dependency>
<dependency>
<groupId>io.lettuce</groupId>
<artifactId>lettuce-core</artifactId>
<version>6.2.1.RELEASE</version>
</dependency>
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
</exclusion>
<exclusion>
<artifactId>jedis</artifactId>
<groupId>redis.clients</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-jdbc_2.12</artifactId>
<version>${flink-jdbc-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-format-common</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka-client-version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok-version}</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>${hutool-version}</version>
</dependency>
<dependency>
<groupId>com.dsk.io.tidb</groupId>
<artifactId>flink-tidb-connector-1.13</artifactId>
<version>0.0.5.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.12</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-runtime-web_2.12</artifactId>
<version>${flink-version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-scala-bridge_2.12</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>scala-library</artifactId>
<groupId>org.scala-lang</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc_2.12</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-tidb-cdc</artifactId>
<version>${flink-cdc-version}</version>
<exclusions>
<exclusion>
<artifactId>protobuf-java</artifactId>
<groupId>com.google.protobuf</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-databind</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
</exclusion>
<exclusion>
<artifactId>commons-lang3</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>javassist</artifactId>
<groupId>org.javassist</groupId>
</exclusion>
<exclusion>
<artifactId>kafka-clients</artifactId>
<groupId>org.apache.kafka</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>tikv-client-java</artifactId>
<groupId>org.tikv</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-base</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>lz4-java</artifactId>
<groupId>org.lz4</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>snappy-java</artifactId>
<groupId>org.xerial.snappy</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-common</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-mysql-cdc</artifactId>
<version>${flink-cdc-version}</version>
<exclusions>
<!--<exclusion>
<artifactId>mysql-connector-java</artifactId>
<groupId>mysql</groupId>
</exclusion>-->
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>flink-connector-debezium</artifactId>
<groupId>com.ververica</groupId>
</exclusion>
</exclusions>
</dependency>
<!--<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-sql-connector-kafka_2.12</artifactId>
<version>${flink-version}</version>
</dependency>-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.12</artifactId>
<version>${flink-version}</version>
<exclusions>
<exclusion>
<artifactId>flink-connector-base</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>force-shading</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>kafka-clients</artifactId>
<groupId>org.apache.kafka</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>${fast-json-version}</version>
</dependency>
<dependency>
<groupId>org.dom4j</groupId>
<artifactId>dom4j</artifactId>
<version>2.1.3</version>
</dependency>
</dependencies>
<repositories>
<repository>
<id>dskmaven</id>
<name>dskmaven</name>
<url>http://120.27.13.145:8081/nexus/content/groups/public/</url>
</repository>
</repositories>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.10.1</version>
<!-- Specify the Java version required below -->
<configuration>
<encoding>UTF-8</encoding>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.3.0</version>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
<executions>
<!-- Run shade goal on package phase -->
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<!-- Merge the META-INF/services files of multiple connectors -->
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<!-- The service transformer is needed to merge META-INF/services files -->
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
<projectName>Apache Flink</projectName>
<encoding>UTF-8</encoding>
</transformer>
</transformers>
<!-- Automatically exclude unused classes to reduce the jar size -->
<!-- <minimizeJar>true</minimizeJar>-->
<artifactSet>
<excludes>
<exclude>org.apache.flink:force-shading</exclude>
<exclude>org.slf4j:*</exclude>
<exclude>org.apache.logging.log4j:*</exclude>
</excludes>
<!--<includes>
<include>com.aliyun.openservices:flink-log-connector</include>
<include>com.alibaba.hologres:hologres-connector-flink-1.12</include>
<include>com.google.guava:guava</include>
<include>org.projectlombok:lombok</include>
</includes>-->
</artifactSet>
<filters>
<filter>
<!-- Do not copy the signatures in the META-INF folder.
Otherwise, this might cause SecurityExceptions when using the JAR. -->
<artifact>*:*</artifact>
<excludes>
<exclude>module-info.class</exclude>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
</build>
</project>
package com.dsk.flink.dsc.common.function;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.dsk.flink.dsc.utils.EnvProperties;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* @author shezaixing
* @date 2023/12/6 15:40
* @description Assembles the SQL statements for the MySQL/TiDB sink
*
*/
public class AsyncMysqlDataTransferFunction extends RichAsyncFunction<JSONObject,String> {
static Logger logger = LoggerFactory.getLogger(AsyncMysqlDataTransferFunction.class);
//Database connection info
EnvProperties dbInfoMap;
//Thread pool
private transient ExecutorService executorService;
public AsyncMysqlDataTransferFunction(EnvProperties dbInfoMap) {
this.dbInfoMap = dbInfoMap;
}
@Override
public void open(Configuration parameters) throws Exception {
//Initialize the thread pool
executorService = new ThreadPoolExecutor(4 , 4, 20, TimeUnit.MINUTES, new LinkedBlockingDeque<>());
}
@Override
public void close() throws Exception {
executorService.shutdown();
}
@Override
public void timeout(JSONObject input, ResultFuture<String> resultFuture) throws Exception {
resultFuture.complete(Collections.singleton("err"));
}
@Override
public void asyncInvoke(JSONObject value, ResultFuture<String> resultFuture) throws Exception {
executorService.submit(() -> {
try {
String type = value.getString("type");
JSONArray dataList = value.getJSONArray("data");
JSONObject mysqlType = value.getJSONObject("mysqlType");
JSONArray oldDataList = value.getJSONArray("old");
String table = value.getString("table");
Boolean isDdl = value.getBoolean("isDdl");
JSONArray pkNames = value.getJSONArray("pkNames");
Set<String> pkNameSet = new HashSet<>();
if(CollUtil.isNotEmpty(pkNames)){
pkNames.forEach(name -> pkNameSet.add(String.valueOf(name)));
}
String excueteSql = "";
if(isDdl){
excueteSql = value.getString("sql");
if(StrUtil.isNotBlank(excueteSql)){
excueteSql = StrUtil.subBefore(excueteSql,"AFTER",true);
}
resultFuture.complete(Collections.singleton(excueteSql));
return;
}
JSONObject dataObj = dataList.getJSONObject(0);
if("INSERT".equals(type)){
excueteSql = tranferInsertSql(table,dataObj,mysqlType);
}
if("UPDATE".equals(type)){
JSONObject oldDataObj = oldDataList.getJSONObject(0);
// excueteSql = tranferUpdateSql(table,dataObj,oldDataObj,mysqlType,pkNameSet);
excueteSql = tranferInsertSql(table,dataObj,mysqlType);
}
if("DELETE".equals(type)){
excueteSql = transferDeleteSql(table,dataObj,mysqlType,pkNameSet);
}
resultFuture.complete(Collections.singleton(excueteSql));
}catch (Exception e){
logger.error("Failed to build SQL from message: {}", value, e);
resultFuture.complete(Collections.singleton("err"));
}
});
}
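/*
 * Illustrative example (not part of the original source): asyncInvoke expects a Canal-style
 * JSON message with the fields read above (type, isDdl, table, data, old, mysqlType, pkNames, sql).
 * Assuming a message such as
 *   {"type":"INSERT","isDdl":false,"table":"test",
 *    "data":[{"id":1,"name":"Nana"}],
 *    "mysqlType":{"id":"int","name":"varchar"},
 *    "old":null,"pkNames":["id"]}
 * the function emits roughly:
 *   INSERT INTO test (id,name) values (1,'Nana') ON DUPLICATE KEY UPDATE id = VALUES(id),name = VALUES(name);
 * UPDATE messages are currently mapped to the same upsert form (see asyncInvoke above), and
 * DELETE messages become DELETE FROM <table> WHERE <pk> = <value>.
 */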
private static final String[] STR_SQL_TYPE = new String[]{"VARCHAR","CHAR","TINYBLOB","BLOB","MEDIUMBLOB","LONGBLOB","TINYTEXT","TEXT","MEDIUMTEXT","LONGTEXT","TIME","TIMESTAMP"};
private static String tranferInsertSql(String table, JSONObject dataObj, JSONObject mysqlType) {
Set<String> columnSet = mysqlType.keySet();
List<String> valueList = new ArrayList<>();
List<String> updateList = new ArrayList<>();
for (String col : columnSet) {
valueList.add(getValueString(dataObj,col,mysqlType.getString(col)));
updateList.add(col.concat(" = VALUES(").concat(col).concat(")"));
}
String columnString = String.join(",",columnSet);
String valueString = String.join(",",valueList);
String updateString = String.join(",",updateList);
return String.format("INSERT INTO %s (%s) values (%s) ON DUPLICATE KEY UPDATE %s;",table,columnString,valueString,updateString);
}
private String tranferUpdateSql(String table, JSONObject dataObj, JSONObject oldDataObj, JSONObject mysqlType,Set<String> pkNameSet) {
Set<String> columnSet = mysqlType.keySet();
List<String> setList = new ArrayList<>();
List<String> whereList = new ArrayList<>();
for (String col : columnSet) {
String setString = col.concat(" = ").concat(getValueString(dataObj,col,mysqlType.getString(col)));
setList.add(setString);
}
for (String pk : pkNameSet) {
String whereString = pk.concat(" = ").concat(getValueString(oldDataObj,pk,mysqlType.getString(pk)));
whereList.add(whereString);
}
String setString = String.join(",",setList);
String whereString = String.join(" and ",whereList);
return String.format("UPDATE %s SET %s WHERE %s",table,setString,whereString);
}
private String transferDeleteSql(String table, JSONObject dataObj, JSONObject mysqlType, Set<String> pkNameSet) {
List<String> whereList = new ArrayList<>();
for (String pk : pkNameSet) {
String whereString = pk.concat(" = ").concat(getValueString(dataObj,pk,mysqlType.getString(pk)));
whereList.add(whereString);
}
String whereString = String.join(" and ",whereList);
return String.format("DELETE FROM %s WHERE %s",table,whereString);
}
/**
* @author shezaixing
* @date 2023/12/7 14:23
* @description Decide how a value should be rendered when building SQL (i.e. whether it needs quoting)
*
*/
private static String getValueString(JSONObject dataObj,String columnKey,String mysqlType){
if(null == dataObj.get(columnKey)){
return "null";
}
//Types that must be rendered as quoted strings
if(Arrays.asList(STR_SQL_TYPE).contains(mysqlType.toUpperCase())){
return String.format("'%s'",dataObj.getString(columnKey));
}
//Date/time column handling
if("DATE".equalsIgnoreCase(mysqlType) || "DATETIME".equalsIgnoreCase(mysqlType)){
SimpleDateFormat df = "DATETIME".equalsIgnoreCase(mysqlType) ? new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") : new SimpleDateFormat("yyyy-MM-dd");
return String.format("\"%s\"",df.format(dataObj.getDate(columnKey)));
}
return dataObj.getString(columnKey);
}
public static void main(String[] args) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("id",1);
jsonObject.put("name","Nana");
jsonObject.put("age",26);
jsonObject.put("salary",20000);
jsonObject.put("date1","2023-10-01");
jsonObject.put("date2","2023-10-02 11:11:00");
JSONObject mysqlType = new JSONObject();
mysqlType.put("id","int");
mysqlType.put("name","varchar");
mysqlType.put("age","bigint");
mysqlType.put("salary","double");
mysqlType.put("date1","date");
mysqlType.put("date2","datetime");
mysqlType.put("relation",null);
String table = "test";
String s= "ff8940af-c080-40cc-9d83-8c7dc8b86ed4";
System.out.println(s.length());
String s1 = "hello string sss";
System.out.println(StrUtil.subBefore(s1,"string",true));
System.out.println(tranferInsertSql(table,jsonObject,mysqlType));
}
}
package com.dsk.flink.dsc.common.sink;
import cn.hutool.core.date.DateUtil;
import com.alibaba.druid.pool.DruidDataSource;
import com.dsk.flink.dsc.utils.EnvProperties;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.concurrent.*;
public class MysqlDataTransferSink extends RichSinkFunction<String> {
static Logger logger = LoggerFactory.getLogger(MysqlDataTransferSink.class);
EnvProperties envProps;
ExecutorService executorService;
DruidDataSource dataSource;
public MysqlDataTransferSink(EnvProperties envProps) {
this.envProps = envProps;
}
@Override
public void open(Configuration parameters) throws Exception {
executorService = new ThreadPoolExecutor(4, 4, 20, TimeUnit.MINUTES, new LinkedBlockingDeque<>());
//Build the data source from the configuration
String configTidbUrl = String.format(envProps.getDb_url(), envProps.getDb_host(), envProps.getDb_port(), envProps.getDb_database());
dataSource = new DruidDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUsername(envProps.getDb_username());
dataSource.setPassword(envProps.getDb_password());
dataSource.setUrl(configTidbUrl);
dataSource.setMaxActive(30);
dataSource.setInitialSize(10);
dataSource.setTestWhileIdle(true);
dataSource.setMaxWait(20000);
dataSource.setValidationQuery("select 1");
}
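/*
 * Note (assumption): getDb_url() is used as a String.format template with three %s
 * placeholders (host, port, database). A hypothetical value in application.properties
 * could look like:
 *   db_url=jdbc:mysql://%s:%s/%s?useUnicode=true&characterEncoding=utf8&useSSL=false
 * Only the three placeholders are required by the format call in open(); the query
 * parameters shown here are illustrative.
 */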
@Override
public void close() throws Exception {
executorService.shutdown();
dataSource.close();
}
@Override
public void invoke(String value, Context context) throws Exception {
executorService.execute(() -> {
try {
executeSql(value);
}catch (Exception e){
e.printStackTrace();
}
});
}
private void executeSql(String sql) throws Exception{
try (Connection connection = dataSource.getConnection();
PreparedStatement pt = connection.prepareStatement(sql)) {
pt.execute();
} catch (Exception e) {
logger.error("Error time: {}, sql: {}, exception: {}", DateUtil.now(), sql, e.getMessage(), e);
}
}
}
package com.dsk.flink.dsc.sync;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSONObject;
import com.dsk.flink.dsc.common.function.AsyncMysqlDataTransferFunction;
import com.dsk.flink.dsc.common.sink.MysqlDataTransferSink;
import com.dsk.flink.dsc.utils.EnvProperties;
import com.dsk.flink.dsc.utils.EnvPropertiesUtil;
import com.dsk.flink.dsc.utils.EtlUtils;
import io.tidb.bigdata.tidb.JdbcConnectionProviderFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.util.concurrent.TimeUnit;
/**
* @author shezaixing
* @date 2023/12/5 14:44
* @description Job that syncs data to the customer's target data source
*
*/
@Slf4j
public class SyncCustomerDataSource {
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 30000));
env.enableCheckpointing(60000);
env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
env.getCheckpointConfig().setMinPauseBetweenCheckpoints(60000);
env.getCheckpointConfig().setCheckpointTimeout(7200000);
env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3);
//Load the user-provided configuration
ParameterTool parameterTool = ParameterTool.fromArgs(args);
String propertiesPath = parameterTool.get("propertiesPath");
EnvProperties envProps = EnvPropertiesUtil.getPropertiesFromArgsPath(propertiesPath);
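//Illustrative submission (jar name assumed from <finalName>); if --propertiesPath is omitted,
//getPropertiesFromArgsPath falls back to /home/module/flink-job/application.properties on Linux:
//  flink run -c com.dsk.flink.dsc.sync.SyncCustomerDataSource dsk-dsc-flink.jar \
//    --propertiesPath /home/module/flink-job/application.properties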
envProps.put("providerImpl", JdbcConnectionProviderFactory.HikariDataSourceJdbcConnectionProvider.class.getName());
System.out.println("Loaded configuration -> " + envProps);
FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<String>(envProps.getKafka_topic(), new SimpleStringSchema(), EtlUtils.getKafkaConfig(envProps.getKafka_brokers(), envProps.getKafka_topic().concat("-group")));
kafkaConsumer.setStartFromEarliest();
SingleOutputStreamOperator<String> kafkaSource = env.addSource(kafkaConsumer)
.setParallelism(1)
.name("kafka-source")
.uid("kafka-source");
//kafkaSource.print("kafka stream");
SingleOutputStreamOperator<JSONObject> canalJsonStream = kafkaSource.map(JSONObject::parseObject)
.name("canalJsonStream")
.uid("canalJsonStream");
canalJsonStream.print("canal stream");
SingleOutputStreamOperator<String> sqlResultStream = AsyncDataStream.orderedWait(canalJsonStream, new AsyncMysqlDataTransferFunction(envProps), 1200L, TimeUnit.SECONDS, 20)
.filter(new FilterFunction<String>() {
@Override
public boolean filter(String value) throws Exception {
return StrUtil.isNotBlank(value) && !"err".equals(value);
}
})
.name("sqlResultStream")
.uid("sqlResultStream");
sqlResultStream.print("sql result");
sqlResultStream.addSink(new MysqlDataTransferSink(envProps)).name("sqlSinkStream").uid("sqlSinkStream");
env.execute();
}
}
package com.dsk.flink.dsc.utils;
import java.util.Properties;
/**
* @Description:
* @author zhaowei 2022-10-09
*/
public class EnvProperties extends Properties {
// Connection
String db_url;
// JSK (建设库) TiDB database
String db_host;
String db_port;
String db_username;
String db_password;
String tidb_pd_addresses;
String db_database;
String kafka_brokers;
String kafka_topic;
String st_kafka_brokers;
//Shared TiDB component
String tidb_plugin_host;
String tidb_plugin_port;
String tidb_plugin_username;
String tidb_plugin_password;
String tidb_plugin_pd_addresses;
// JSK RDS user database
String rds_jsk_host;
String rds_jsk_port;
String rds_jsk_username;
String rds_jsk_password;
// JSK RDS data database
String rds_jsk_data_host;
String rds_jsk_data_port;
String rds_jsk_data_username;
String rds_jsk_data_password;
// Open database
String rds_jsk_open_host;
String rds_jsk_open_port;
String rds_jsk_open_username;
String rds_jsk_open_password;
// CRM database
String rds_dsk_crm_host;
String rds_dsk_crm_port;
String rds_dsk_crm_username;
String rds_dsk_crm_password;
// Collection department source (sync) database
String rds_cjb_source_host;
String rds_cjb_source_port;
String rds_cjb_source_username;
String rds_cjb_source_password;
// Collection department sink (push-down) database
String rds_cjb_sink_host;
String rds_cjb_sink_port;
String rds_cjb_sink_username;
String rds_cjb_sink_password;
// JSK Elasticsearch
String es_jsk_host;
String es_jsk_port;
String es_jsk_username;
String es_jsk_password;
// Enterprise Elasticsearch
String es_com_hosts;
String es_com_user_name;
String es_com_password;
String es_bulk_flush_max_actions;
String es_bulk_flush_max_size_mb;
String es_bulk_flush_interval;
// Redis deployment mode
String redis_active;
// Redis cluster
String cluster_redis_host;
String cluster_redis_password;
// Standalone Redis
String standalone_redis_host;
String standalone_redis_port;
String standalone_redis_password;
// Thread pool
String thread_pool_threads;
//Database connection pool
String datasource_max_active;
String datasource_initial_active;
String datasource_max_wait;
//solr
String solr_urls;
String solr_zk_hosts;
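/*
 * Hypothetical application.properties snippet showing the keys the getters below read
 * (values are placeholders, not real endpoints). Note that getDb_host() reads the
 * "tidb_host" key rather than "db_host":
 *   db_url=jdbc:mysql://%s:%s/%s
 *   tidb_host=127.0.0.1
 *   db_port=3306
 *   db_database=example_db
 *   db_username=example_user
 *   db_password=example_password
 *   kafka_brokers=127.0.0.1:9092
 *   kafka_topic=example-topic
 */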
public String getDb_url() {
return db_url == null ? this.getProperty("db_url") : db_url;
}
public void setDb_url(String db_url) {
this.db_url = db_url;
}
public String getDb_host() {
return db_host == null ? this.getProperty("tidb_host") : db_host;
}
public void setDb_host(String db_host) {
this.db_host = db_host;
}
public String getDb_port() {
return db_port == null ? this.getProperty("db_port") : db_port;
}
public void setDb_port(String db_port) {
this.db_port = db_port;
}
public String getDb_username() {
return db_username == null ? this.getProperty("db_username") : db_username;
}
public void setDb_username(String db_username) {
this.db_username = db_username;
}
public String getDb_password() {
return db_password == null ? this.getProperty("db_password") : db_password;
}
public void setDb_password(String db_password) {
this.db_password = db_password;
}
public String getTidb_pd_addresses() {
return tidb_pd_addresses == null ? this.getProperty("tidb_pd_addresses") : tidb_pd_addresses;
}
public void setTidb_pd_addresses(String tidb_pd_addresses) {
this.tidb_pd_addresses = tidb_pd_addresses;
}
public String getDb_database() {
return db_database == null ? this.getProperty("db_database") : db_database;
}
public void setDb_database(String db_database) {
this.db_database = db_database;
}
public String getKafka_brokers() {
return kafka_brokers == null ? this.getProperty("kafka_brokers") : kafka_brokers;
}
public void setKafka_brokers(String kafka_brokers) {
this.kafka_brokers = kafka_brokers;
}
public String getSt_kafka_brokers() {
return st_kafka_brokers == null ? this.getProperty("st_kafka_brokers") : st_kafka_brokers;
}
public void setSt_kafka_brokers(String st_kafka_brokers) {
this.st_kafka_brokers = st_kafka_brokers;
}
public String getRds_jsk_host() {
return rds_jsk_host == null ? this.getProperty("rds_jsk_host") : rds_jsk_host;
}
public void setRds_jsk_host(String rds_jsk_host) {
this.rds_jsk_host = rds_jsk_host;
}
public String getRds_jsk_port() {
return rds_jsk_port == null ? this.getProperty("rds_jsk_port") : rds_jsk_port;
}
public void setRds_jsk_port(String rds_jsk_port) {
this.rds_jsk_port = rds_jsk_port;
}
public String getRds_jsk_username() {
return rds_jsk_username == null ? this.getProperty("rds_jsk_username") : rds_jsk_username;
}
public void setRds_jsk_username(String rds_jsk_username) {
this.rds_jsk_username = rds_jsk_username;
}
public String getRds_jsk_password() {
return rds_jsk_password == null ? this.getProperty("rds_jsk_password") : rds_jsk_password;
}
public void setRds_jsk_password(String rds_jsk_password) {
this.rds_jsk_password = rds_jsk_password;
}
public String getRds_jsk_data_host() {
return rds_jsk_data_host == null ? this.getProperty("rds_jsk_data_host") : rds_jsk_data_host;
}
public void setRds_jsk_data_host(String rds_jsk_data_host) {
this.rds_jsk_data_host = rds_jsk_data_host;
}
public String getRds_jsk_data_port() {
return rds_jsk_data_port == null ? this.getProperty("rds_jsk_data_port") : rds_jsk_data_port;
}
public void setRds_jsk_data_port(String rds_jsk_data_port) {
this.rds_jsk_data_port = rds_jsk_data_port;
}
public String getRds_jsk_data_username() {
return rds_jsk_data_username == null ? this.getProperty("rds_jsk_data_username") : rds_jsk_data_username;
}
public void setRds_jsk_data_username(String rds_jsk_data_username) {
this.rds_jsk_data_username = rds_jsk_data_username;
}
public String getRds_jsk_data_password() {
return rds_jsk_data_password == null ? this.getProperty("rds_jsk_data_password") : rds_jsk_data_password;
}
public void setRds_jsk_data_password(String rds_jsk_data_password) {
this.rds_jsk_data_password = rds_jsk_data_password;
}
public String getRds_jsk_open_host() {
return rds_jsk_open_host == null ? this.getProperty("rds_jsk_open_host") : rds_jsk_open_host;
}
public void setRds_jsk_open_host(String rds_jsk_open_host) {
this.rds_jsk_open_host = rds_jsk_open_host;
}
public String getRds_jsk_open_port() {
return rds_jsk_open_port == null ? this.getProperty("rds_jsk_open_port") : rds_jsk_open_port;
}
public void setRds_jsk_open_port(String rds_jsk_open_port) {
this.rds_jsk_open_port = rds_jsk_open_port;
}
public String getRds_jsk_open_username() {
return rds_jsk_open_username == null ? this.getProperty("rds_jsk_open_username") : rds_jsk_open_username;
}
public void setRds_jsk_open_username(String rds_jsk_open_username) {
this.rds_jsk_open_username = rds_jsk_open_username;
}
public String getRds_jsk_open_password() {
return rds_jsk_open_password == null ? this.getProperty("rds_jsk_open_password") : rds_jsk_open_password;
}
public void setRds_jsk_open_password(String rds_jsk_open_password) {
this.rds_jsk_open_password = rds_jsk_open_password;
}
public String getRds_dsk_crm_host() {
return rds_dsk_crm_host == null ? this.getProperty("rds_dsk_crm_host") : rds_dsk_crm_host;
}
public void setRds_dsk_crm_host(String rds_dsk_crm_host) {
this.rds_dsk_crm_host = rds_dsk_crm_host;
}
public String getRds_dsk_crm_port() {
return rds_dsk_crm_port == null ? this.getProperty("rds_dsk_crm_port") : rds_dsk_crm_port;
}
public void setRds_dsk_crm_port(String rds_dsk_crm_port) {
this.rds_dsk_crm_port = rds_dsk_crm_port;
}
public String getRds_dsk_crm_username() {
return rds_dsk_crm_username == null ? this.getProperty("rds_dsk_crm_username") : rds_dsk_crm_username;
}
public void setRds_dsk_crm_username(String rds_dsk_crm_username) {
this.rds_dsk_crm_username = rds_dsk_crm_username;
}
public String getRds_dsk_crm_password() {
return rds_dsk_crm_password == null ? this.getProperty("rds_dsk_crm_password") : rds_dsk_crm_password;
}
public void setRds_dsk_crm_password(String rds_dsk_crm_password) {
this.rds_dsk_crm_password = rds_dsk_crm_password;
}
public String getRds_cjb_source_host() {
return rds_cjb_source_host == null ? this.getProperty("rds_cjb_source_host") : rds_cjb_source_host;
}
public void setRds_cjb_source_host(String rds_cjb_source_host) {
this.rds_cjb_source_host = rds_cjb_source_host;
}
public String getRds_cjb_source_port() {
return rds_cjb_source_port == null ? this.getProperty("rds_cjb_source_port") : rds_cjb_source_port;
}
public void setRds_cjb_source_port(String rds_cjb_source_port) {
this.rds_cjb_source_port = rds_cjb_source_port;
}
public String getRds_cjb_source_username() {
return rds_cjb_source_username == null ? this.getProperty("rds_cjb_source_username") : rds_cjb_source_username;
}
public void setRds_cjb_source_username(String rds_cjb_source_username) {
this.rds_cjb_source_username = rds_cjb_source_username;
}
public String getRds_cjb_source_password() {
return rds_cjb_source_password == null ? this.getProperty("rds_cjb_source_password") : rds_cjb_source_password;
}
public void setRds_cjb_source_password(String rds_cjb_source_password) {
this.rds_cjb_source_password = rds_cjb_source_password;
}
public String getRds_cjb_sink_host() {
return rds_cjb_sink_host == null ? this.getProperty("rds_cjb_sink_host") : rds_cjb_sink_host;
}
public void setRds_cjb_sink_host(String rds_cjb_sink_host) {
this.rds_cjb_sink_host = rds_cjb_sink_host;
}
public String getRds_cjb_sink_port() {
return rds_cjb_sink_port == null ? this.getProperty("rds_cjb_sink_port") : rds_cjb_sink_port;
}
public void setRds_cjb_sink_port(String rds_cjb_sink_port) {
this.rds_cjb_sink_port = rds_cjb_sink_port;
}
public String getRds_cjb_sink_username() {
return rds_cjb_sink_username == null ? this.getProperty("rds_cjb_sink_username") : rds_cjb_sink_username;
}
public void setRds_cjb_sink_username(String rds_cjb_sink_username) {
this.rds_cjb_sink_username = rds_cjb_sink_username;
}
public String getRds_cjb_sink_password() {
return rds_cjb_sink_password == null ? this.getProperty("rds_cjb_sink_password") : rds_cjb_sink_password;
}
public void setRds_cjb_sink_password(String rds_cjb_sink_password) {
this.rds_cjb_sink_password = rds_cjb_sink_password;
}
public String getEs_jsk_host() {
return es_jsk_host == null ? this.getProperty("es_jsk_host") : es_jsk_host;
}
public void setEs_jsk_host(String es_jsk_host) {
this.es_jsk_host = es_jsk_host;
}
public String getEs_jsk_port() {
return es_jsk_port == null ? this.getProperty("es_jsk_port") : es_jsk_port;
}
public void setEs_jsk_port(String es_jsk_port) {
this.es_jsk_port = es_jsk_port;
}
public String getEs_jsk_username() {
return es_jsk_username == null ? this.getProperty("es_jsk_username") : es_jsk_username;
}
public void setEs_jsk_username(String es_jsk_username) {
this.es_jsk_username = es_jsk_username;
}
public String getEs_jsk_password() {
return es_jsk_password == null ? this.getProperty("es_jsk_password") : es_jsk_password;
}
public void setEs_jsk_password(String es_jsk_password) {
this.es_jsk_password = es_jsk_password;
}
public String getEs_com_hosts() {
return es_com_hosts == null ? this.getProperty("es_com_hosts") : es_com_hosts;
}
public void setEs_com_hosts(String es_com_hosts) {
this.es_com_hosts = es_com_hosts;
}
public String getEs_com_user_name() {
return es_com_user_name == null ? this.getProperty("es_com_user_name") : es_com_user_name;
}
public void setEs_com_user_name(String es_com_user_name) {
this.es_com_user_name = es_com_user_name;
}
public String getEs_com_password() {
return es_com_password == null ? this.getProperty("es_com_password") : es_com_password;
}
public void setEs_com_password(String es_com_password) {
this.es_com_password = es_com_password;
}
public String getEs_bulk_flush_max_actions() {
return es_bulk_flush_max_actions == null ? this.getProperty("es_bulk_flush_max_actions") : es_bulk_flush_max_actions;
}
public void setEs_bulk_flush_max_actions(String es_bulk_flush_max_actions) {
this.es_bulk_flush_max_actions = es_bulk_flush_max_actions;
}
public String getEs_bulk_flush_max_size_mb() {
return es_bulk_flush_max_size_mb == null ? this.getProperty("es_bulk_flush_max_size_mb") : es_bulk_flush_max_size_mb;
}
public void setEs_bulk_flush_max_size_mb(String es_bulk_flush_max_size_mb) {
this.es_bulk_flush_max_size_mb = es_bulk_flush_max_size_mb;
}
public String getEs_bulk_flush_interval() {
return es_bulk_flush_interval == null ? this.getProperty("es_bulk_flush_interval") : es_bulk_flush_interval;
}
public void setEs_bulk_flush_interval(String es_bulk_flush_interval) {
this.es_bulk_flush_interval = es_bulk_flush_interval;
}
public String getRedis_active() {
return redis_active == null ? this.getProperty("redis_active") : redis_active;
}
public void setRedis_active(String redis_active) {
this.redis_active = redis_active;
}
public String getCluster_redis_host() {
return cluster_redis_host == null ? this.getProperty("cluster_redis_host") : cluster_redis_host;
}
public void setCluster_redis_host(String cluster_redis_host) {
this.cluster_redis_host = cluster_redis_host;
}
public String getCluster_redis_password() {
return cluster_redis_password == null ? this.getProperty("cluster_redis_password") : cluster_redis_password;
}
public void setCluster_redis_password(String cluster_redis_password) {
this.cluster_redis_password = cluster_redis_password;
}
public String getStandalone_redis_host() {
return standalone_redis_host == null ? this.getProperty("standalone_redis_host") : standalone_redis_host;
}
public void setStandalone_redis_host(String standalone_redis_host) {
this.standalone_redis_host = standalone_redis_host;
}
public String getStandalone_redis_port() {
return standalone_redis_port == null ? this.getProperty("standalone_redis_port") : standalone_redis_port;
}
public void setStandalone_redis_port(String standalone_redis_port) {
this.standalone_redis_port = standalone_redis_port;
}
public String getStandalone_redis_password() {
return standalone_redis_password == null ? this.getProperty("standalone_redis_password") : standalone_redis_password;
}
public void setStandalone_redis_password(String standalone_redis_password) {
this.standalone_redis_password = standalone_redis_password;
}
public String getThread_pool_threads() {
return thread_pool_threads == null ? this.getProperty("thread_pool_threads") : thread_pool_threads;
}
public void setThread_pool_threads(String thread_pool_threads) {
this.thread_pool_threads = thread_pool_threads;
}
public String getDatasource_max_active() {
return datasource_max_active == null ? this.getProperty("datasource_max_active") : datasource_max_active;
}
public void setDatasource_max_active(String datasource_max_active) {
this.datasource_max_active = datasource_max_active;
}
public String getDatasource_initial_active() {
return datasource_initial_active == null ? this.getProperty("datasource_initial_active") : datasource_initial_active;
}
public void setDatasource_initial_active(String datasource_initial_active) {
this.datasource_initial_active = datasource_initial_active;
}
public String getDatasource_max_wait() {
return datasource_max_wait == null ? this.getProperty("datasource_max_wait") : datasource_max_wait;
}
public void setDatasource_max_wait(String datasource_max_wait) {
this.datasource_max_wait = datasource_max_wait;
}
/*public String getSolr_urls() {
return solr_urls == null ? this.getProperty("solr_urls") : solr_urls;
}
public void setSolr_urls(String solr_urls) {
this.solr_urls = solr_urls;
}*/
public String getSolr_zk_hosts() {
return solr_zk_hosts == null ? this.getProperty("solr_zk_hosts") : solr_zk_hosts;
}
public void setSolr_zk_hosts(String solr_zk_hosts) {
this.solr_zk_hosts = solr_zk_hosts;
}
public String getTidb_plugin_host() {
return tidb_plugin_host == null ? this.getProperty("tidb_plugin_host") : tidb_plugin_host;
}
public void setTidb_plugin_host(String tidb_plugin_host) {
this.tidb_plugin_host = tidb_plugin_host;
}
public String getTidb_plugin_port() {
return tidb_plugin_port == null ? this.getProperty("tidb_plugin_port") : tidb_plugin_port;
}
public void setTidb_plugin_port(String tidb_plugin_port) {
this.tidb_plugin_port = tidb_plugin_port;
}
public String getTidb_plugin_username() {
return tidb_plugin_username == null ? this.getProperty("tidb_plugin_username") : tidb_plugin_username;
}
public void setTidb_plugin_username(String tidb_plugin_username) {
this.tidb_plugin_username = tidb_plugin_username;
}
public String getTidb_plugin_password() {
return tidb_plugin_password == null ? this.getProperty("tidb_plugin_password") : tidb_plugin_password;
}
public void setTidb_plugin_password(String tidb_plugin_password) {
this.tidb_plugin_password = tidb_plugin_password;
}
public String getTidb_plugin_pd_addresses() {
return tidb_plugin_pd_addresses == null ? this.getProperty("tidb_plugin_pd_addresses") : tidb_plugin_pd_addresses;
}
public void setTidb_plugin_pd_addresses(String tidb_plugin_pd_addresses) {
this.tidb_plugin_pd_addresses = tidb_plugin_pd_addresses;
}
public String getKafka_topic() {
return kafka_topic == null ? this.getProperty("kafka_topic") : kafka_topic;
}
public void setKafka_topic(String kafka_topic) {
this.kafka_topic = kafka_topic;
}
}
package com.dsk.flink.dsc.utils;
import cn.hutool.core.util.StrUtil;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.AbstractMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
/**
* @Description:
* @author zhaowei 2022-09-29
*/
public class EnvPropertiesUtil {
private static String SYSTEM_ENV = "dev";
private static EnvProperties properties = new EnvProperties();
static {
SYSTEM_ENV = System.getProperty("profiles.active") == null ? SYSTEM_ENV : System.getProperty("profiles.active");
System.out.println("Env: " + SYSTEM_ENV + " properties");
URI uri = null;
try {
uri = EnvPropertiesUtil.class.getProtectionDomain().getCodeSource().getLocation().toURI();
} catch (URISyntaxException e1) {
e1.printStackTrace();
}
if (uri.toString().endsWith(".jar")) {
// Running from inside a jar
String jarPath = uri.toString();
uri = URI.create(jarPath.substring(jarPath.indexOf("file:"),jarPath.indexOf(".jar") + 4));
try {
allPropertiesJar(uri, properties);
}catch (Exception e) {
e.printStackTrace();
}
} else {
//Running locally (classes directory)
try {
allPropertiesLocal(uri, properties);
}catch (Exception e) {
e.printStackTrace();
}
}
}
/**
*
* @Description: Returns a copy of the default merged properties
* @return
*/
public static EnvProperties getProperties() {
return (EnvProperties) properties.clone();
}
/**
*
* @Description: Get a single property value
* @param key
* @return
*/
public static String getProperty(String key) {
return properties.getProperty(key);
}
/**
* Read .properties resources when running locally (from a directory on the classpath)
* @throws IOException
*/
private static void allPropertiesLocal(URI uri, Properties properties) throws IOException {
File resources = new File(uri.getPath());
if (!resources.exists() || !resources.isDirectory()) {
return ;
}
File[] propertiesFiles = resources.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
if(!name.endsWith(".properties")) return false;
if(name.matches(getApplicationPropertiesRegx()) && !name.equals(getApplicationPropertiesName())) {
return false;
}
return true;
}
});
if(propertiesFiles == null || propertiesFiles.length == 0) {
return ;
}
for(File file : propertiesFiles) {
properties.load(new FileInputStream(file));
}
}
/**
* Read .properties resources when running from a jar
* @throws IOException
*/
private static void allPropertiesJar(URI uri, Properties properties) throws IOException {
List<Map.Entry<ZipEntry, InputStream>> collect = readJarFile(new JarFile(uri.getPath())).collect(Collectors.toList());
if(collect == null || collect.isEmpty()) {
return;
}
for (Map.Entry<ZipEntry, InputStream> entry : collect) {
// Relative path of the entry inside the jar
String key = entry.getKey().getName();
// Input stream of the entry
InputStream stream = entry.getValue();
properties.load(stream);
}
}
private static Stream<Map.Entry<ZipEntry, InputStream>> readJarFile(JarFile jarFile) {
Stream<Map.Entry<ZipEntry, InputStream>> readingStream = jarFile.stream()
.filter(entry -> {
if(entry.getName().matches(getApplicationPropertiesRegx()) && !entry.getName().equals(getApplicationPropertiesName())) {
return false;
}
return !entry.isDirectory() && entry.getName().endsWith(".properties");
})
.map(entry -> {
try {
return new AbstractMap.SimpleEntry<>(entry, jarFile.getInputStream(entry));
} catch (IOException e) {
return new AbstractMap.SimpleEntry<>(entry, null);
}
});
return readingStream.onClose(() -> {
try {
jarFile.close();
} catch (IOException e) {
e.printStackTrace();
}
});
}
public static EnvProperties getPropertiesFromArgsPath(String filePath) throws IOException {
EnvProperties envProperties = new EnvProperties();
if(StrUtil.isBlank(filePath)){
filePath = System.getProperties().getProperty("os.name").contains("Windows") ? "D:\\Env\\application_pro.properties" : "/home/module/flink-job/application.properties";
}
File file = new File(filePath);
if (!file.exists()) {
return new EnvProperties();
}
envProperties.load(Files.newInputStream(file.toPath()));
//System.out.println("Loaded kafka_topic: " + envProperties.getKafka_topic());
return envProperties;
}
private static String getApplicationPropertiesName() {
return "application_"+SYSTEM_ENV+".properties";
}
private static String getApplicationPropertiesRegx() {
return "application_[a-z]{3,5}\\.properties";
}
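/*
 * Example (illustrative): with -Dprofiles.active=pro, classpath files matching
 * application_[a-z]{3,5}.properties are skipped except application_pro.properties,
 * while all other *.properties files are always loaded by the static initializer above.
 */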
public static void main(String[] args) throws IOException {
String filePath = "D:\\Env\\application_pro.properties";
System.out.println(getPropertiesFromArgsPath(filePath).toString());
EnvProperties envProperties = getPropertiesFromArgsPath(filePath);
System.out.println(envProperties.getDb_database());
}
}
package com.dsk.flink.dsc.utils;
import java.util.Properties;
public class EtlUtils {
public static Properties getKafkaConfig(String url, String groupId) {
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", url);
properties.setProperty("group.id", groupId);
properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
//properties.setProperty("auto.offset.reset", "earliest");
properties.setProperty("auto.offset.reset", "latest");
//properties.setProperty("max.poll.interval.ms", "604800000");
//properties.setProperty("session.timeout.ms", "20160000");
//properties.setProperty("heartbeat.interval.ms", "6720000");
//properties.setProperty("max.partition.fetch.bytes", "349525");
//properties.setProperty("max.poll.records", "50");
/*properties.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"dsk\" password=\"LU1SRhTzoxssRoCp\";");
properties.setProperty("security.protocol", "SASL_PLAINTEXT");
properties.setProperty("sasl.mechanism", "PLAIN");*/
properties.setProperty("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"dsk_st\" password=\"vIfVv6esjpwU2jT\";");
properties.setProperty("security.protocol", "SASL_PLAINTEXT");
properties.setProperty("sasl.mechanism", "SCRAM-SHA-512");
return properties;
}
}